diff --git a/api/src/endpoints.rs b/api/src/endpoints.rs
index 8a3ff11c6..6258e114d 100644
--- a/api/src/endpoints.rs
+++ b/api/src/endpoints.rs
@@ -76,11 +76,11 @@ impl ApiEndpoint for OutputApi {
 		debug!("GET output {}", id);
 		let c = util::from_hex(id.clone()).map_err(|_| Error::Argument(format!("Not a valid commitment: {}", id)))?;
-		match self.chain.get_unspent(&Commitment::from_vec(c)) {
-			Some(utxo) => Ok(utxo),
-			None => Err(Error::NotFound),
-		}
-
+		// TODO - can probably clean up the error mapping here
+		match self.chain.get_unspent(&Commitment::from_vec(c)) {
+			Ok(utxo) => Ok(utxo),
+			Err(_) => Err(Error::NotFound),
+		}
 	}
 }
diff --git a/chain/src/chain.rs b/chain/src/chain.rs
index c847bee65..ae3eca774 100644
--- a/chain/src/chain.rs
+++ b/chain/src/chain.rs
@@ -32,18 +32,6 @@ use core::global::{MiningParameterMode,MINING_PARAMETER_MODE};
 
 const MAX_ORPHANS: usize = 20;
 
-/// Helper macro to transform a Result into an Option with None in case
-/// of error
-macro_rules! none_err {
-	($trying:expr) => {{
-		let tried = $trying;
-		if let Err(_) = tried {
-			return None;
-		}
-		tried.unwrap()
-	}}
-}
-
 /// Facade to the blockchain block processing pipeline and storage. Provides
 /// the current view of the UTXO set according to the chain state. Also
 /// maintains locking for the pipeline to avoid conflicting processing.
@@ -127,7 +115,6 @@ impl Chain {
 	/// has been added to the longest chain, None if it's added to an (as of
 	/// now) orphan chain.
 	pub fn process_block(&self, b: Block, opts: Options) -> Result<Option<Tip>, Error> {
-
 		let head = self.store.head().map_err(&Error::StoreErr)?;
 		let ctx = self.ctx_from_head(head, opts);
 
@@ -211,35 +198,37 @@ impl Chain {
 		}
 	}
 
-	/// Gets an unspent output from its commitment. With return None if the
-	/// output
-	/// doesn't exist or has been spent. This querying is done in a way that's
-	/// constistent with the current chain state and more specifically the
+	/// Gets an unspent output from its commitment.
+	/// Will return an Error if the output doesn't exist or has been spent.
+	/// This querying is done in a way that's
+	/// consistent with the current chain state and more specifically the
 	/// current
 	/// branch it is on in case of forks.
-	pub fn get_unspent(&self, output_ref: &Commitment) -> Option<Output> {
+	pub fn get_unspent(&self, output_ref: &Commitment) -> Result<Output, Error> {
 		// TODO use an actual UTXO tree
 		// in the meantime doing it the *very* expensive way:
 		// 1. check the output exists
 		// 2. run the chain back from the head to check it hasn't been spent
 		if let Ok(out) = self.store.get_output_by_commit(output_ref) {
-			let head = none_err!(self.store.head());
-			let mut block_h = head.last_block_h;
-			loop {
-				let b = none_err!(self.store.get_block(&block_h));
-				for input in b.inputs {
-					if input.commitment() == *output_ref {
-						return None;
+			if let Ok(head) = self.store.head() {
+				let mut block_h = head.last_block_h;
+				loop {
+					if let Ok(b) = self.store.get_block(&block_h) {
+						for input in b.inputs {
+							if input.commitment() == *output_ref {
+								return Err(Error::OutputSpent);
+							}
+						}
+						if b.header.height == 1 {
+							return Ok(out);
+						} else {
+							block_h = b.header.previous;
+						}
 					}
 				}
-			}
-			if b.header.height == 1 {
-				return Some(out);
-			} else {
-				block_h = b.header.previous;
-			}
 			}
 		}
-		None
+		Err(Error::OutputNotFound)
 	}
 
 	/// Total difficulty at the head of the chain
@@ -274,12 +263,12 @@ impl Chain {
 		)
 	}
 
-  /// Gets the block header by the provided output commitment
-  pub fn get_block_header_by_output_commit(&self, commit: &Commitment) -> Result<BlockHeader, Error> {
-    self.store.get_block_header_by_output_commit(commit).map_err(
-      &Error::StoreErr,
-    )
-  }
+	/// Gets the block header by the provided output commitment
+	pub fn get_block_header_by_output_commit(&self, commit: &Commitment) -> Result<BlockHeader, Error> {
+		self.store.get_block_header_by_output_commit(commit).map_err(
+			&Error::StoreErr,
+		)
+	}
 
 	/// Get the tip of the header chain
 	pub fn get_header_head(&self) -> Result<Tip, Error> {
diff --git a/chain/src/pipe.rs b/chain/src/pipe.rs
index d99fc8314..5e4a4f436 100644
--- a/chain/src/pipe.rs
+++ b/chain/src/pipe.rs
@@ -22,6 +22,7 @@ use time;
 
 use core::consensus;
 use core::core::hash::{Hash, Hashed};
 use core::core::{BlockHeader, Block};
+use core::core::transaction;
 use types::*;
 use store;
 use core::global;
@@ -63,6 +64,7 @@ pub fn process_block(b: &Block, mut ctx: BlockContext) -> Result<Option<Tip>, Er
 		// in sync mode, the header has already been validated
 		validate_header(&b.header, &mut ctx)?;
 	}
+	validate_block(b, &mut ctx)?;
 
 	debug!(
 		"Block at {} with hash {} is valid, going to save and append.",
@@ -167,16 +169,41 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
 }
 
 /// Fully validate the block content.
-fn validate_block(b: &Block, ctx: &mut BlockContext) -> Result<(), Error> {
-	if b.header.height > ctx.head.height + 1 {
+fn validate_block(block: &Block, ctx: &mut BlockContext) -> Result<(), Error> {
+	if block.header.height > ctx.head.height + 1 {
 		return Err(Error::Orphan);
 	}
 
 	let curve = secp::Secp256k1::with_caps(secp::ContextFlag::Commit);
-	try!(b.validate(&curve).map_err(&Error::InvalidBlockProof));
+	try!(block.validate(&curve).map_err(&Error::InvalidBlockProof));
+
+	// check that all the outputs of the block are "new" -
+	// that they do not clobber any existing unspent outputs (by their commitment)
+	//
+	// TODO - do we need to do this here (and can we do this here if we need access to the chain)
+	// see check_duplicate_outputs in pool for the analogous operation on transaction outputs
+	// for output in &block.outputs {
+	//     here we would check that the output is not a duplicate output based on the current chain
+	// };
+
+	// TODO check every input exists as a UTXO using the UTXO index
+	// check that any coinbase outputs are spendable (that they have matured sufficiently)
+	for input in &block.inputs {
+		if let Ok(output) = ctx.store.get_output_by_commit(&input.commitment()) {
+			if output.features.contains(transaction::COINBASE_OUTPUT) {
+				if let Ok(output_header) = ctx.store.get_block_header_by_output_commit(&input.commitment()) {
+
+					// TODO - make sure we are not off-by-1 here vs. the equivalent transaction validation rule
+					if block.header.height <= output_header.height + consensus::COINBASE_MATURITY {
+						return Err(Error::ImmatureCoinbase);
+					}
+				};
+			};
+		};
+	};
+
 	Ok(())
 }
diff --git a/chain/src/types.rs b/chain/src/types.rs
index 1c1b47ad8..30a16d6aa 100644
--- a/chain/src/types.rs
+++ b/chain/src/types.rs
@@ -25,17 +25,17 @@ use core::ser;
 use grin_store;
 
 bitflags! {
-  /// Options for block validation
-  pub flags Options: u32 {
-    /// None flag
-    const NONE = 0b00000001,
-    /// Runs without checking the Proof of Work, mostly to make testing easier.
-    const SKIP_POW = 0b00000010,
-    /// Runs PoW verification with a lower cycle size.
-    const EASY_POW = 0b00000100,
-    /// Adds block while in syncing mode.
-    const SYNC = 0b00001000,
-  }
+/// Options for block validation
+	pub flags Options: u32 {
+		/// None flag
+		const NONE = 0b00000001,
+		/// Runs without checking the Proof of Work, mostly to make testing easier.
+		const SKIP_POW = 0b00000010,
+		/// Runs PoW verification with a lower cycle size.
+		const EASY_POW = 0b00000100,
+		/// Adds block while in syncing mode.
+		const SYNC = 0b00001000,
+	}
 }
 
 /// Errors
@@ -57,6 +57,12 @@ pub enum Error {
 	InvalidBlockTime,
 	/// Block height is invalid (not previous + 1)
 	InvalidBlockHeight,
+	/// coinbase can only be spent after it has matured (n blocks)
+	ImmatureCoinbase,
+	/// output not found
+	OutputNotFound,
+	/// output spent
+	OutputSpent,
 	/// Internal issue when trying to save or load data from store
 	StoreErr(grin_store::Error),
 	/// Error serializing or deserializing a type
diff --git a/chain/tests/test_coinbase_maturity.rs b/chain/tests/test_coinbase_maturity.rs
new file mode 100644
index 000000000..cf501e1d8
--- /dev/null
+++ b/chain/tests/test_coinbase_maturity.rs
@@ -0,0 +1,160 @@
+// Copyright 2017 The Grin Developers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+extern crate grin_core as core;
+extern crate grin_chain as chain;
+extern crate env_logger;
+extern crate time;
+extern crate rand;
+extern crate secp256k1zkp as secp;
+extern crate grin_pow as pow;
+
+use std::fs;
+use std::sync::Arc;
+use rand::os::OsRng;
+
+use chain::types::*;
+use core::core::build;
+use core::core::transaction;
+use core::consensus;
+use core::global;
+use core::global::MiningParameterMode;
+
+use pow::{types, cuckoo, MiningWorker};
+
+fn clean_output_dir(dir_name:&str){
+	let _ = fs::remove_dir_all(dir_name);
+}
+
+#[test]
+fn test_coinbase_maturity() {
+	let _ = env_logger::init();
+	clean_output_dir(".grin");
+	global::set_mining_mode(MiningParameterMode::AutomatedTesting);
+
+	let mut rng = OsRng::new().unwrap();
+	let mut genesis_block = None;
+	if !chain::Chain::chain_exists(".grin".to_string()){
+		genesis_block=pow::mine_genesis_block(None);
+	}
+	let chain = chain::Chain::init(".grin".to_string(), Arc::new(NoopAdapter {}),
+		genesis_block, pow::verify_size).unwrap();
+
+	let secp = secp::Secp256k1::with_caps(secp::ContextFlag::Commit);
+
+	let mut miner_config = types::MinerConfig {
+		enable_mining: true,
+		burn_reward: true,
+		..Default::default()
+	};
+	miner_config.cuckoo_miner_plugin_dir = Some(String::from("../target/debug/deps"));
+
+	let mut cuckoo_miner = cuckoo::Miner::new(consensus::EASINESS, global::sizeshift() as u32, global::proofsize());
+
+	let prev = chain.head_header().unwrap();
+	let reward_key = secp::key::SecretKey::new(&secp, &mut rng);
+	let mut block = core::core::Block::new(&prev, vec![], reward_key).unwrap();
+	block.header.timestamp = prev.timestamp + time::Duration::seconds(60);
+
+	let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
+	block.header.difficulty = difficulty.clone();
+
+	pow::pow_size(
+		&mut cuckoo_miner,
+		&mut block.header,
+		difficulty,
+		global::sizeshift() as u32,
+	).unwrap();
+
+	assert_eq!(block.outputs.len(), 1);
+	assert!(block.outputs[0].features.contains(transaction::COINBASE_OUTPUT));
+
+	chain.process_block(block, chain::EASY_POW).unwrap();
+
+	let prev = chain.head_header().unwrap();
+
+	let amount = consensus::REWARD;
+	let (coinbase_txn, _) = build::transaction(vec![
+		build::input(amount, reward_key),
+		build::output_rand(amount-1),
+		build::with_fee(1)]
+	).unwrap();
+
+	let reward_key = secp::key::SecretKey::new(&secp, &mut rng);
+	let mut block = core::core::Block::new(&prev, vec![&coinbase_txn], reward_key).unwrap();
+
+	block.header.timestamp = prev.timestamp + time::Duration::seconds(60);
+
+	let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
+	block.header.difficulty = difficulty.clone();
+
+	pow::pow_size(
+		&mut cuckoo_miner,
+		&mut block.header,
+		difficulty,
+		global::sizeshift() as u32,
+	).unwrap();
+
+	let result = chain.process_block(block, chain::EASY_POW);
+	match result {
+		Err(Error::ImmatureCoinbase) => (),
+		_ => panic!("expected ImmatureCoinbase error here"),
+	};
+
+	// mine 10 blocks so we increase the height sufficiently
+	// coinbase will mature and be spendable in the block after these
+	for _ in 0..10 {
+		let prev = chain.head_header().unwrap();
+
+		let reward_key = secp::key::SecretKey::new(&secp, &mut rng);
+		let mut block = core::core::Block::new(&prev, vec![], reward_key).unwrap();
+		block.header.timestamp = prev.timestamp + time::Duration::seconds(60);
+
+		let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
+		block.header.difficulty = difficulty.clone();
+
+		pow::pow_size(
+			&mut cuckoo_miner,
+			&mut block.header,
+			difficulty,
+			global::sizeshift() as u32,
+		).unwrap();
+
+		chain.process_block(block, chain::EASY_POW).unwrap();
+	};
+
+	let prev = chain.head_header().unwrap();
+
+	let reward_key = secp::key::SecretKey::new(&secp, &mut rng);
+	let mut block = core::core::Block::new(&prev, vec![&coinbase_txn], reward_key).unwrap();
+
+	block.header.timestamp = prev.timestamp + time::Duration::seconds(60);
+
+	let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
+	block.header.difficulty = difficulty.clone();
+
+	pow::pow_size(
+		&mut cuckoo_miner,
+		&mut block.header,
+		difficulty,
+		global::sizeshift() as u32,
+	).unwrap();
+
+	let result = chain.process_block(block, chain::EASY_POW);
+	match result {
+		Ok(_) => (),
+		Err(Error::ImmatureCoinbase) => panic!("we should not get an ImmatureCoinbase here"),
+		Err(_) => panic!("we did not expect an error here"),
+	};
+}
diff --git a/core/src/consensus.rs b/core/src/consensus.rs
index 54ba6ca06..cac9996f1 100644
--- a/core/src/consensus.rs
+++ b/core/src/consensus.rs
@@ -26,6 +26,11 @@ use core::target::Difficulty;
 
 /// The block subsidy amount
 pub const REWARD: u64 = 1_000_000_000;
+/// Number of blocks before a coinbase matures and can be spent
+/// TODO - reduced this for testing - need to investigate if we can lower this in test env
+// pub const COINBASE_MATURITY: u64 = 1_000;
+pub const COINBASE_MATURITY: u64 = 10;
+
 /// Block interval, in seconds, the network will tune its next_target for. Note
 /// that we may reduce this value in the future as we get more data on mining
 /// with Cuckoo Cycle, networks improve and block propagation is optimized
@@ -192,15 +197,15 @@ mod test {
 	fn next_target_adjustment() {
 		// not enough data
 		assert_eq!(next_difficulty(vec![]).unwrap(), Difficulty::from_num(MINIMUM_DIFFICULTY));
-
+
 		assert_eq!(next_difficulty(vec![Ok((60, Difficulty::one()))]).unwrap(), Difficulty::from_num(MINIMUM_DIFFICULTY));
-
+
 		assert_eq!(next_difficulty(repeat(60, 10, DIFFICULTY_ADJUST_WINDOW)).unwrap(), Difficulty::from_num(MINIMUM_DIFFICULTY));
 
 		// just enough data, right interval, should stay constant
-
+
 		let just_enough = DIFFICULTY_ADJUST_WINDOW + MEDIAN_TIME_WINDOW;
 		assert_eq!(next_difficulty(repeat(60, 1000, just_enough)).unwrap(), Difficulty::from_num(1000));
diff --git a/core/src/core/block.rs b/core/src/core/block.rs
index 790f7944d..7efddd373 100644
--- a/core/src/core/block.rs
+++ b/core/src/core/block.rs
@@ -38,7 +38,7 @@ bitflags! {
 }
 
 /// Block header, fairly standard compared to other blockchains.
-#[derive(Debug, PartialEq)]
+#[derive(Clone, Debug, PartialEq)]
 pub struct BlockHeader {
 	/// Height of this block since the genesis block (height 0)
 	pub height: u64,
@@ -258,7 +258,7 @@ impl Block {
 		// build vectors with all inputs and all outputs, ordering them by hash
 		// needs to be a fold so we don't end up with a vector of vectors and we
-		// want to fullt own the refs (not just a pointer like flat_map).
+		// want to fully own the refs (not just a pointer like flat_map).
 		let mut inputs = txs.iter()
 			.fold(vec![], |mut acc, ref tx| {
 				let mut inputs = tx.inputs.clone();
@@ -307,28 +307,37 @@ impl Block {
 	/// Matches any output with a potential spending input, eliminating them
 	/// from the block. Provides a simple way to compact the block. The
 	/// elimination is stable with respect to inputs and outputs order.
+	///
+	/// NOTE: exclude coinbase from compaction process
+	/// if a block contains a new coinbase output and
+	/// a transaction spending a previous coinbase
+	/// we do not want to compact these away
+	///
 	pub fn compact(&self) -> Block {
-		// the chosen ones
-		let mut new_inputs = vec![];
+		let in_set = self.inputs
+			.iter()
+			.map(|inp| inp.commitment())
+			.collect::<HashSet<_>>();
 
-		// build a set of all output commitments
-		let mut out_set = HashSet::new();
-		for out in &self.outputs {
-			out_set.insert(out.commitment());
-		}
-		// removes from the set any hash referenced by an input, keeps the inputs that
-		// don't have a match
-		for inp in &self.inputs {
-			if !out_set.remove(&inp.commitment()) {
-				new_inputs.push(*inp);
-			}
-		}
-		// we got ourselves a keep list in that set
-		let new_outputs = self.outputs
-			.iter()
-			.filter(|out| out_set.contains(&(out.commitment())))
-			.map(|&out| out)
-			.collect::<Vec<_>>();
+		let out_set = self.outputs
+			.iter()
+			.filter(|out| !out.features.contains(COINBASE_OUTPUT))
+			.map(|out| out.commitment())
+			.collect::<HashSet<_>>();
+
+		let commitments_to_compact = in_set.intersection(&out_set).collect::<HashSet<_>>();
+
+		let new_inputs = self.inputs
+			.iter()
+			.filter(|inp| !commitments_to_compact.contains(&inp.commitment()))
+			.map(|&inp| inp)
+			.collect::<Vec<_>>();
+
+		let new_outputs = self.outputs
+			.iter()
+			.filter(|out| !commitments_to_compact.contains(&out.commitment()))
+			.map(|&out| out)
+			.collect::<Vec<_>>();
 
 		let tx_merkle = merkle_inputs_outputs(&new_inputs, &new_outputs);
 
@@ -429,12 +438,12 @@ impl Block {
 	fn verify_coinbase(&self, secp: &Secp256k1) -> Result<(), secp::Error> {
 		let cb_outs = self.outputs
 			.iter()
-			.filter(|out| out.features.intersects(COINBASE_OUTPUT))
+			.filter(|out| out.features.contains(COINBASE_OUTPUT))
 			.map(|o| o.clone())
 			.collect::<Vec<_>>();
 		let cb_kerns = self.kernels
 			.iter()
-			.filter(|k| k.features.intersects(COINBASE_KERNEL))
+			.filter(|k| k.features.contains(COINBASE_KERNEL))
 			.map(|k| k.clone())
 			.collect::<Vec<_>>();
diff --git a/grin/src/adapters.rs b/grin/src/adapters.rs
index d15d9b286..259a692cd 100644
--- a/grin/src/adapters.rs
+++ b/grin/src/adapters.rs
@@ -18,6 +18,7 @@ use std::thread;
 
 use chain::{self, ChainAdapter};
 use core::core::{self, Output};
+use core::core::block::BlockHeader;
 use core::core::hash::{Hash, Hashed};
 use core::core::target::Difficulty;
 use p2p::{self, NetAdapter, Server, PeerStore, PeerData, State};
@@ -292,7 +293,22 @@ impl PoolToChainAdapter {
 }
 
 impl pool::BlockChain for PoolToChainAdapter {
-	fn get_unspent(&self, output_ref: &Commitment) -> Option<Output> {
+	fn get_unspent(&self, output_ref: &Commitment) -> Result<Output, pool::PoolError> {
 		self.chain.borrow().get_unspent(output_ref)
+			.map_err(|e| match e {
+				chain::types::Error::OutputNotFound => pool::PoolError::OutputNotFound,
+				chain::types::Error::OutputSpent => pool::PoolError::OutputSpent,
+				_ => pool::PoolError::GenericPoolError,
+			})
+	}
+
+	fn get_block_header_by_output_commit(&self, commit: &Commitment) -> Result<BlockHeader, pool::PoolError> {
+		self.chain.borrow().get_block_header_by_output_commit(commit)
+			.map_err(|_| pool::PoolError::GenericPoolError)
+	}
+
+	fn head_header(&self) -> Result<BlockHeader, pool::PoolError> {
+		self.chain.borrow().head_header()
+			.map_err(|_| pool::PoolError::GenericPoolError)
 	}
 }
diff --git a/pool/src/blockchain.rs b/pool/src/blockchain.rs
index a64a55364..18972e6b2 100644
--- a/pool/src/blockchain.rs
+++ b/pool/src/blockchain.rs
@@ -18,7 +18,25 @@ use secp::pedersen::Commitment;
 
 use std::sync::RwLock;
 
-use types::BlockChain;
+use types::{BlockChain, PoolError};
+
+#[derive(Debug)]
+pub struct DummyBlockHeaderIndex {
+	block_headers: HashMap<Commitment, block::BlockHeader>
+}
+
+impl DummyBlockHeaderIndex {
+	pub fn insert(&mut self, commit: Commitment, block_header: block::BlockHeader) {
+		self.block_headers.insert(commit, block_header);
+	}
+
+	pub fn get_block_header_by_output_commit(&self, commit: Commitment) -> Result<&block::BlockHeader, PoolError> {
+		match self.block_headers.get(&commit) {
+			Some(h) => Ok(h),
+			None => Err(PoolError::GenericPoolError)
+		}
+	}
+}
 
 /// A DummyUtxoSet for mocking up the chain
 pub struct DummyUtxoSet {
@@ -78,20 +96,45 @@ impl DummyUtxoSet {
 /// need
 #[allow(dead_code)]
 pub struct DummyChainImpl {
-	utxo: RwLock<DummyUtxoSet>
+	utxo: RwLock<DummyUtxoSet>,
+	block_headers: RwLock<DummyBlockHeaderIndex>,
+	head_header: RwLock<Vec<block::BlockHeader>>,
 }
 
 #[allow(dead_code)]
 impl DummyChainImpl {
 	pub fn new() -> DummyChainImpl {
 		DummyChainImpl{
-			utxo: RwLock::new(DummyUtxoSet{outputs: HashMap::new()})}
+			utxo: RwLock::new(DummyUtxoSet{outputs: HashMap::new()}),
+			block_headers: RwLock::new(DummyBlockHeaderIndex{block_headers: HashMap::new()}),
+			head_header: RwLock::new(vec![]),
+		}
 	}
 }
 
 impl BlockChain for DummyChainImpl {
-	fn get_unspent(&self, commitment: &Commitment) -> Option<transaction::Output> {
-		self.utxo.read().unwrap().get_output(commitment).cloned()
+	fn get_unspent(&self, commitment: &Commitment) -> Result<transaction::Output, PoolError> {
+		let output = self.utxo.read().unwrap().get_output(commitment).cloned();
+		match output {
+			Some(o) => Ok(o),
+			None => Err(PoolError::GenericPoolError),
+		}
+	}
+
+	fn get_block_header_by_output_commit(&self, commit: &Commitment) -> Result<block::BlockHeader, PoolError> {
+		match self.block_headers.read().unwrap().get_block_header_by_output_commit(*commit) {
+			Ok(h) => Ok(h.clone()),
+			Err(e) => Err(e),
+		}
+	}
+
+	fn head_header(&self) -> Result<block::BlockHeader, PoolError> {
+		let headers = self.head_header.read().unwrap();
+		if headers.len() > 0 {
+			Ok(headers[0].clone())
+		} else {
+			Err(PoolError::GenericPoolError)
+		}
 	}
 }
 
@@ -102,9 +145,19 @@ impl DummyChain for DummyChainImpl {
 	fn apply_block(&self, b: &block::Block) {
 		self.utxo.write().unwrap().with_block(b);
 	}
+	fn store_header_by_output_commitment(&self, commitment: Commitment, block_header: &block::BlockHeader) {
+		self.block_headers.write().unwrap().insert(commitment, block_header.clone());
+	}
+	fn store_head_header(&self, block_header: &block::BlockHeader) {
+		let mut h = self.head_header.write().unwrap();
+		h.clear();
+		h.insert(0, block_header.clone());
+	}
 }
 
 pub trait DummyChain: BlockChain {
 	fn update_utxo_set(&mut self, new_utxo: DummyUtxoSet);
 	fn apply_block(&self, b: &block::Block);
+	fn store_header_by_output_commitment(&self, commitment: Commitment, block_header: &block::BlockHeader);
+	fn store_head_header(&self, block_header: &block::BlockHeader);
 }
diff --git a/pool/src/pool.rs b/pool/src/pool.rs
index de1ae8b0f..e5927ce96 100644
--- a/pool/src/pool.rs
+++ b/pool/src/pool.rs
@@ -20,6 +20,7 @@ pub use graph;
 
 use core::core::transaction;
 use core::core::block;
 use core::core::hash;
+use core::consensus;
 
 use secp;
 use secp::pedersen::Commitment;
@@ -77,9 +78,10 @@ impl<T> TransactionPool<T> where T: BlockChain {
 	// output designated by output_commitment.
 	fn search_blockchain_unspents(&self, output_commitment: &Commitment) -> Option<Parent> {
 		self.blockchain.get_unspent(output_commitment).
-			map(|_| match self.pool.get_blockchain_spent(output_commitment) {
+			ok().
+			map(|output| match self.pool.get_blockchain_spent(output_commitment) {
 				Some(x) => Parent::AlreadySpent{other_tx: x.destination_hash().unwrap()},
-				None => Parent::BlockTransaction,
+				None => Parent::BlockTransaction{output},
 			})
 	}
 
@@ -109,7 +111,7 @@ impl<T> TransactionPool<T> where T: BlockChain {
 	/// Attempts to add a transaction to the pool.
 	///
-	/// Adds a transation to the memory pool, deferring to the orphans pool
+	/// Adds a transaction to the memory pool, deferring to the orphans pool
 	/// if necessary, and performing any connection-related validity checks.
 	/// Happens under an exclusive mutable reference gated by the write portion
 	/// of a RWLock.
@@ -118,7 +120,7 @@ impl<T> TransactionPool<T> where T: BlockChain {
 		let secp = secp::Secp256k1::with_caps(secp::ContextFlag::Commit);
 		tx.validate(&secp).map_err(|_| PoolError::Invalid)?;
 
-		// The first check invovles ensuring that an identical transaction is
+		// The first check involves ensuring that an identical transaction is
 		// not already in the pool's transaction set.
 		// A non-authoritative similar check should be performed under the
 		// pool's read lock before we get to this point, which would catch the
@@ -132,7 +134,6 @@ impl<T> TransactionPool<T> where T: BlockChain {
 			return Err(PoolError::AlreadyInPool)
 		}
 
-
 		// The next issue is to identify all unspent outputs that
 		// this transaction will consume and make sure they exist in the set.
 		let mut pool_refs: Vec<graph::Edge> = Vec::new();
@@ -140,8 +141,7 @@ impl<T> TransactionPool<T> where T: BlockChain {
 		let mut blockchain_refs: Vec<graph::Edge> = Vec::new();
 
 		for input in &tx.inputs {
-			let base = graph::Edge::new(None, Some(tx_hash),
-				input.commitment());
+			let base = graph::Edge::new(None, Some(tx_hash), input.commitment());
 
 			// Note that search_for_best_output does not examine orphans, by
 			// design. If an incoming transaction consumes pool outputs already
 			// into the pool.
 			match self.search_for_best_output(&input.commitment()) {
 				Parent::PoolTransaction{tx_ref: x} => pool_refs.push(base.with_source(Some(x))),
-				Parent::BlockTransaction => blockchain_refs.push(base),
+				Parent::BlockTransaction{output} => {
+					// TODO - pull this out into a separate function?
+					if output.features.contains(transaction::COINBASE_OUTPUT) {
+						if let Ok(out_header) = self.blockchain.get_block_header_by_output_commit(&output.commitment()) {
+							if let Ok(head_header) = self.blockchain.head_header() {
+								if head_header.height <= out_header.height + consensus::COINBASE_MATURITY {
+									return Err(PoolError::ImmatureCoinbase{
+										header: out_header,
+										output: output.commitment()
+									})
+								};
+							};
+						};
+					};
+					blockchain_refs.push(base);
+				},
 				Parent::Unknown => orphan_refs.push(base),
 				Parent::AlreadySpent{other_tx: x} => return Err(PoolError::DoubleSpend{
 					other_tx: x, spent_output: input.commitment()}),
 			}
@@ -232,7 +247,7 @@ impl<T> TransactionPool<T> where T: BlockChain {
 			// Checking against current blockchain unspent outputs
 			// We want outputs even if they're spent by pool txs, so we ignore
 			// consumed_blockchain_outputs
-			if self.blockchain.get_unspent(&output.commitment()).is_some() {
+			if self.blockchain.get_unspent(&output.commitment()).is_ok() {
 				return Err(PoolError::DuplicateOutput{
 					other_tx: None,
 					in_chain: true,
@@ -417,7 +432,7 @@ impl<T> TransactionPool<T> where T: BlockChain {
 		for output in &tx_ref.unwrap().outputs {
 			match self.pool.get_internal_spent_output(&output.commitment()) {
 				Some(x) => {
-					if self.blockchain.get_unspent(&x.output_commitment()).is_none() {
+					if self.blockchain.get_unspent(&x.output_commitment()).is_err() {
 						self.mark_transaction(x.destination_hash().unwrap(), marked_txs);
 					}
 				},
@@ -465,7 +480,7 @@ impl<T> TransactionPool<T> where T: BlockChain {
 #[cfg(test)]
 mod tests {
 	use super::*;
-	use types::*;
+	use types::*;
 	use secp::{Secp256k1, ContextFlag, constants};
 	use secp::key;
 	use core::core::build;
@@ -488,7 +503,7 @@ fn test_basic_pool_add() {
 		let mut dummy_chain = DummyChainImpl::new();
 
-		let parent_transaction = test_transaction(vec![5,6,7],vec![11,4]);
+		let parent_transaction = test_transaction(vec![5,6,7], vec![11,4]);
 		// We want this transaction to be rooted in the blockchain.
 		let new_utxo = DummyUtxoSet::empty().
 			with_output(test_output(5)).
@@ -540,7 +555,7 @@
 			expect_output_parent!(read_pool,
 				Parent::AlreadySpent{other_tx: _}, 11, 5);
 			expect_output_parent!(read_pool,
-				Parent::BlockTransaction, 8);
+				Parent::BlockTransaction{output: _}, 8);
 			expect_output_parent!(read_pool,
 				Parent::Unknown, 20);
@@ -629,9 +644,61 @@
 		}
 	}
 
+	#[test]
+	fn test_immature_coinbase() {
+		let mut dummy_chain = DummyChainImpl::new();
+		let coinbase_output = test_coinbase_output(15);
+		dummy_chain.update_utxo_set(DummyUtxoSet::empty().with_output(coinbase_output));
+
+		let chain_ref = Arc::new(dummy_chain);
+		let pool = RwLock::new(test_setup(&chain_ref));
+
+		{
+			let mut write_pool = pool.write().unwrap();
+
+			let coinbase_header = block::BlockHeader {height: 1, ..block::BlockHeader::default()};
+			chain_ref.store_header_by_output_commitment(coinbase_output.commitment(), &coinbase_header);
+
+			let head_header = block::BlockHeader {height: 2, ..block::BlockHeader::default()};
+			chain_ref.store_head_header(&head_header);
+
+			let txn = test_transaction(vec![15], vec![10, 4]);
+			let result = write_pool.add_to_memory_pool(test_source(), txn);
+			match result {
+				Err(PoolError::ImmatureCoinbase{header: _, output: out}) => {
+					assert_eq!(out, coinbase_output.commitment());
+				},
+				_ => panic!("expected ImmatureCoinbase error here"),
+			};
+
+			let head_header = block::BlockHeader {height: 11, ..block::BlockHeader::default()};
+			chain_ref.store_head_header(&head_header);
+
+			let txn = test_transaction(vec![15], vec![10, 4]);
+			let result = write_pool.add_to_memory_pool(test_source(), txn);
+			match result {
+				Err(PoolError::ImmatureCoinbase{header: _, output: out}) => {
+					assert_eq!(out, coinbase_output.commitment());
+				},
+				_ => panic!("expected ImmatureCoinbase error here"),
+			};
+
+			let head_header = block::BlockHeader {height: 12, ..block::BlockHeader::default()};
+			chain_ref.store_head_header(&head_header);
+
+			let txn = test_transaction(vec![15], vec![10, 4]);
+			let result = write_pool.add_to_memory_pool(test_source(), txn);
+			match result {
+				Ok(_) => {},
+				Err(_) => panic!("this should not return an error here"),
+			};
+		}
+	}
+
 	#[test]
 	/// Testing an expected orphan
 	fn test_add_orphan() {
+		// TODO we need a test here
 	}
 
 	#[test]
@@ -750,7 +817,7 @@
 			assert_eq!(read_pool.total_size(), 4);
 
 			// We should have available blockchain outputs at 9 and 3
-			expect_output_parent!(read_pool, Parent::BlockTransaction, 9, 3);
+			expect_output_parent!(read_pool, Parent::BlockTransaction{output: _}, 9, 3);
 
 			// We should have spent blockchain outputs at 4 and 7
 			expect_output_parent!(read_pool,
@@ -895,6 +962,17 @@
 			proof: ec.range_proof(0, value, output_key, output_commitment, ec.nonce())}
 	}
 
+	/// Deterministically generate a coinbase output defined by our test scheme
+	fn test_coinbase_output(value: u64) -> transaction::Output {
+		let ec = Secp256k1::with_caps(ContextFlag::Commit);
+		let output_key = test_key(value);
+		let output_commitment = ec.commit(value, output_key).unwrap();
+		transaction::Output{
+			features: transaction::COINBASE_OUTPUT,
+			commit: output_commitment,
+			proof: ec.range_proof(0, value, output_key, output_commitment)}
+	}
+
 	/// Makes a SecretKey from a single u64
 	fn test_key(value: u64) -> key::SecretKey {
 		let ec = Secp256k1::with_caps(ContextFlag::Commit);
diff --git a/pool/src/types.rs b/pool/src/types.rs
index b62364384..795c59743 100644
--- a/pool/src/types.rs
+++ b/pool/src/types.rs
@@ -24,6 +24,7 @@ use secp::pedersen::Commitment;
 
 pub use graph;
 
+use core::core::block;
 use core::core::transaction;
 use core::core::hash;
@@ -46,7 +47,7 @@ pub struct TxSource {
 
 #[derive(Clone)]
 pub enum Parent {
 	Unknown,
-	BlockTransaction,
+	BlockTransaction{output: transaction::Output},
 	PoolTransaction{tx_ref: hash::Hash},
 	AlreadySpent{other_tx: hash::Hash},
 }
@@ -55,7 +56,7 @@ impl fmt::Debug for Parent {
 	fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 		match self {
 			&Parent::Unknown => write!(f, "Parent: Unknown"),
-			&Parent::BlockTransaction => write!(f, "Parent: Block Transaction"),
+			&Parent::BlockTransaction{output: _} => write!(f, "Parent: Block Transaction"),
 			&Parent::PoolTransaction{tx_ref: x} =>
 				write!(f, "Parent: Pool Transaction ({:?})", x),
 			&Parent::AlreadySpent{other_tx: x} => write!(f,
@@ -88,17 +89,38 @@ pub enum PoolError {
 		/// The spent output
 		spent_output: Commitment
 	},
+	/// Attempt to spend a coinbase output before it matures (1000 blocks?)
+	ImmatureCoinbase{
+		/// The block header of the block containing the coinbase output
+		header: block::BlockHeader,
+		/// The unspent coinbase output
+		output: Commitment,
+	},
 	/// An orphan successfully added to the orphans set
 	OrphanTransaction,
+	/// TODO - wip, just getting imports working, remove this and use more specific errors
+	GenericPoolError,
+	/// TODO - is this the right level of abstraction for pool errors?
+	OutputNotFound,
+	/// TODO - is this the right level of abstraction for pool errors?
+	OutputSpent,
 }
 
 /// Interface that the pool requires from a blockchain implementation.
 pub trait BlockChain {
-	/// Get an unspent output by its commitment. Will return None if the output
-	/// is spent or if it doesn't exist. The blockchain is expected to produce
-	/// a result with its current view of the most worked chain, ignoring
-	/// orphans, etc.
-	fn get_unspent(&self, output_ref: &Commitment) -> Option<transaction::Output>;
+	/// Get an unspent output by its commitment. Will return an error if the output
+	/// is spent or if it doesn't exist. The blockchain is expected to produce
+	/// a result with its current view of the most worked chain, ignoring
+	/// orphans, etc.
+	fn get_unspent(&self, output_ref: &Commitment)
+		-> Result<transaction::Output, PoolError>;
+
+	/// Get the block header by output commitment (needed for spending coinbase after n blocks)
+	fn get_block_header_by_output_commit(&self, commit: &Commitment)
+		-> Result<block::BlockHeader, PoolError>;
+
+	/// Get the block header at the head
+	fn head_header(&self) -> Result<block::BlockHeader, PoolError>;
 }
 
 /// Pool contains the elements of the graph that are connected, in full, to