rustfmt (new version)

Ignotus Peverell 2017-07-27 21:13:34 +00:00
parent 3c3b12b13a
commit 22bff54f66
No known key found for this signature in database
GPG key ID: 99CD25F39F8F8211
5 changed files with 144 additions and 110 deletions
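
All of the changes below are mechanical: the newer rustfmt wraps long signatures and call expressions with one argument per line, a trailing comma, and the closing parenthesis on its own line, and it normalizes spacing (MAX_ORPHANS+1 becomes MAX_ORPHANS + 1, NoopAdapter{} becomes NoopAdapter {}). A minimal sketch of that argument-wrapping style on made-up code (the names here are hypothetical, not taken from the diff):

fn describe_block(
	height: u64,
	inputs: usize,
	outputs: usize,
) -> String {
	// Long macro calls also get one argument per line under the new style.
	format!(
		"block at {} with {} inputs and {} outputs",
		height,
		inputs,
		outputs
	)
}

fn main() {
	println!("{}", describe_block(1, 2, 3));
}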


@@ -53,7 +53,7 @@ pub struct Chain {
 	head: Arc<Mutex<Tip>>,
 	block_process_lock: Arc<Mutex<bool>>,
 	orphans: Arc<Mutex<VecDeque<Block>>>,
 	test_mode: bool,
 }
@@ -67,10 +67,11 @@ impl Chain {
 	/// on the current chain head to make sure it exists and creates one based
 	/// on
 	/// the genesis block if necessary.
-	pub fn init(test_mode: bool,
-	            db_root: String,
-	            adapter: Arc<ChainAdapter>)
-	            -> Result<Chain, Error> {
+	pub fn init(
+		test_mode: bool,
+		db_root: String,
+		adapter: Arc<ChainAdapter>,
+	) -> Result<Chain, Error> {
 		let chain_store = store::ChainKVStore::new(db_root)?;
 		// check if we have a head in store, otherwise the genesis block is it
@@ -105,7 +106,7 @@ impl Chain {
 			adapter: adapter,
 			head: Arc::new(Mutex::new(head)),
 			block_process_lock: Arc::new(Mutex::new(true)),
-			orphans: Arc::new(Mutex::new(VecDeque::with_capacity(MAX_ORPHANS+1))),
+			orphans: Arc::new(Mutex::new(VecDeque::with_capacity(MAX_ORPHANS + 1))),
 			test_mode: test_mode,
 		})
 	}
@@ -120,32 +121,33 @@ impl Chain {
 		let res = pipe::process_block(&b, ctx);
 		match res {
 			Ok(Some(ref tip)) => {
 				// block got accepted and extended the head, updating our head
 				let chain_head = self.head.clone();
 				let mut head = chain_head.lock().unwrap();
 				*head = tip.clone();
 				self.check_orphans();
 			}
 			Err(Error::Orphan) => {
 				let mut orphans = self.orphans.lock().unwrap();
 				orphans.push_front(b);
 				orphans.truncate(MAX_ORPHANS);
 			}
 			_ => {}
 		}
 		res
 	}
 	/// Attempt to add a new header to the header chain. Only necessary during
 	/// sync.
-	pub fn process_block_header(&self,
-	                            bh: &BlockHeader,
-	                            opts: Options)
-	                            -> Result<Option<Tip>, Error> {
+	pub fn process_block_header(
+		&self,
+		bh: &BlockHeader,
+		opts: Options,
+	) -> Result<Option<Tip>, Error> {
 		let head = self.store.get_header_head().map_err(&Error::StoreErr)?;
 		let ctx = self.ctx_from_head(head, opts);
@@ -167,32 +169,32 @@ impl Chain {
 		}
 	}
 	/// Pop orphans out of the queue and check if we can now accept them.
 	fn check_orphans(&self) {
 		// first check how many we have to retry, unfort. we can't extend the lock
 		// in the loop as it needs to be freed before going in process_block
 		let mut orphan_count = 0;
 		{
 			let orphans = self.orphans.lock().unwrap();
 			orphan_count = orphans.len();
 		}
 		// pop each orphan and retry, if still orphaned, will be pushed again
 		let mut opts = NONE;
 		if self.test_mode {
 			opts = opts | EASY_POW;
 		}
 		for _ in 0..orphan_count {
 			let mut popped = None;
 			{
 				let mut orphans = self.orphans.lock().unwrap();
 				popped = orphans.pop_back();
 			}
 			if let Some(orphan) = popped {
 				self.process_block(orphan, opts);
 			}
 		}
 	}
 	/// Gets an unspent output from its commitment. With return None if the
 	/// output
@@ -252,7 +254,9 @@ impl Chain {
 	/// Gets the block header at the provided height
 	pub fn get_header_by_height(&self, height: u64) -> Result<BlockHeader, Error> {
-		self.store.get_header_by_height(height).map_err(&Error::StoreErr)
+		self.store.get_header_by_height(height).map_err(
+			&Error::StoreErr,
+		)
 	}
 	/// Get the tip of the header chain


@@ -46,11 +46,13 @@ pub fn process_block(b: &Block, mut ctx: BlockContext) -> Result<Option<Tip>, Er
 	// TODO should just take a promise for a block with a full header so we don't
 	// spend resources reading the full block when its header is invalid
-	info!("Starting validation pipeline for block {} at {} with {} inputs and {} outputs.",
-	      b.hash(),
-	      b.header.height,
-	      b.inputs.len(),
-	      b.outputs.len());
+	info!(
+		"Starting validation pipeline for block {} at {} with {} inputs and {} outputs.",
+		b.hash(),
+		b.header.height,
+		b.inputs.len(),
+		b.outputs.len()
+	);
 	check_known(b.hash(), &mut ctx)?;
 	if !ctx.opts.intersects(SYNC) {
@@ -58,9 +60,11 @@ pub fn process_block(b: &Block, mut ctx: BlockContext) -> Result<Option<Tip>, Er
 		validate_header(&b.header, &mut ctx)?;
 	}
 	validate_block(b, &mut ctx)?;
-	debug!("Block at {} with hash {} is valid, going to save and append.",
-	       b.header.height,
-	       b.hash());
+	debug!(
+		"Block at {} with hash {} is valid, going to save and append.",
+		b.header.height,
+		b.hash()
+	);
 	ctx.lock.lock();
 	add_block(b, &mut ctx)?;
@@ -69,9 +73,11 @@ pub fn process_block(b: &Block, mut ctx: BlockContext) -> Result<Option<Tip>, Er
 pub fn process_block_header(bh: &BlockHeader, mut ctx: BlockContext) -> Result<Option<Tip>, Error> {
-	info!("Starting validation pipeline for block header {} at {}.",
-	      bh.hash(),
-	      bh.height);
+	info!(
+		"Starting validation pipeline for block header {} at {}.",
+		bh.hash(),
+		bh.height
+	);
 	check_known(bh.hash(), &mut ctx)?;
 	validate_header(&bh, &mut ctx)?;
 	add_block_header(bh, &mut ctx)?;
@@ -106,7 +112,9 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
 		return Err(Error::Orphan);
 	}
-	let prev = try!(ctx.store.get_block_header(&header.previous).map_err(&Error::StoreErr));
+	let prev = try!(ctx.store.get_block_header(&header.previous).map_err(
+		&Error::StoreErr,
+	));
 	if header.height != prev.height + 1 {
 		return Err(Error::InvalidBlockHeight);
@@ -117,7 +125,8 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
 		return Err(Error::InvalidBlockTime);
 	}
 	if header.timestamp >
-		time::now() + time::Duration::seconds(12 * (consensus::BLOCK_TIME_SEC as i64)) {
+		time::now() + time::Duration::seconds(12 * (consensus::BLOCK_TIME_SEC as i64))
+	{
 		// refuse blocks more than 12 blocks intervals in future (as in bitcoin)
 		// TODO add warning in p2p code if local time is too different from peers
 		return Err(Error::InvalidBlockTime);
@@ -131,8 +140,9 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
 	}
 	let diff_iter = store::DifficultyIter::from(header.previous, ctx.store.clone());
-	let difficulty =
-		consensus::next_difficulty(diff_iter).map_err(|e| Error::Other(e.to_string()))?;
+	let difficulty = consensus::next_difficulty(diff_iter).map_err(|e| {
+		Error::Other(e.to_string())
+	})?;
 	if header.difficulty < difficulty {
 		return Err(Error::DifficultyTooLow);
 	}
@@ -224,9 +234,11 @@ fn update_header_head(bh: &BlockHeader, ctx: &mut BlockContext) -> Result<Option
 		ctx.store.save_header_head(&tip).map_err(&Error::StoreErr)?;
 		ctx.head = tip.clone();
-		info!("Updated block header head to {} at {}.",
-		      bh.hash(),
-		      bh.height);
+		info!(
+			"Updated block header head to {} at {}.",
+			bh.hash(),
+			bh.height
+		);
 		Ok(Some(tip))
 	} else {
 		Ok(None)


@@ -82,7 +82,9 @@ impl ChainStore for ChainKVStore {
 	}
 	fn get_block_header(&self, h: &Hash) -> Result<BlockHeader, Error> {
-		option_to_not_found(self.db.get_ser(&to_key(BLOCK_HEADER_PREFIX, &mut h.to_vec())))
+		option_to_not_found(self.db.get_ser(
+			&to_key(BLOCK_HEADER_PREFIX, &mut h.to_vec()),
+		))
 	}
 	fn check_block_exists(&self, h: &Hash) -> Result<bool, Error> {
@@ -94,20 +96,27 @@ impl ChainStore for ChainKVStore {
 		let mut batch = self.db
 			.batch()
 			.put_ser(&to_key(BLOCK_PREFIX, &mut b.hash().to_vec())[..], b)?
-			.put_ser(&to_key(BLOCK_HEADER_PREFIX, &mut b.hash().to_vec())[..],
-			         &b.header)?;
+			.put_ser(
+				&to_key(BLOCK_HEADER_PREFIX, &mut b.hash().to_vec())[..],
+				&b.header,
+			)?;
 		// saving the full output under its hash, as well as a commitment to hash index
 		for out in &b.outputs {
 			let mut out_bytes = out.commit.as_ref().to_vec();
-			batch = batch.put_ser(&to_key(OUTPUT_COMMIT_PREFIX, &mut out_bytes)[..], out)?;
+			batch = batch.put_ser(
+				&to_key(OUTPUT_COMMIT_PREFIX, &mut out_bytes)[..],
+				out,
+			)?;
 		}
 		batch.write()
 	}
 	fn save_block_header(&self, bh: &BlockHeader) -> Result<(), Error> {
-		self.db.put_ser(&to_key(BLOCK_HEADER_PREFIX, &mut bh.hash().to_vec())[..],
-		                bh)
+		self.db.put_ser(
+			&to_key(BLOCK_HEADER_PREFIX, &mut bh.hash().to_vec())[..],
+			bh,
+		)
 	}
 	fn get_header_by_height(&self, height: u64) -> Result<BlockHeader, Error> {
@@ -115,17 +124,24 @@ impl ChainStore for ChainKVStore {
 	}
 	fn get_output_by_commit(&self, commit: &Commitment) -> Result<Output, Error> {
-		option_to_not_found(self.db
-			.get_ser(&to_key(OUTPUT_COMMIT_PREFIX, &mut commit.as_ref().to_vec())))
+		option_to_not_found(self.db.get_ser(&to_key(
+			OUTPUT_COMMIT_PREFIX,
+			&mut commit.as_ref().to_vec(),
+		)))
 	}
 	fn has_output_commit(&self, commit: &Commitment) -> Result<Hash, Error> {
-		option_to_not_found(self.db
-			.get_ser(&to_key(OUTPUT_COMMIT_PREFIX, &mut commit.as_ref().to_vec())))
+		option_to_not_found(self.db.get_ser(&to_key(
+			OUTPUT_COMMIT_PREFIX,
+			&mut commit.as_ref().to_vec(),
+		)))
 	}
 	fn setup_height(&self, bh: &BlockHeader) -> Result<(), Error> {
-		self.db.put_ser(&u64_to_key(HEADER_HEIGHT_PREFIX, bh.height), bh)?;
+		self.db.put_ser(
+			&u64_to_key(HEADER_HEIGHT_PREFIX, bh.height),
+			bh,
+		)?;
 		let mut prev_h = bh.previous;
 		let mut prev_height = bh.height - 1;
@@ -133,9 +149,10 @@ impl ChainStore for ChainKVStore {
 			let prev = self.get_header_by_height(prev_height)?;
 			if prev.hash() != prev_h {
 				let real_prev = self.get_block_header(&prev_h)?;
-				self.db
-					.put_ser(&u64_to_key(HEADER_HEIGHT_PREFIX, real_prev.height),
-					         &real_prev);
+				self.db.put_ser(
+					&u64_to_key(HEADER_HEIGHT_PREFIX, real_prev.height),
+					&real_prev,
+				);
 				prev_h = real_prev.previous;
 				prev_height = real_prev.height - 1;
 			} else {


@@ -41,8 +41,8 @@ bitflags! {
 pub enum Error {
 	/// The block doesn't fit anywhere in our chain
 	Unfit(String),
 	/// Special case of orphan blocks
 	Orphan,
 	/// Difficulty is too low either compared to ours or the block PoW hash
 	DifficultyTooLow,
 	/// Addition of difficulties on all previous block is wrong
@@ -199,7 +199,7 @@ pub trait ChainAdapter {
 	fn block_accepted(&self, b: &Block);
 }
-pub struct NoopAdapter { }
+pub struct NoopAdapter {}
 impl ChainAdapter for NoopAdapter {
 	fn block_accepted(&self, b: &Block) {}
 }


@@ -40,28 +40,28 @@ use grin_core::pow::MiningWorker;
 #[test]
 fn mine_empty_chain() {
 	env_logger::init();
 	let mut rng = OsRng::new().unwrap();
-	let chain = grin_chain::Chain::init(true, ".grin".to_string(), Arc::new(NoopAdapter{})).unwrap();
+	let chain = grin_chain::Chain::init(true, ".grin".to_string(), Arc::new(NoopAdapter {}))
+		.unwrap();
 	// mine and add a few blocks
 	let secp = secp::Secp256k1::with_caps(secp::ContextFlag::Commit);
 	let reward_key = secp::key::SecretKey::new(&secp, &mut rng);
 	let server_config = ServerConfig::default();
-	let mut miner_config = grin::MinerConfig{
+	let mut miner_config = grin::MinerConfig {
 		enable_mining: true,
 		burn_reward: true,
 		..Default::default()
 	};
 	miner_config.cuckoo_miner_plugin_dir = Some(String::from("../target/debug/deps"));
-	let mut cuckoo_miner = PluginMiner::new(consensus::EASINESS,
-	                                        consensus::TEST_SIZESHIFT as u32 );
-	cuckoo_miner.init(miner_config ,server_config);
+	let mut cuckoo_miner = PluginMiner::new(consensus::EASINESS, consensus::TEST_SIZESHIFT as u32);
+	cuckoo_miner.init(miner_config, server_config);
 	for n in 1..4 {
 		let prev = chain.head_header().unwrap();
 		let mut b = core::Block::new(&prev, vec![], reward_key).unwrap();
 		b.header.timestamp = prev.timestamp + time::Duration::seconds(60);
@@ -69,13 +69,13 @@ fn mine_empty_chain() {
 		b.header.difficulty = difficulty.clone();
 		pow::pow_size(
 			&mut cuckoo_miner,
 			&mut b.header,
 			difficulty,
 			consensus::TEST_SIZESHIFT as u32,
 		).unwrap();
 		let bhash = b.hash();
 		chain.process_block(b, grin_chain::EASY_POW).unwrap();
 		// checking our new head
@@ -87,38 +87,39 @@ fn mine_empty_chain() {
 #[test]
 fn mine_forks() {
 	env_logger::init();
 	let mut rng = OsRng::new().unwrap();
-	let chain = grin_chain::Chain::init(true, ".grin2".to_string(), Arc::new(NoopAdapter{})).unwrap();
+	let chain = grin_chain::Chain::init(true, ".grin2".to_string(), Arc::new(NoopAdapter {}))
+		.unwrap();
 	// mine and add a few blocks
 	let secp = secp::Secp256k1::with_caps(secp::ContextFlag::Commit);
 	let reward_key = secp::key::SecretKey::new(&secp, &mut rng);
 	for n in 1..4 {
 		let prev = chain.head_header().unwrap();
 		let mut b = core::Block::new(&prev, vec![], reward_key).unwrap();
 		b.header.timestamp = prev.timestamp + time::Duration::seconds(60);
-		b.header.total_difficulty = Difficulty::from_num(2*n);
+		b.header.total_difficulty = Difficulty::from_num(2 * n);
 		let bhash = b.hash();
 		chain.process_block(b, grin_chain::SKIP_POW).unwrap();
 		// checking our new head
 		thread::sleep(::std::time::Duration::from_millis(50));
 		let head = chain.head().unwrap();
 		assert_eq!(head.height, n as u64);
 		assert_eq!(head.last_block_h, bhash);
 		assert_eq!(head.prev_block_h, prev.hash());
 		// build another block with higher difficulty
 		let mut b = core::Block::new(&prev, vec![], reward_key).unwrap();
 		b.header.timestamp = prev.timestamp + time::Duration::seconds(60);
-		b.header.total_difficulty = Difficulty::from_num(2*n+1);
+		b.header.total_difficulty = Difficulty::from_num(2 * n + 1);
 		let bhash = b.hash();
 		chain.process_block(b, grin_chain::SKIP_POW).unwrap();
 		// checking head switch
 		thread::sleep(::std::time::Duration::from_millis(50));
 		let head = chain.head().unwrap();
 		assert_eq!(head.height, n as u64);
 		assert_eq!(head.last_block_h, bhash);