mirror of https://github.com/mimblewimble/grin.git
rustfmt (new version)
This commit is contained in:
parent 3c3b12b13a
commit 22bff54f66
5 changed files with 144 additions and 110 deletions
@@ -53,7 +53,7 @@ pub struct Chain {
	head: Arc<Mutex<Tip>>,
	block_process_lock: Arc<Mutex<bool>>,
-	orphans: Arc<Mutex<VecDeque<Block>>>,
+	orphans: Arc<Mutex<VecDeque<Block>>>,

	test_mode: bool,
}
@@ -67,10 +67,11 @@ impl Chain {
	/// on the current chain head to make sure it exists and creates one based
	/// on
	/// the genesis block if necessary.
-	pub fn init(test_mode: bool,
-	            db_root: String,
-	            adapter: Arc<ChainAdapter>)
-	            -> Result<Chain, Error> {
+	pub fn init(
+		test_mode: bool,
+		db_root: String,
+		adapter: Arc<ChainAdapter>,
+	) -> Result<Chain, Error> {
		let chain_store = store::ChainKVStore::new(db_root)?;

		// check if we have a head in store, otherwise the genesis block is it
@@ -105,7 +106,7 @@ impl Chain {
			adapter: adapter,
			head: Arc::new(Mutex::new(head)),
			block_process_lock: Arc::new(Mutex::new(true)),
-			orphans: Arc::new(Mutex::new(VecDeque::with_capacity(MAX_ORPHANS+1))),
+			orphans: Arc::new(Mutex::new(VecDeque::with_capacity(MAX_ORPHANS + 1))),
			test_mode: test_mode,
		})
	}

@@ -120,32 +121,33 @@ impl Chain {

		let res = pipe::process_block(&b, ctx);

-		match res {
-			Ok(Some(ref tip)) => {
-				// block got accepted and extended the head, updating our head
-				let chain_head = self.head.clone();
-				let mut head = chain_head.lock().unwrap();
-				*head = tip.clone();
-				self.check_orphans();
-			}
-			Err(Error::Orphan) => {
-				let mut orphans = self.orphans.lock().unwrap();
-				orphans.push_front(b);
-				orphans.truncate(MAX_ORPHANS);
-			}
-			_ => {}
-		}
+		match res {
+			Ok(Some(ref tip)) => {
+				// block got accepted and extended the head, updating our head
+				let chain_head = self.head.clone();
+				let mut head = chain_head.lock().unwrap();
+				*head = tip.clone();
+
+				self.check_orphans();
+			}
+			Err(Error::Orphan) => {
+				let mut orphans = self.orphans.lock().unwrap();
+				orphans.push_front(b);
+				orphans.truncate(MAX_ORPHANS);
+			}
+			_ => {}
+		}

		res
	}

	/// Attempt to add a new header to the header chain. Only necessary during
	/// sync.
-	pub fn process_block_header(&self,
-	                            bh: &BlockHeader,
-	                            opts: Options)
-	                            -> Result<Option<Tip>, Error> {
+	pub fn process_block_header(
+		&self,
+		bh: &BlockHeader,
+		opts: Options,
+	) -> Result<Option<Tip>, Error> {

		let head = self.store.get_header_head().map_err(&Error::StoreErr)?;
		let ctx = self.ctx_from_head(head, opts);
@@ -167,32 +169,32 @@ impl Chain {
		}
	}

-	/// Pop orphans out of the queue and check if we can now accept them.
-	fn check_orphans(&self) {
-		// first check how many we have to retry, unfort. we can't extend the lock
-		// in the loop as it needs to be freed before going in process_block
-		let mut orphan_count = 0;
-		{
-			let orphans = self.orphans.lock().unwrap();
-			orphan_count = orphans.len();
-		}
+	/// Pop orphans out of the queue and check if we can now accept them.
+	fn check_orphans(&self) {
+		// first check how many we have to retry, unfort. we can't extend the lock
+		// in the loop as it needs to be freed before going in process_block
+		let mut orphan_count = 0;
+		{
+			let orphans = self.orphans.lock().unwrap();
+			orphan_count = orphans.len();
+		}

-		// pop each orphan and retry, if still orphaned, will be pushed again
+		// pop each orphan and retry, if still orphaned, will be pushed again
		let mut opts = NONE;
		if self.test_mode {
			opts = opts | EASY_POW;
		}
-		for _ in 0..orphan_count {
-			let mut popped = None;
-			{
-				let mut orphans = self.orphans.lock().unwrap();
-				popped = orphans.pop_back();
-			}
-			if let Some(orphan) = popped {
-				self.process_block(orphan, opts);
-			}
-		}
-	}
+		for _ in 0..orphan_count {
+			let mut popped = None;
+			{
+				let mut orphans = self.orphans.lock().unwrap();
+				popped = orphans.pop_back();
+			}
+			if let Some(orphan) = popped {
+				self.process_block(orphan, opts);
+			}
+		}
+	}

	/// Gets an unspent output from its commitment. With return None if the
	/// output
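Note (not part of the diff): the hunk above only re-indents check_orphans, but the pattern it preserves is worth spelling out. Orphaned blocks sit in a bounded VecDeque behind a mutex; new orphans are pushed to the front and the queue truncated to MAX_ORPHANS, and retries pop from the back so the lock is released before reprocessing. A minimal standalone sketch of that pattern using only std types; OrphanQueue, the u64 stand-in for a block, and the bound value are illustrative, not the crate's API:

use std::collections::VecDeque;
use std::sync::{Arc, Mutex};

const MAX_ORPHANS: usize = 20; // illustrative bound, not the crate's value

struct OrphanQueue {
    orphans: Arc<Mutex<VecDeque<u64>>>, // u64 stands in for a Block here
}

impl OrphanQueue {
    // Newest orphan goes to the front; the queue is capped so old ones fall off.
    fn add(&self, block: u64) {
        let mut orphans = self.orphans.lock().unwrap();
        orphans.push_front(block);
        orphans.truncate(MAX_ORPHANS);
    }

    // Retry each queued orphan once; the lock is released before `process`
    // runs, mirroring how check_orphans avoids holding it across process_block.
    fn retry<F: FnMut(u64)>(&self, mut process: F) {
        let count = { self.orphans.lock().unwrap().len() };
        for _ in 0..count {
            let popped = { self.orphans.lock().unwrap().pop_back() };
            if let Some(block) = popped {
                process(block);
            }
        }
    }
}

fn main() {
    let q = OrphanQueue {
        orphans: Arc::new(Mutex::new(VecDeque::with_capacity(MAX_ORPHANS + 1))),
    };
    q.add(1);
    q.add(2);
    q.retry(|b| println!("retrying orphan {}", b));
}

A block that is still an orphan on retry simply gets pushed to the front again, so the queue never grows past its bound.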
@@ -252,7 +254,9 @@ impl Chain {

	/// Gets the block header at the provided height
	pub fn get_header_by_height(&self, height: u64) -> Result<BlockHeader, Error> {
-		self.store.get_header_by_height(height).map_err(&Error::StoreErr)
+		self.store.get_header_by_height(height).map_err(
+			&Error::StoreErr,
+		)
	}

	/// Get the tip of the header chain
@@ -46,11 +46,13 @@ pub fn process_block(b: &Block, mut ctx: BlockContext) -> Result<Option<Tip>, Er
	// TODO should just take a promise for a block with a full header so we don't
	// spend resources reading the full block when its header is invalid

-	info!("Starting validation pipeline for block {} at {} with {} inputs and {} outputs.",
-	      b.hash(),
-	      b.header.height,
-	      b.inputs.len(),
-	      b.outputs.len());
+	info!(
+		"Starting validation pipeline for block {} at {} with {} inputs and {} outputs.",
+		b.hash(),
+		b.header.height,
+		b.inputs.len(),
+		b.outputs.len()
+	);
	check_known(b.hash(), &mut ctx)?;

	if !ctx.opts.intersects(SYNC) {
@@ -58,9 +60,11 @@ pub fn process_block(b: &Block, mut ctx: BlockContext) -> Result<Option<Tip>, Er
		validate_header(&b.header, &mut ctx)?;
	}
	validate_block(b, &mut ctx)?;
-	debug!("Block at {} with hash {} is valid, going to save and append.",
-	       b.header.height,
-	       b.hash());
+	debug!(
+		"Block at {} with hash {} is valid, going to save and append.",
+		b.header.height,
+		b.hash()
+	);

	ctx.lock.lock();
	add_block(b, &mut ctx)?;
@@ -69,9 +73,11 @@ pub fn process_block(b: &Block, mut ctx: BlockContext) -> Result<Option<Tip>, Er

pub fn process_block_header(bh: &BlockHeader, mut ctx: BlockContext) -> Result<Option<Tip>, Error> {

-	info!("Starting validation pipeline for block header {} at {}.",
-	      bh.hash(),
-	      bh.height);
+	info!(
+		"Starting validation pipeline for block header {} at {}.",
+		bh.hash(),
+		bh.height
+	);
	check_known(bh.hash(), &mut ctx)?;
	validate_header(&bh, &mut ctx)?;
	add_block_header(bh, &mut ctx)?;
@@ -106,7 +112,9 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
		return Err(Error::Orphan);
	}

-	let prev = try!(ctx.store.get_block_header(&header.previous).map_err(&Error::StoreErr));
+	let prev = try!(ctx.store.get_block_header(&header.previous).map_err(
+		&Error::StoreErr,
+	));

	if header.height != prev.height + 1 {
		return Err(Error::InvalidBlockHeight);
@@ -117,7 +125,8 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
		return Err(Error::InvalidBlockTime);
	}
	if header.timestamp >
-	   time::now() + time::Duration::seconds(12 * (consensus::BLOCK_TIME_SEC as i64)) {
+		time::now() + time::Duration::seconds(12 * (consensus::BLOCK_TIME_SEC as i64))
+	{
		// refuse blocks more than 12 blocks intervals in future (as in bitcoin)
		// TODO add warning in p2p code if local time is too different from peers
		return Err(Error::InvalidBlockTime);
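Note (not part of the diff): the condition being rewrapped above rejects a header whose timestamp is more than 12 block intervals in the future. A rough sketch of the same check using std::time instead of the time crate; the function name and the BLOCK_TIME_SEC value are illustrative, the real constant lives in the consensus module:

use std::time::{Duration, SystemTime};

const BLOCK_TIME_SEC: u64 = 60; // illustrative; not necessarily the consensus value

// Returns true when the header timestamp is further in the future than
// 12 block intervals, i.e. the case validate_header rejects.
fn too_far_in_future(header_time: SystemTime) -> bool {
    let horizon = SystemTime::now() + Duration::from_secs(12 * BLOCK_TIME_SEC);
    header_time > horizon
}

fn main() {
    let soon = SystemTime::now() + Duration::from_secs(5 * BLOCK_TIME_SEC);
    let late = SystemTime::now() + Duration::from_secs(20 * BLOCK_TIME_SEC);
    assert!(!too_far_in_future(soon));
    assert!(too_far_in_future(late));
    println!("timestamp checks behave as expected");
}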
@@ -131,8 +140,9 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
	}

	let diff_iter = store::DifficultyIter::from(header.previous, ctx.store.clone());
-	let difficulty =
-		consensus::next_difficulty(diff_iter).map_err(|e| Error::Other(e.to_string()))?;
+	let difficulty = consensus::next_difficulty(diff_iter).map_err(|e| {
+		Error::Other(e.to_string())
+	})?;
	if header.difficulty < difficulty {
		return Err(Error::DifficultyTooLow);
	}
@@ -224,9 +234,11 @@ fn update_header_head(bh: &BlockHeader, ctx: &mut BlockContext) -> Result<Option
		ctx.store.save_header_head(&tip).map_err(&Error::StoreErr)?;

		ctx.head = tip.clone();
-		info!("Updated block header head to {} at {}.",
-		      bh.hash(),
-		      bh.height);
+		info!(
+			"Updated block header head to {} at {}.",
+			bh.hash(),
+			bh.height
+		);
		Ok(Some(tip))
	} else {
		Ok(None)
@@ -82,7 +82,9 @@ impl ChainStore for ChainKVStore {
	}

	fn get_block_header(&self, h: &Hash) -> Result<BlockHeader, Error> {
-		option_to_not_found(self.db.get_ser(&to_key(BLOCK_HEADER_PREFIX, &mut h.to_vec())))
+		option_to_not_found(self.db.get_ser(
+			&to_key(BLOCK_HEADER_PREFIX, &mut h.to_vec()),
+		))
	}

	fn check_block_exists(&self, h: &Hash) -> Result<bool, Error> {
@@ -94,20 +96,27 @@ impl ChainStore for ChainKVStore {
		let mut batch = self.db
			.batch()
			.put_ser(&to_key(BLOCK_PREFIX, &mut b.hash().to_vec())[..], b)?
-			.put_ser(&to_key(BLOCK_HEADER_PREFIX, &mut b.hash().to_vec())[..],
-			         &b.header)?;
+			.put_ser(
+				&to_key(BLOCK_HEADER_PREFIX, &mut b.hash().to_vec())[..],
+				&b.header,
+			)?;

		// saving the full output under its hash, as well as a commitment to hash index
		for out in &b.outputs {
			let mut out_bytes = out.commit.as_ref().to_vec();
-			batch = batch.put_ser(&to_key(OUTPUT_COMMIT_PREFIX, &mut out_bytes)[..], out)?;
+			batch = batch.put_ser(
+				&to_key(OUTPUT_COMMIT_PREFIX, &mut out_bytes)[..],
+				out,
+			)?;
		}
		batch.write()
	}

	fn save_block_header(&self, bh: &BlockHeader) -> Result<(), Error> {
-		self.db.put_ser(&to_key(BLOCK_HEADER_PREFIX, &mut bh.hash().to_vec())[..],
-		                bh)
+		self.db.put_ser(
+			&to_key(BLOCK_HEADER_PREFIX, &mut bh.hash().to_vec())[..],
+			bh,
+		)
	}

	fn get_header_by_height(&self, height: u64) -> Result<BlockHeader, Error> {
@@ -115,17 +124,24 @@ impl ChainStore for ChainKVStore {
	}

	fn get_output_by_commit(&self, commit: &Commitment) -> Result<Output, Error> {
-		option_to_not_found(self.db
-			.get_ser(&to_key(OUTPUT_COMMIT_PREFIX, &mut commit.as_ref().to_vec())))
+		option_to_not_found(self.db.get_ser(&to_key(
+			OUTPUT_COMMIT_PREFIX,
+			&mut commit.as_ref().to_vec(),
+		)))
	}

	fn has_output_commit(&self, commit: &Commitment) -> Result<Hash, Error> {
-		option_to_not_found(self.db
-			.get_ser(&to_key(OUTPUT_COMMIT_PREFIX, &mut commit.as_ref().to_vec())))
+		option_to_not_found(self.db.get_ser(&to_key(
+			OUTPUT_COMMIT_PREFIX,
+			&mut commit.as_ref().to_vec(),
+		)))
	}

	fn setup_height(&self, bh: &BlockHeader) -> Result<(), Error> {
-		self.db.put_ser(&u64_to_key(HEADER_HEIGHT_PREFIX, bh.height), bh)?;
+		self.db.put_ser(
+			&u64_to_key(HEADER_HEIGHT_PREFIX, bh.height),
+			bh,
+		)?;

		let mut prev_h = bh.previous;
		let mut prev_height = bh.height - 1;
@@ -133,9 +149,10 @@ impl ChainStore for ChainKVStore {
			let prev = self.get_header_by_height(prev_height)?;
			if prev.hash() != prev_h {
				let real_prev = self.get_block_header(&prev_h)?;
-				self.db
-					.put_ser(&u64_to_key(HEADER_HEIGHT_PREFIX, real_prev.height),
-					         &real_prev);
+				self.db.put_ser(
+					&u64_to_key(HEADER_HEIGHT_PREFIX, real_prev.height),
+					&real_prev,
+				);
				prev_h = real_prev.previous;
				prev_height = real_prev.height - 1;
			} else {
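Note (not part of the diff): the store hunks above are all call-site rewrapping around one key scheme: every record is stored under a prefix byte (BLOCK_PREFIX, BLOCK_HEADER_PREFIX, OUTPUT_COMMIT_PREFIX, HEADER_HEIGHT_PREFIX) followed by the hash, commitment, or height bytes. A hypothetical sketch of that key construction; the prefix values and these simplified to_key / u64_to_key bodies are assumptions, not the crate's actual implementations:

// Illustrative prefix bytes; the real constants live alongside ChainKVStore.
const BLOCK_HEADER_PREFIX: u8 = b'h';
const HEADER_HEIGHT_PREFIX: u8 = b'8';

// Prefix + payload bytes: a simplified stand-in for to_key(prefix, &mut hash.to_vec()).
fn to_key(prefix: u8, bytes: &[u8]) -> Vec<u8> {
    let mut key = Vec::with_capacity(bytes.len() + 1);
    key.push(prefix);
    key.extend_from_slice(bytes);
    key
}

// Prefix + big-endian height, so headers sort by height under the prefix:
// a simplified stand-in for u64_to_key(prefix, height).
fn u64_to_key(prefix: u8, val: u64) -> Vec<u8> {
    let mut key = Vec::with_capacity(9);
    key.push(prefix);
    key.extend_from_slice(&val.to_be_bytes());
    key
}

fn main() {
    let fake_hash = [0xab_u8; 32];
    println!("{:?}", &to_key(BLOCK_HEADER_PREFIX, &fake_hash)[..5]);
    println!("{:?}", u64_to_key(HEADER_HEIGHT_PREFIX, 42));
}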
@@ -41,8 +41,8 @@ bitflags! {
pub enum Error {
	/// The block doesn't fit anywhere in our chain
	Unfit(String),
-	/// Special case of orphan blocks
-	Orphan,
+	/// Special case of orphan blocks
+	Orphan,
	/// Difficulty is too low either compared to ours or the block PoW hash
	DifficultyTooLow,
	/// Addition of difficulties on all previous block is wrong
@@ -199,7 +199,7 @@ pub trait ChainAdapter
	fn block_accepted(&self, b: &Block);
}

-pub struct NoopAdapter { }
+pub struct NoopAdapter {}
impl ChainAdapter for NoopAdapter {
	fn block_accepted(&self, b: &Block) {}
}
@@ -40,28 +40,28 @@ use grin_core::pow::MiningWorker

#[test]
fn mine_empty_chain() {
-	env_logger::init();
+	env_logger::init();
	let mut rng = OsRng::new().unwrap();
-	let chain = grin_chain::Chain::init(true, ".grin".to_string(), Arc::new(NoopAdapter{})).unwrap();
+	let chain = grin_chain::Chain::init(true, ".grin".to_string(), Arc::new(NoopAdapter {}))
+		.unwrap();

	// mine and add a few blocks
	let secp = secp::Secp256k1::with_caps(secp::ContextFlag::Commit);
	let reward_key = secp::key::SecretKey::new(&secp, &mut rng);

	let server_config = ServerConfig::default();
-	let mut miner_config = grin::MinerConfig{
-		enable_mining: true,
-		burn_reward: true,
-		..Default::default()
-	};
+	let mut miner_config = grin::MinerConfig {
+		enable_mining: true,
+		burn_reward: true,
+		..Default::default()
+	};
	miner_config.cuckoo_miner_plugin_dir = Some(String::from("../target/debug/deps"));

-	let mut cuckoo_miner = PluginMiner::new(consensus::EASINESS,
-	                                        consensus::TEST_SIZESHIFT as u32 );
-	cuckoo_miner.init(miner_config ,server_config);
-
+	let mut cuckoo_miner = PluginMiner::new(consensus::EASINESS, consensus::TEST_SIZESHIFT as u32);
+	cuckoo_miner.init(miner_config, server_config);

	for n in 1..4 {
-		let prev = chain.head_header().unwrap();
+		let prev = chain.head_header().unwrap();
		let mut b = core::Block::new(&prev, vec![], reward_key).unwrap();
		b.header.timestamp = prev.timestamp + time::Duration::seconds(60);

@@ -69,13 +69,13 @@ fn mine_empty_chain() {
		b.header.difficulty = difficulty.clone();

		pow::pow_size(
-			&mut cuckoo_miner,
-			&mut b.header,
-			difficulty,
-			consensus::TEST_SIZESHIFT as u32,
-		).unwrap();
+			&mut cuckoo_miner,
+			&mut b.header,
+			difficulty,
+			consensus::TEST_SIZESHIFT as u32,
+		).unwrap();

-		let bhash = b.hash();
+		let bhash = b.hash();
		chain.process_block(b, grin_chain::EASY_POW).unwrap();

		// checking our new head
@@ -87,38 +87,39 @@ fn mine_empty_chain

#[test]
fn mine_forks() {
-	env_logger::init();
+	env_logger::init();
	let mut rng = OsRng::new().unwrap();
-	let chain = grin_chain::Chain::init(true, ".grin2".to_string(), Arc::new(NoopAdapter{})).unwrap();
+	let chain = grin_chain::Chain::init(true, ".grin2".to_string(), Arc::new(NoopAdapter {}))
+		.unwrap();

	// mine and add a few blocks
	let secp = secp::Secp256k1::with_caps(secp::ContextFlag::Commit);
	let reward_key = secp::key::SecretKey::new(&secp, &mut rng);

	for n in 1..4 {
-		let prev = chain.head_header().unwrap();
+		let prev = chain.head_header().unwrap();
		let mut b = core::Block::new(&prev, vec![], reward_key).unwrap();
		b.header.timestamp = prev.timestamp + time::Duration::seconds(60);
-		b.header.total_difficulty = Difficulty::from_num(2*n);
-		let bhash = b.hash();
+		b.header.total_difficulty = Difficulty::from_num(2 * n);
+		let bhash = b.hash();
		chain.process_block(b, grin_chain::SKIP_POW).unwrap();

		// checking our new head
-		thread::sleep(::std::time::Duration::from_millis(50));
+		thread::sleep(::std::time::Duration::from_millis(50));
		let head = chain.head().unwrap();
		assert_eq!(head.height, n as u64);
		assert_eq!(head.last_block_h, bhash);
		assert_eq!(head.prev_block_h, prev.hash());

-		// build another block with higher difficulty
+		// build another block with higher difficulty
		let mut b = core::Block::new(&prev, vec![], reward_key).unwrap();
		b.header.timestamp = prev.timestamp + time::Duration::seconds(60);
-		b.header.total_difficulty = Difficulty::from_num(2*n+1);
-		let bhash = b.hash();
+		b.header.total_difficulty = Difficulty::from_num(2 * n + 1);
+		let bhash = b.hash();
		chain.process_block(b, grin_chain::SKIP_POW).unwrap();

		// checking head switch
-		thread::sleep(::std::time::Duration::from_millis(50));
+		thread::sleep(::std::time::Duration::from_millis(50));
		let head = chain.head().unwrap();
		assert_eq!(head.height, n as u64);
		assert_eq!(head.last_block_h, bhash);