Several simple fixes to reflect changes from core (Result instead of Option for errors, consensus module, etc.)

Ignotus Peverell 2016-11-15 17:29:42 -08:00
parent 3c5e2b2958
commit 1d2b23a4a5
No known key found for this signature in database
GPG key ID: 99CD25F39F8F8211
6 changed files with 57 additions and 61 deletions
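
The common thread in these diffs: functions that used to signal failure with Option<Error> (Some(err) on failure, None on success) now return Result<(), Error>, so the standard try!/? machinery and map_err replace the crate-local try_o! helper. A minimal before/after sketch with hypothetical Store and Error stand-ins rather than the real grin types; the ? operator used below is the stable spelling of the try! calls in the diff.

    // Hypothetical stand-ins; not the actual grin chain store or error enum.
    struct Store;

    #[derive(Debug)]
    enum Error {
        StoreErr(String),
    }

    impl Store {
        // Old convention: Option<Error>, where None meant success.
        fn save_old(&self) -> Option<Error> {
            None
        }
        // New convention: Result<(), Error>, where Ok(()) means success.
        fn save_new(&self) -> Result<(), Error> {
            Ok(())
        }
    }

    fn process(store: &Store) -> Result<(), Error> {
        // `?` returns early with the Err on failure, replacing the old
        // `if let Some(e) = ... { return Some(e); }` shape.
        store.save_new()?;
        Ok(())
    }

    fn main() {
        let store = Store;
        assert!(store.save_old().is_none()); // old-style success check
        assert!(process(&store).is_ok());    // new-style success check
        println!("both conventions report success");
    }

The payoff is that Result composes: callers chain with try!/? and map_err instead of pattern-matching on Option, which is exactly what the hunks below do.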


@@ -24,7 +24,6 @@
 extern crate bitflags;
 extern crate byteorder;
-#[macro_use(try_o)]
 extern crate grin_core as core;
 extern crate grin_store;
 extern crate secp256k1zkp as secp;


@@ -16,7 +16,9 @@
 use secp;
-use core::core::{Hash, BlockHeader, Block, Proof};
+use core::consensus;
+use core::core::hash::Hash;
+use core::core::{BlockHeader, Block, Proof};
 use core::pow;
 use types;
 use types::{Tip, ChainStore};

@@ -51,14 +53,11 @@ pub enum Error {
     StoreErr(types::Error),
 }
 
-pub fn process_block(b: &Block, store: &ChainStore, opts: Options) -> Option<Error> {
+pub fn process_block(b: &Block, store: &ChainStore, opts: Options) -> Result<(), Error> {
     // TODO should just take a promise for a block with a full header so we don't
     // spend resources reading the full block when its header is invalid
-    let head = match store.head() {
-        Ok(head) => head,
-        Err(err) => return Some(Error::StoreErr(err)),
-    };
+    let head = try!(store.head().map_err(&Error::StoreErr));
 
     let mut ctx = BlockContext {
         opts: opts,
         store: store,

@@ -66,12 +65,12 @@ pub fn process_block(b: &Block, store: &ChainStore, opts: Options) -> Option<Error> {
         tip: None,
     };
 
-    try_o!(validate_header(&b, &mut ctx));
-    try_o!(set_tip(&b.header, &mut ctx));
-    try_o!(validate_block(b, &mut ctx));
-    try_o!(add_block(b, &mut ctx));
-    try_o!(update_tips(&mut ctx));
-    None
+    try!(validate_header(&b, &mut ctx));
+    try!(set_tip(&b.header, &mut ctx));
+    try!(validate_block(b, &mut ctx));
+    try!(add_block(b, &mut ctx));
+    try!(update_tips(&mut ctx));
+    Ok(())
 }
 
 // block processing pipeline

@@ -87,46 +86,45 @@ pub fn process_block(b: &Block, store: &ChainStore, opts: Options) -> Option<Error> {
 /// to make it as cheap as possible. The different validations are also
 /// arranged by order of cost to have as little DoS surface as possible.
 /// TODO actually require only the block header (with length information)
-fn validate_header(b: &Block, ctx: &mut BlockContext) -> Option<Error> {
+fn validate_header(b: &Block, ctx: &mut BlockContext) -> Result<(), Error> {
     let header = &b.header;
-    println!("{} {}", header.height, ctx.head.height);
     if header.height > ctx.head.height + 1 {
         // TODO actually handle orphans and add them to a size-limited set
-        return Some(Error::Unfit("orphan".to_string()));
+        return Err(Error::Unfit("orphan".to_string()));
     }
     // TODO check time wrt to chain time, refuse older than 100 blocks or too far
     // in future
     // TODO maintain current difficulty
-    let diff_target = Proof(pow::MAX_TARGET);
+    let diff_target = consensus::MAX_TARGET;
     if ctx.opts.intersects(EASY_POW) {
         if !pow::verify20(b, diff_target) {
-            return Some(Error::InvalidPow);
+            return Err(Error::InvalidPow);
        }
     } else if !pow::verify(b, diff_target) {
-        return Some(Error::InvalidPow);
+        return Err(Error::InvalidPow);
     }
-    None
+    Ok(())
 }
 
-fn set_tip(h: &BlockHeader, ctx: &mut BlockContext) -> Option<Error> {
+fn set_tip(h: &BlockHeader, ctx: &mut BlockContext) -> Result<(), Error> {
     ctx.tip = Some(ctx.head.clone());
-    None
+    Ok(())
 }
 
-fn validate_block(b: &Block, ctx: &mut BlockContext) -> Option<Error> {
+fn validate_block(b: &Block, ctx: &mut BlockContext) -> Result<(), Error> {
     // TODO check tx merkle tree
     let curve = secp::Secp256k1::with_caps(secp::ContextFlag::Commit);
-    try_o!(b.verify(&curve).err().map(&Error::InvalidBlockProof));
-    None
+    try!(b.verify(&curve).map_err(&Error::InvalidBlockProof));
+    Ok(())
 }
 
-fn add_block(b: &Block, ctx: &mut BlockContext) -> Option<Error> {
+fn add_block(b: &Block, ctx: &mut BlockContext) -> Result<(), Error> {
     ctx.tip = ctx.tip.as_ref().map(|t| t.append(b.hash()));
-    ctx.store.save_block(b).map(&Error::StoreErr)
+    ctx.store.save_block(b).map_err(&Error::StoreErr)
 }
 
-fn update_tips(ctx: &mut BlockContext) -> Option<Error> {
-    ctx.store.save_head(ctx.tip.as_ref().unwrap()).map(&Error::StoreErr)
+fn update_tips(ctx: &mut BlockContext) -> Result<(), Error> {
+    ctx.store.save_head(ctx.tip.as_ref().unwrap()).map_err(&Error::StoreErr)
 }
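
With every pipeline stage returning Result<(), Error>, process_block chains the stages with try! and the four-line match on store.head() collapses into a single try!(... .map_err(...)). A compressed sketch of that control flow, with made-up stage names and simplified error types rather than the full grin pipeline (again using ? for try!):

    #[derive(Debug)]
    enum StoreError {
        NotFound,
    }

    #[derive(Debug)]
    enum Error {
        StoreErr(StoreError),
        InvalidPow,
    }

    fn head() -> Result<u64, StoreError> {
        Ok(42) // pretend the store has a head at height 42
    }

    fn validate_header(_height: u64) -> Result<(), Error> {
        Ok(())
    }

    fn add_block() -> Result<(), Error> {
        Ok(())
    }

    fn process_block() -> Result<(), Error> {
        // Equivalent to the match that was removed: wrap the store error and
        // return early on Err, bind the value on Ok.
        let head = head().map_err(Error::StoreErr)?;
        // Each subsequent stage short-circuits the pipeline on failure.
        validate_header(head)?;
        add_block()?;
        Ok(())
    }

    fn main() {
        println!("{:?}", process_block()); // Ok(())
    }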


@@ -46,20 +46,20 @@ impl ChainStore for ChainKVStore {
         option_to_not_found(self.db.get_ser(&vec![HEAD_PREFIX]))
     }
 
-    fn save_block(&self, b: &Block) -> Option<Error> {
-        self.db.put_ser(&to_key(BLOCK_PREFIX, &mut b.hash().to_vec())[..], b).map(&to_store_err)
+    fn save_block(&self, b: &Block) -> Result<(), Error> {
+        self.db.put_ser(&to_key(BLOCK_PREFIX, &mut b.hash().to_vec())[..], b).map_err(&to_store_err)
     }
 
-    fn save_head(&self, t: &Tip) -> Option<Error> {
-        try_o!(self.save_tip(t));
-        self.db.put_ser(&vec![HEAD_PREFIX], t).map(&to_store_err)
+    fn save_head(&self, t: &Tip) -> Result<(), Error> {
+        try!(self.save_tip(t));
+        self.db.put_ser(&vec![HEAD_PREFIX], t).map_err(&to_store_err)
     }
 
-    fn save_tip(&self, t: &Tip) -> Option<Error> {
+    fn save_tip(&self, t: &Tip) -> Result<(), Error> {
         let last_branch = t.lineage.last_branch();
         let mut k = vec![TIP_PREFIX, SEP];
         k.write_u32::<BigEndian>(last_branch);
-        self.db.put_ser(&mut k, t).map(&to_store_err)
+        self.db.put_ser(&mut k, t).map_err(&to_store_err)
     }
 }


@@ -14,7 +14,8 @@
 //! Base types that the block chain pipeline requires.
 
-use core::core::{Hash, Block};
+use core::core::hash::Hash;
+use core::core::Block;
 use core::ser;
 
 /// The lineage of a fork, defined as a series of numbers. Each new branch gets

@@ -40,12 +41,12 @@ impl Lineage {
 /// Serialization for lineage, necessary to serialize fork tips.
 impl ser::Writeable for Lineage {
-    fn write(&self, writer: &mut ser::Writer) -> Option<ser::Error> {
-        try_o!(writer.write_u32(self.0.len() as u32));
+    fn write(&self, writer: &mut ser::Writer) -> Result<(), ser::Error> {
+        try!(writer.write_u32(self.0.len() as u32));
         for num in &self.0 {
-            try_o!(writer.write_u32(*num));
+            try!(writer.write_u32(*num));
         }
-        None
+        Ok(())
     }
 }
 
 /// Deserialization for lineage, necessary to deserialize fork tips.

@@ -99,10 +100,10 @@ impl Tip {
 /// Serialization of a tip, required to save to datastore.
 impl ser::Writeable for Tip {
-    fn write(&self, writer: &mut ser::Writer) -> Option<ser::Error> {
-        try_o!(writer.write_u64(self.height));
-        try_o!(writer.write_fixed_bytes(&self.last_block_h));
-        try_o!(writer.write_fixed_bytes(&self.prev_block_h));
+    fn write(&self, writer: &mut ser::Writer) -> Result<(), ser::Error> {
+        try!(writer.write_u64(self.height));
+        try!(writer.write_fixed_bytes(&self.last_block_h));
+        try!(writer.write_fixed_bytes(&self.prev_block_h));
         self.lineage.write(writer)
     }
 }

@@ -137,11 +138,11 @@ pub trait ChainStore {
     fn head(&self) -> Result<Tip, Error>;
 
     /// Save the provided block in store
-    fn save_block(&self, b: &Block) -> Option<Error>;
+    fn save_block(&self, b: &Block) -> Result<(), Error>;
 
     /// Save the provided tip as the current head of our chain
-    fn save_head(&self, t: &Tip) -> Option<Error>;
+    fn save_head(&self, t: &Tip) -> Result<(), Error>;
 
     /// Save the provided tip without setting it as head
-    fn save_tip(&self, t: &Tip) -> Option<Error>;
+    fn save_tip(&self, t: &Tip) -> Result<(), Error>;
 }
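
The ser::Writeable change follows the same pattern: write now returns Result<(), ser::Error>, so nested writes chain and the happy path ends in Ok(()). A self-contained sketch using simplified, assumed Writer and error types in place of the real core::ser API:

    #[derive(Debug)]
    enum SerError {
        CorruptedData, // simplified stand-in for ser::Error
    }

    // Simplified stand-in for the core::ser::Writer trait.
    trait Writer {
        fn write_u32(&mut self, n: u32) -> Result<(), SerError>;
    }

    // A Vec-backed writer, just enough to exercise the trait.
    struct VecWriter(Vec<u8>);

    impl Writer for VecWriter {
        fn write_u32(&mut self, n: u32) -> Result<(), SerError> {
            self.0.extend_from_slice(&n.to_be_bytes());
            Ok(())
        }
    }

    struct Lineage(Vec<u32>);

    impl Lineage {
        // Same shape as the Writeable impl in the diff: propagate each
        // write's error with `?`, finish with Ok(()).
        fn write<W: Writer>(&self, writer: &mut W) -> Result<(), SerError> {
            writer.write_u32(self.0.len() as u32)?;
            for num in &self.0 {
                writer.write_u32(*num)?;
            }
            Ok(())
        }
    }

    fn main() {
        let mut w = VecWriter(Vec::new());
        Lineage(vec![1, 2, 3]).write(&mut w).unwrap();
        println!("wrote {} bytes", w.0.len()); // 16: one length u32 + three u32s
    }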


@@ -22,6 +22,7 @@ use rand::os::OsRng;
 use grin_chain::types::*;
 use grin_core::pow;
 use grin_core::core;
+use grin_core::consensus;
 
 #[test]
 fn mine_empty_chain() {

@@ -31,11 +32,11 @@ fn mine_empty_chain() {
     // save a genesis block
     let gen = grin_core::genesis::genesis();
-    assert!(store.save_block(&gen).is_none());
+    store.save_block(&gen).unwrap();
 
     // setup a new head tip
     let tip = Tip::new(gen.hash());
-    assert!(store.save_head(&tip).is_none());
+    store.save_head(&tip).unwrap();
 
     // mine and add a few blocks
     let mut prev = gen;

@@ -46,13 +47,10 @@ fn mine_empty_chain() {
         let mut b = core::Block::new(prev.header, vec![], reward_key).unwrap();
         println!("=> {} {:?}", b.header.height, b.verify(&curve));
 
-        let (proof, nonce) = pow::pow20(&b, core::Proof(pow::MAX_TARGET)).unwrap();
+        let (proof, nonce) = pow::pow20(&b, consensus::MAX_TARGET).unwrap();
         b.header.pow = proof;
         b.header.nonce = nonce;
-        if let Some(e) = grin_chain::pipe::process_block(&b, &store, grin_chain::pipe::EASY_POW) {
-            println!("err: {:?}", e);
-            panic!();
-        }
+        grin_chain::pipe::process_block(&b, &store, grin_chain::pipe::EASY_POW).unwrap();
 
         // checking our new head
         let head = store.head().unwrap();
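
In the test, the success check flips accordingly: against the old Option<Error> it was assert!(r.is_none()); against Result, .unwrap() both asserts success and shows the Err payload in the panic message when a stage fails. A trivial illustration with a made-up save function:

    // Hypothetical function standing in for store.save_block(&gen).
    fn save_block() -> Result<(), String> {
        Ok(())
    }

    fn main() {
        // Old Option<Error> style: assert!(save_block().is_none());
        // With Result, unwrap() panics with the error value on failure,
        // which is more informative than a bare assert.
        save_block().unwrap();
        println!("block saved");
    }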


@@ -63,18 +63,18 @@ impl Store {
     }
 
     /// Writes a single key/value pair to the db
-    pub fn put(&self, key: &[u8], value: Vec<u8>) -> Option<Error> {
+    pub fn put(&self, key: &[u8], value: Vec<u8>) -> Result<(), Error> {
         let db = self.rdb.write().unwrap();
-        db.put(key, &value[..]).err().map(Error::RocksDbErr)
+        db.put(key, &value[..]).map_err(Error::RocksDbErr)
     }
 
     /// Writes a single key and its `Writeable` value to the db. Encapsulates
     /// serialization.
-    pub fn put_ser(&self, key: &[u8], value: &ser::Writeable) -> Option<Error> {
+    pub fn put_ser(&self, key: &[u8], value: &ser::Writeable) -> Result<(), Error> {
         let ser_value = ser::ser_vec(value);
         match ser_value {
             Ok(data) => self.put(key, data),
-            Err(err) => Some(Error::SerErr(err)),
+            Err(err) => Err(Error::SerErr(err)),
         }
     }

@@ -98,8 +98,8 @@ impl Store {
     }
 
     /// Deletes a key/value pair from the db
-    pub fn delete(&self, key: &[u8]) -> Option<Error> {
+    pub fn delete(&self, key: &[u8]) -> Result<(), Error> {
         let db = self.rdb.write().unwrap();
-        db.delete(key).err().map(Error::RocksDbErr)
+        db.delete(key).map_err(Error::RocksDbErr)
     }
 }
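
The grin_store changes show why the conversion is mostly mechanical: with an Option<Error> return, the old code used result.err().map(f) to turn a failure into Some(mapped_error), while with Result<(), Error> the map_err adapter transforms only the error and leaves Ok(()) intact. A small sketch of the two shapes with plain std types (not the rocksdb bindings):

    #[derive(Debug)]
    enum Error {
        RocksDbErr(String),
    }

    // Stand-in for a db.put(...) call that can fail with a String error.
    fn db_put(fail: bool) -> Result<(), String> {
        if fail {
            Err("disk full".to_string())
        } else {
            Ok(())
        }
    }

    fn main() {
        // Old shape: Option<Error> -- Some only when the underlying call failed.
        let old: Option<Error> = db_put(true).err().map(Error::RocksDbErr);
        // New shape: Result<(), Error> -- success stays Ok(()), errors get wrapped.
        let new: Result<(), Error> = db_put(false).map_err(Error::RocksDbErr);
        println!("old: {:?}, new: {:?}", old, new);
    }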