Update bitflags to ^1.0 (#682)

* Removed unused crates
* Add listconnectedpeers in grin client
* Update bitflags to ^1.0 globally
Quentin Le Sceller 2018-02-05 14:43:54 -05:00 committed by Ignotus Peverell
parent 1f7dd4eb73
commit 8a7eb94759
22 changed files with 109 additions and 109 deletions
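
For readers skimming the diff below: the substance of the change is the bitflags 0.7 -> 1.0 migration. In 1.0 the macro declares `pub struct` instead of `pub flags`, constant definitions end with `;` instead of `,`, and the constants become associated items, so call sites move from a bare `SYNC` to `Options::SYNC`. A minimal sketch of the new shape, with names mirroring the chain `Options` type changed in this commit (the standalone `main` is illustrative only):

#[macro_use]
extern crate bitflags;

bitflags! {
    /// Options for block validation (mirrors chain::types in this diff)
    pub struct Options: u32 {
        const NONE = 0b00000000;
        const SKIP_POW = 0b00000001;
        const SYNC = 0b00000010;
        const MINE = 0b00000100;
    }
}

fn main() {
    // Flags are now scoped on the type rather than imported as bare constants.
    let opts = Options::SYNC | Options::SKIP_POW;
    assert!(opts.contains(Options::SYNC));
    assert!(!opts.contains(Options::MINE));
}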

View file

@ -24,7 +24,7 @@ use serde::Serialize;
use serde_json; use serde_json;
use chain; use chain;
use core::core::{OutputIdentifier, Transaction, DEFAULT_OUTPUT, COINBASE_OUTPUT}; use core::core::{OutputIdentifier, Transaction, OutputFeatures};
use core::core::hash::{Hash, Hashed}; use core::core::hash::{Hash, Hashed};
use core::ser; use core::ser;
use pool; use pool;
@ -68,8 +68,8 @@ impl UtxoHandler {
// to compare against the hash in the output MMR. // to compare against the hash in the output MMR.
// For now we can just try both (but this probably needs to be part of the api params) // For now we can just try both (but this probably needs to be part of the api params)
let outputs = [ let outputs = [
OutputIdentifier::new(DEFAULT_OUTPUT, &commit), OutputIdentifier::new(OutputFeatures::DEFAULT_OUTPUT, &commit),
OutputIdentifier::new(COINBASE_OUTPUT, &commit) OutputIdentifier::new(OutputFeatures::COINBASE_OUTPUT, &commit)
]; ];
for x in outputs.iter() { for x in outputs.iter() {

View file

@ -244,7 +244,7 @@ impl OutputPrintable {
include_proof: bool, include_proof: bool,
) -> OutputPrintable { ) -> OutputPrintable {
let output_type = let output_type =
if output.features.contains(core::transaction::COINBASE_OUTPUT) { if output.features.contains(core::transaction::OutputFeatures::COINBASE_OUTPUT) {
OutputType::Coinbase OutputType::Coinbase
} else { } else {
OutputType::Transaction OutputType::Transaction

View file

@ -5,7 +5,7 @@ authors = ["Ignotus Peverell <igno.peverell@protonmail.com>"]
workspace = ".." workspace = ".."
[dependencies] [dependencies]
bitflags = "^0.7.0" bitflags = "^1.0"
byteorder = "^0.5" byteorder = "^0.5"
slog = { version = "^2.0.12", features = ["max_level_trace", "release_max_level_trace"] } slog = { version = "^2.0.12", features = ["max_level_trace", "release_max_level_trace"] }
serde = "~1.0.8" serde = "~1.0.8"

View file

@ -237,7 +237,7 @@ pub fn process_block(&self, b: Block, opts: Options)
} }
// notifying other parts of the system of the update // notifying other parts of the system of the update
if !opts.contains(SYNC) { if !opts.contains(Options::SYNC) {
// broadcast the block // broadcast the block
let adapter = self.adapter.clone(); let adapter = self.adapter.clone();
adapter.block_accepted(&b, opts); adapter.block_accepted(&b, opts);
@ -254,7 +254,7 @@ pub fn process_block(&self, b: Block, opts: Options)
// or less relevant blocks somehow. // or less relevant blocks somehow.
// We should also probably consider banning nodes that send us really old blocks. // We should also probably consider banning nodes that send us really old blocks.
// //
if !opts.contains(SYNC) { if !opts.contains(Options::SYNC) {
// broadcast the block // broadcast the block
let adapter = self.adapter.clone(); let adapter = self.adapter.clone();
adapter.block_accepted(&b, opts); adapter.block_accepted(&b, opts);

View file

@ -43,4 +43,4 @@ pub mod types;
// Re-export the base interface // Re-export the base interface
pub use chain::Chain; pub use chain::Chain;
pub use types::{ChainAdapter, ChainStore, Error, Options, Tip, NONE, SKIP_POW, SYNC, MINE}; pub use types::{ChainAdapter, ChainStore, Error, Options, Tip};
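
Downstream callers that previously imported the bare `NONE`, `SKIP_POW`, `SYNC`, `MINE` constants from `chain` now reach them through the `Options` type, as the adapter and test changes further down show. A hedged sketch of the new call-site shape (the helper function is illustrative, not from this diff):

use chain::Options;

// Pick pipeline options depending on whether the node is currently syncing.
fn opts_for(syncing: bool) -> Options {
    if syncing { Options::SYNC } else { Options::NONE }
}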

View file

@ -211,7 +211,7 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
return Err(Error::InvalidBlockTime); return Err(Error::InvalidBlockTime);
} }
if !ctx.opts.contains(SKIP_POW) { if !ctx.opts.contains(Options::SKIP_POW) {
let n = global::sizeshift() as u32; let n = global::sizeshift() as u32;
if !(ctx.pow_verifier)(header, n) { if !(ctx.pow_verifier)(header, n) {
error!(LOGGER, "pipe: validate_header failed for cuckoo shift size {}", n); error!(LOGGER, "pipe: validate_header failed for cuckoo shift size {}", n);
@ -242,7 +242,7 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
return Err(Error::InvalidBlockTime); return Err(Error::InvalidBlockTime);
} }
if !ctx.opts.contains(SKIP_POW) { if !ctx.opts.contains(Options::SKIP_POW) {
// verify the proof of work and related parameters // verify the proof of work and related parameters
// explicit check to ensure we are not below the minimum difficulty // explicit check to ensure we are not below the minimum difficulty
@ -362,7 +362,7 @@ fn update_head(b: &Block, ctx: &mut BlockContext) -> Result<Option<Tip>, Error>
// in sync mode, only update the "body chain", otherwise update both the // in sync mode, only update the "body chain", otherwise update both the
// "header chain" and "body chain", updating the header chain in sync resets // "header chain" and "body chain", updating the header chain in sync resets
// all additional "future" headers we've received // all additional "future" headers we've received
if ctx.opts.contains(SYNC) { if ctx.opts.contains(Options::SYNC) {
ctx.store ctx.store
.save_body_head(&tip) .save_body_head(&tip)
.map_err(|e| Error::StoreErr(e, "pipe save body".to_owned()))?; .map_err(|e| Error::StoreErr(e, "pipe save body".to_owned()))?;

View file

@ -20,7 +20,7 @@ use std::collections::HashMap;
use std::path::Path; use std::path::Path;
use std::sync::Arc; use std::sync::Arc;
use core::core::{Block, SumCommit, Input, Output, OutputIdentifier, TxKernel, COINBASE_OUTPUT}; use core::core::{Block, SumCommit, Input, Output, OutputIdentifier, TxKernel, OutputFeatures};
use core::core::pmmr::{HashSum, NoSum, Summable, PMMR}; use core::core::pmmr::{HashSum, NoSum, Summable, PMMR};
use core::core::hash::Hashed; use core::core::hash::Hashed;
use grin_store; use grin_store;
@ -134,7 +134,7 @@ impl SumTrees {
// it claims to be spending, and that it is coinbase or non-coinbase. // it claims to be spending, and that it is coinbase or non-coinbase.
// If we are spending a coinbase output then go find the block // If we are spending a coinbase output then go find the block
// and check the coinbase maturity rule is being met. // and check the coinbase maturity rule is being met.
if input.features.contains(COINBASE_OUTPUT) { if input.features.contains(OutputFeatures::COINBASE_OUTPUT) {
let block_hash = &input.out_block let block_hash = &input.out_block
.expect("input spending coinbase output must have a block hash"); .expect("input spending coinbase output must have a block hash");
let block = self.commit_index.get_block(&block_hash)?; let block = self.commit_index.get_block(&block_hash)?;
@ -280,7 +280,7 @@ impl<'a> Extension<'a> {
// yet and it will be needed to calculate that hash. to work around this, // yet and it will be needed to calculate that hash. to work around this,
// we insert coinbase outputs first to add at least one output of padding // we insert coinbase outputs first to add at least one output of padding
for out in &b.outputs { for out in &b.outputs {
if out.features.contains(COINBASE_OUTPUT) { if out.features.contains(OutputFeatures::COINBASE_OUTPUT) {
self.apply_output(out)?; self.apply_output(out)?;
} }
} }
@ -293,7 +293,7 @@ impl<'a> Extension<'a> {
// now all regular, non coinbase outputs // now all regular, non coinbase outputs
for out in &b.outputs { for out in &b.outputs {
if !out.features.contains(COINBASE_OUTPUT) { if !out.features.contains(OutputFeatures::COINBASE_OUTPUT) {
self.apply_output(out)?; self.apply_output(out)?;
} }
} }
@ -336,7 +336,7 @@ impl<'a> Extension<'a> {
// it claims to be spending, and it is coinbase or non-coinbase. // it claims to be spending, and it is coinbase or non-coinbase.
// If we are spending a coinbase output then go find the block // If we are spending a coinbase output then go find the block
// and check the coinbase maturity rule is being met. // and check the coinbase maturity rule is being met.
if input.features.contains(COINBASE_OUTPUT) { if input.features.contains(OutputFeatures::COINBASE_OUTPUT) {
let block_hash = &input.out_block let block_hash = &input.out_block
.expect("input spending coinbase output must have a block hash"); .expect("input spending coinbase output must have a block hash");
let block = self.commit_index.get_block(&block_hash)?; let block = self.commit_index.get_block(&block_hash)?;

View file

@ -27,15 +27,15 @@ use grin_store;
bitflags! { bitflags! {
/// Options for block validation /// Options for block validation
pub flags Options: u32 { pub struct Options: u32 {
/// No flags /// No flags
const NONE = 0b00000000, const NONE = 0b00000000;
/// Runs without checking the Proof of Work, mostly to make testing easier. /// Runs without checking the Proof of Work, mostly to make testing easier.
const SKIP_POW = 0b00000001, const SKIP_POW = 0b00000001;
/// Adds block while in syncing mode. /// Adds block while in syncing mode.
const SYNC = 0b00000010, const SYNC = 0b00000010;
/// Block validation on a block we mined ourselves /// Block validation on a block we mined ourselves
const MINE = 0b00000100, const MINE = 0b00000100;
} }
} }

View file

@ -96,7 +96,7 @@ fn mine_empty_chain() {
).unwrap(); ).unwrap();
let bhash = b.hash(); let bhash = b.hash();
chain.process_block(b, chain::MINE).unwrap(); chain.process_block(b, chain::Options::MINE).unwrap();
// checking our new head // checking our new head
let head = chain.head().unwrap(); let head = chain.head().unwrap();
@ -128,7 +128,7 @@ fn mine_forks() {
// add a first block to not fork genesis // add a first block to not fork genesis
let prev = chain.head_header().unwrap(); let prev = chain.head_header().unwrap();
let b = prepare_block(&kc, &prev, &chain, 2); let b = prepare_block(&kc, &prev, &chain, 2);
chain.process_block(b, chain::SKIP_POW).unwrap(); chain.process_block(b, chain::Options::SKIP_POW).unwrap();
// mine and add a few blocks // mine and add a few blocks
@ -142,7 +142,7 @@ fn mine_forks() {
// process the first block to extend the chain // process the first block to extend the chain
let bhash = b1.hash(); let bhash = b1.hash();
chain.process_block(b1, chain::SKIP_POW).unwrap(); chain.process_block(b1, chain::Options::SKIP_POW).unwrap();
// checking our new head // checking our new head
let head = chain.head().unwrap(); let head = chain.head().unwrap();
@ -152,7 +152,7 @@ fn mine_forks() {
// process the 2nd block to build a fork with more work // process the 2nd block to build a fork with more work
let bhash = b2.hash(); let bhash = b2.hash();
chain.process_block(b2, chain::SKIP_POW).unwrap(); chain.process_block(b2, chain::Options::SKIP_POW).unwrap();
// checking head switch // checking head switch
let head = chain.head().unwrap(); let head = chain.head().unwrap();
@ -171,7 +171,7 @@ fn mine_losing_fork() {
let prev = chain.head_header().unwrap(); let prev = chain.head_header().unwrap();
let b1 = prepare_block(&kc, &prev, &chain, 2); let b1 = prepare_block(&kc, &prev, &chain, 2);
let b1head = b1.header.clone(); let b1head = b1.header.clone();
chain.process_block(b1, chain::SKIP_POW).unwrap(); chain.process_block(b1, chain::Options::SKIP_POW).unwrap();
// prepare the 2 successor, sibling blocks, one with lower diff // prepare the 2 successor, sibling blocks, one with lower diff
let b2 = prepare_block(&kc, &b1head, &chain, 4); let b2 = prepare_block(&kc, &b1head, &chain, 4);
@ -180,14 +180,14 @@ fn mine_losing_fork() {
// add higher difficulty first, prepare its successor, then fork // add higher difficulty first, prepare its successor, then fork
// with lower diff // with lower diff
chain.process_block(b2, chain::SKIP_POW).unwrap(); chain.process_block(b2, chain::Options::SKIP_POW).unwrap();
assert_eq!(chain.head_header().unwrap().hash(), b2head.hash()); assert_eq!(chain.head_header().unwrap().hash(), b2head.hash());
let b3 = prepare_block(&kc, &b2head, &chain, 5); let b3 = prepare_block(&kc, &b2head, &chain, 5);
chain.process_block(bfork, chain::SKIP_POW).unwrap(); chain.process_block(bfork, chain::Options::SKIP_POW).unwrap();
// adding the successor // adding the successor
let b3head = b3.header.clone(); let b3head = b3.header.clone();
chain.process_block(b3, chain::SKIP_POW).unwrap(); chain.process_block(b3, chain::Options::SKIP_POW).unwrap();
assert_eq!(chain.head_header().unwrap().hash(), b3head.hash()); assert_eq!(chain.head_header().unwrap().hash(), b3head.hash());
} }
@ -209,10 +209,10 @@ fn longer_fork() {
if n < 5 { if n < 5 {
let b_fork = b.clone(); let b_fork = b.clone();
chain_fork.process_block(b_fork, chain::SKIP_POW).unwrap(); chain_fork.process_block(b_fork, chain::Options::SKIP_POW).unwrap();
} }
chain.process_block(b, chain::SKIP_POW).unwrap(); chain.process_block(b, chain::Options::SKIP_POW).unwrap();
prev = bh; prev = bh;
} }
@ -229,9 +229,9 @@ fn longer_fork() {
let bh_fork = b_fork.header.clone(); let bh_fork = b_fork.header.clone();
let b = b_fork.clone(); let b = b_fork.clone();
chain.process_block(b, chain::SKIP_POW).unwrap(); chain.process_block(b, chain::Options::SKIP_POW).unwrap();
chain_fork.process_block(b_fork, chain::SKIP_POW).unwrap(); chain_fork.process_block(b_fork, chain::Options::SKIP_POW).unwrap();
prev_fork = bh_fork; prev_fork = bh_fork;
} }
} }
@ -250,13 +250,13 @@ fn spend_in_fork() {
let b = prepare_block(&kc, &fork_head, &chain, 2); let b = prepare_block(&kc, &fork_head, &chain, 2);
let block_hash = b.hash(); let block_hash = b.hash();
fork_head = b.header.clone(); fork_head = b.header.clone();
chain.process_block(b, chain::SKIP_POW).unwrap(); chain.process_block(b, chain::Options::SKIP_POW).unwrap();
// now mine three further blocks // now mine three further blocks
for n in 3..6 { for n in 3..6 {
let b = prepare_block(&kc, &fork_head, &chain, n); let b = prepare_block(&kc, &fork_head, &chain, n);
fork_head = b.header.clone(); fork_head = b.header.clone();
chain.process_block(b, chain::SKIP_POW).unwrap(); chain.process_block(b, chain::Options::SKIP_POW).unwrap();
} }
let lock_height = 1 + global::coinbase_maturity(); let lock_height = 1 + global::coinbase_maturity();
@ -273,7 +273,7 @@ fn spend_in_fork() {
let next = prepare_block_tx(&kc, &fork_head, &chain, 7, vec![&tx1]); let next = prepare_block_tx(&kc, &fork_head, &chain, 7, vec![&tx1]);
let prev_main = next.header.clone(); let prev_main = next.header.clone();
chain.process_block(next.clone(), chain::SKIP_POW).unwrap(); chain.process_block(next.clone(), chain::Options::SKIP_POW).unwrap();
let (tx2, _) = build::transaction( let (tx2, _) = build::transaction(
vec![ vec![
@ -286,16 +286,16 @@ fn spend_in_fork() {
let next = prepare_block_tx(&kc, &prev_main, &chain, 9, vec![&tx2]); let next = prepare_block_tx(&kc, &prev_main, &chain, 9, vec![&tx2]);
let prev_main = next.header.clone(); let prev_main = next.header.clone();
chain.process_block(next, chain::SKIP_POW).unwrap(); chain.process_block(next, chain::Options::SKIP_POW).unwrap();
// mine 2 forked blocks from the first // mine 2 forked blocks from the first
let fork = prepare_fork_block_tx(&kc, &fork_head, &chain, 6, vec![&tx1]); let fork = prepare_fork_block_tx(&kc, &fork_head, &chain, 6, vec![&tx1]);
let prev_fork = fork.header.clone(); let prev_fork = fork.header.clone();
chain.process_block(fork, chain::SKIP_POW).unwrap(); chain.process_block(fork, chain::Options::SKIP_POW).unwrap();
let fork_next = prepare_fork_block_tx(&kc, &prev_fork, &chain, 8, vec![&tx2]); let fork_next = prepare_fork_block_tx(&kc, &prev_fork, &chain, 8, vec![&tx2]);
let prev_fork = fork_next.header.clone(); let prev_fork = fork_next.header.clone();
chain.process_block(fork_next, chain::SKIP_POW).unwrap(); chain.process_block(fork_next, chain::Options::SKIP_POW).unwrap();
// check state // check state
let head = chain.head_header().unwrap(); let head = chain.head_header().unwrap();
@ -307,7 +307,7 @@ fn spend_in_fork() {
// make the fork win // make the fork win
let fork_next = prepare_fork_block(&kc, &prev_fork, &chain, 10); let fork_next = prepare_fork_block(&kc, &prev_fork, &chain, 10);
let prev_fork = fork_next.header.clone(); let prev_fork = fork_next.header.clone();
chain.process_block(fork_next, chain::SKIP_POW).unwrap(); chain.process_block(fork_next, chain::Options::SKIP_POW).unwrap();
// check state // check state
let head = chain.head_header().unwrap(); let head = chain.head_header().unwrap();

View file

@ -99,13 +99,13 @@ fn test_coinbase_maturity() {
assert!( assert!(
block.outputs[0] block.outputs[0]
.features .features
.contains(transaction::COINBASE_OUTPUT) .contains(transaction::OutputFeatures::COINBASE_OUTPUT)
); );
// we will need this later when we want to spend the coinbase output // we will need this later when we want to spend the coinbase output
let block_hash = block.hash(); let block_hash = block.hash();
chain.process_block(block, chain::MINE).unwrap(); chain.process_block(block, chain::Options::MINE).unwrap();
let prev = chain.head_header().unwrap(); let prev = chain.head_header().unwrap();
@ -178,7 +178,7 @@ fn test_coinbase_maturity() {
global::sizeshift() as u32, global::sizeshift() as u32,
).unwrap(); ).unwrap();
chain.process_block(block, chain::MINE).unwrap(); chain.process_block(block, chain::Options::MINE).unwrap();
} }
let prev = chain.head_header().unwrap(); let prev = chain.head_header().unwrap();
@ -213,7 +213,7 @@ fn test_coinbase_maturity() {
global::sizeshift() as u32, global::sizeshift() as u32,
).unwrap(); ).unwrap();
let result = chain.process_block(block, chain::MINE); let result = chain.process_block(block, chain::Options::MINE);
match result { match result {
Ok(_) => (), Ok(_) => (),
Err(Error::ImmatureCoinbase) => panic!("we should not get an ImmatureCoinbase here"), Err(Error::ImmatureCoinbase) => panic!("we should not get an ImmatureCoinbase here"),

View file

@ -5,7 +5,7 @@ authors = ["Ignotus Peverell <igno.peverell@protonmail.com>"]
workspace = ".." workspace = ".."
[dependencies] [dependencies]
bitflags = "~0.7.0" bitflags = "^1.0"
blake2-rfc = "~0.2.17" blake2-rfc = "~0.2.17"
byteorder = "^0.5" byteorder = "^0.5"
slog = { version = "^2.0.12", features = ["max_level_trace", "release_max_level_trace"] } slog = { version = "^2.0.12", features = ["max_level_trace", "release_max_level_trace"] }

View file

@ -29,8 +29,8 @@ use core::{
Proof, Proof,
TxKernel, TxKernel,
Transaction, Transaction,
COINBASE_KERNEL, OutputFeatures,
COINBASE_OUTPUT KernelFeatures
}; };
use consensus; use consensus;
use consensus::{exceeds_weight, reward, REWARD, VerifySortOrder}; use consensus::{exceeds_weight, reward, REWARD, VerifySortOrder};
@ -452,18 +452,18 @@ impl Block {
let mut out_full = self.outputs let mut out_full = self.outputs
.iter() .iter()
.filter(|x| x.features.contains(COINBASE_OUTPUT)) .filter(|x| x.features.contains(OutputFeatures::COINBASE_OUTPUT))
.cloned() .cloned()
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let mut kern_full = self.kernels let mut kern_full = self.kernels
.iter() .iter()
.filter(|x| x.features.contains(COINBASE_KERNEL)) .filter(|x| x.features.contains(KernelFeatures::COINBASE_KERNEL))
.cloned() .cloned()
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let mut kern_ids = self.kernels let mut kern_ids = self.kernels
.iter() .iter()
.filter(|x| !x.features.contains(COINBASE_KERNEL)) .filter(|x| !x.features.contains(KernelFeatures::COINBASE_KERNEL))
.map(|x| x.short_id(&block_hash)) .map(|x| x.short_id(&block_hash))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
@ -570,7 +570,7 @@ impl Block {
let out_set = self.outputs let out_set = self.outputs
.iter() .iter()
.filter(|out| !out.features.contains(COINBASE_OUTPUT)) .filter(|out| !out.features.contains(OutputFeatures::COINBASE_OUTPUT))
.map(|out| out.commitment()) .map(|out| out.commitment())
.collect::<HashSet<_>>(); .collect::<HashSet<_>>();
@ -679,13 +679,13 @@ impl Block {
fn verify_coinbase(&self) -> Result<(), Error> { fn verify_coinbase(&self) -> Result<(), Error> {
let cb_outs = self.outputs let cb_outs = self.outputs
.iter() .iter()
.filter(|out| out.features.contains(COINBASE_OUTPUT)) .filter(|out| out.features.contains(OutputFeatures::COINBASE_OUTPUT))
.cloned() .cloned()
.collect::<Vec<Output>>(); .collect::<Vec<Output>>();
let cb_kerns = self.kernels let cb_kerns = self.kernels
.iter() .iter()
.filter(|kernel| kernel.features.contains(COINBASE_KERNEL)) .filter(|kernel| kernel.features.contains(KernelFeatures::COINBASE_KERNEL))
.cloned() .cloned()
.collect::<Vec<TxKernel>>(); .collect::<Vec<TxKernel>>();
@ -738,7 +738,7 @@ impl Block {
// _and_ that we trust this claim. // _and_ that we trust this claim.
// We should have already confirmed the entry from the MMR exists // We should have already confirmed the entry from the MMR exists
// and has the expected hash. // and has the expected hash.
assert!(output.features.contains(COINBASE_OUTPUT)); assert!(output.features.contains(OutputFeatures::COINBASE_OUTPUT));
if let Some(_) = self.outputs if let Some(_) = self.outputs
.iter() .iter()
@ -788,7 +788,7 @@ impl Block {
let rproof = keychain.range_proof(reward(fees), key_id, commit, msg)?; let rproof = keychain.range_proof(reward(fees), key_id, commit, msg)?;
let output = Output { let output = Output {
features: COINBASE_OUTPUT, features: OutputFeatures::COINBASE_OUTPUT,
commit: commit, commit: commit,
switch_commit_hash: switch_commit_hash, switch_commit_hash: switch_commit_hash,
proof: rproof, proof: rproof,
@ -809,7 +809,7 @@ impl Block {
let sig = keychain.aggsig_sign_from_key_id(&msg, &key_id)?; let sig = keychain.aggsig_sign_from_key_id(&msg, &key_id)?;
let proof = TxKernel { let proof = TxKernel {
features: COINBASE_KERNEL, features: KernelFeatures::COINBASE_KERNEL,
excess: excess, excess: excess,
excess_sig: sig, excess_sig: sig,
fee: 0, fee: 0,
@ -945,14 +945,14 @@ mod test {
let coinbase_outputs = b.outputs let coinbase_outputs = b.outputs
.iter() .iter()
.filter(|out| out.features.contains(COINBASE_OUTPUT)) .filter(|out| out.features.contains(OutputFeatures::COINBASE_OUTPUT))
.map(|o| o.clone()) .map(|o| o.clone())
.collect::<Vec<_>>(); .collect::<Vec<_>>();
assert_eq!(coinbase_outputs.len(), 1); assert_eq!(coinbase_outputs.len(), 1);
let coinbase_kernels = b.kernels let coinbase_kernels = b.kernels
.iter() .iter()
.filter(|out| out.features.contains(COINBASE_KERNEL)) .filter(|out| out.features.contains(KernelFeatures::COINBASE_KERNEL))
.map(|o| o.clone()) .map(|o| o.clone())
.collect::<Vec<_>>(); .collect::<Vec<_>>();
assert_eq!(coinbase_kernels.len(), 1); assert_eq!(coinbase_kernels.len(), 1);
@ -970,8 +970,8 @@ mod test {
let keychain = Keychain::from_random_seed().unwrap(); let keychain = Keychain::from_random_seed().unwrap();
let mut b = new_block(vec![], &keychain); let mut b = new_block(vec![], &keychain);
assert!(b.outputs[0].features.contains(COINBASE_OUTPUT)); assert!(b.outputs[0].features.contains(OutputFeatures::COINBASE_OUTPUT));
b.outputs[0].features.remove(COINBASE_OUTPUT); b.outputs[0].features.remove(OutputFeatures::COINBASE_OUTPUT);
assert_eq!( assert_eq!(
b.verify_coinbase(), b.verify_coinbase(),
@ -992,8 +992,8 @@ mod test {
let keychain = Keychain::from_random_seed().unwrap(); let keychain = Keychain::from_random_seed().unwrap();
let mut b = new_block(vec![], &keychain); let mut b = new_block(vec![], &keychain);
assert!(b.kernels[0].features.contains(COINBASE_KERNEL)); assert!(b.kernels[0].features.contains(KernelFeatures::COINBASE_KERNEL));
b.kernels[0].features.remove(COINBASE_KERNEL); b.kernels[0].features.remove(KernelFeatures::COINBASE_KERNEL);
assert_eq!( assert_eq!(
b.verify_coinbase(), b.verify_coinbase(),
@ -1029,7 +1029,7 @@ mod test {
ser::serialize(&mut vec, &b).expect("serialization failed"); ser::serialize(&mut vec, &b).expect("serialization failed");
assert_eq!( assert_eq!(
vec.len(), vec.len(),
5_676, 5_676
); );
} }
@ -1042,7 +1042,7 @@ mod test {
ser::serialize(&mut vec, &b).expect("serialization failed"); ser::serialize(&mut vec, &b).expect("serialization failed");
assert_eq!( assert_eq!(
vec.len(), vec.len(),
16_224, 16_224
); );
} }
@ -1054,7 +1054,7 @@ mod test {
ser::serialize(&mut vec, &b.as_compact_block()).expect("serialization failed"); ser::serialize(&mut vec, &b.as_compact_block()).expect("serialization failed");
assert_eq!( assert_eq!(
vec.len(), vec.len(),
5_662, 5_662
); );
} }
@ -1067,7 +1067,7 @@ mod test {
ser::serialize(&mut vec, &b.as_compact_block()).expect("serialization failed"); ser::serialize(&mut vec, &b.as_compact_block()).expect("serialization failed");
assert_eq!( assert_eq!(
vec.len(), vec.len(),
5_668, 5_668
); );
} }
@ -1089,7 +1089,7 @@ mod test {
ser::serialize(&mut vec, &b).expect("serialization failed"); ser::serialize(&mut vec, &b).expect("serialization failed");
assert_eq!( assert_eq!(
vec.len(), vec.len(),
111_156, 111_156
); );
} }
@ -1111,7 +1111,7 @@ mod test {
ser::serialize(&mut vec, &b.as_compact_block()).expect("serialization failed"); ser::serialize(&mut vec, &b.as_compact_block()).expect("serialization failed");
assert_eq!( assert_eq!(
vec.len(), vec.len(),
5_722, 5_722
); );
} }
@ -1131,7 +1131,7 @@ mod test {
cb.kern_ids[0], cb.kern_ids[0],
b.kernels b.kernels
.iter() .iter()
.find(|x| !x.features.contains(COINBASE_KERNEL)) .find(|x| !x.features.contains(KernelFeatures::COINBASE_KERNEL))
.unwrap() .unwrap()
.short_id(&b.hash()) .short_id(&b.hash())
); );

View file

@ -27,7 +27,7 @@
use util::{secp, kernel_sig_msg}; use util::{secp, kernel_sig_msg};
use core::{Transaction, Input, Output, OutputFeatures, SwitchCommitHash, COINBASE_OUTPUT, DEFAULT_OUTPUT}; use core::{Transaction, Input, Output, OutputFeatures, SwitchCommitHash};
use core::hash::Hash; use core::hash::Hash;
use keychain; use keychain;
use keychain::{Keychain, BlindSum, BlindingFactor, Identifier}; use keychain::{Keychain, BlindSum, BlindingFactor, Identifier};
@ -69,7 +69,7 @@ pub fn input(
key_id: Identifier, key_id: Identifier,
) -> Box<Append> { ) -> Box<Append> {
debug!(LOGGER, "Building input (spending regular output): {}, {}", value, key_id); debug!(LOGGER, "Building input (spending regular output): {}, {}", value, key_id);
build_input(value, DEFAULT_OUTPUT, Some(out_block), key_id) build_input(value, OutputFeatures::DEFAULT_OUTPUT, Some(out_block), key_id)
} }
/// Adds a coinbase input spending a coinbase output. /// Adds a coinbase input spending a coinbase output.
@ -80,7 +80,7 @@ pub fn coinbase_input(
key_id: Identifier, key_id: Identifier,
) -> Box<Append> { ) -> Box<Append> {
debug!(LOGGER, "Building input (spending coinbase): {}, {}", value, key_id); debug!(LOGGER, "Building input (spending coinbase): {}, {}", value, key_id);
build_input(value, COINBASE_OUTPUT, Some(out_block), key_id) build_input(value, OutputFeatures::COINBASE_OUTPUT, Some(out_block), key_id)
} }
/// Adds an output with the provided value and key identifier from the /// Adds an output with the provided value and key identifier from the
@ -120,7 +120,7 @@ pub fn output(value: u64, key_id: Identifier) -> Box<Append> {
( (
tx.with_output(Output { tx.with_output(Output {
features: DEFAULT_OUTPUT, features: OutputFeatures::DEFAULT_OUTPUT,
commit: commit, commit: commit,
switch_commit_hash: switch_commit_hash, switch_commit_hash: switch_commit_hash,
proof: rproof, proof: rproof,

View file

@ -319,7 +319,7 @@ mod test {
let tx_kernel = tx.build_kernel(excess); let tx_kernel = tx.build_kernel(excess);
let _ = tx_kernel.verify().unwrap(); let _ = tx_kernel.verify().unwrap();
assert_eq!(tx_kernel.features, DEFAULT_KERNEL); assert_eq!(tx_kernel.features, KernelFeatures::DEFAULT_KERNEL);
assert_eq!(tx_kernel.fee, tx.fee); assert_eq!(tx_kernel.fee, tx.fee);
assert_eq!(tx_kernel.excess, excess); assert_eq!(tx_kernel.excess, excess);
} }

View file

@ -38,11 +38,11 @@ pub const SWITCH_COMMIT_KEY_SIZE: usize = 32;
bitflags! { bitflags! {
/// Options for a kernel's structure or use /// Options for a kernel's structure or use
pub flags KernelFeatures: u8 { pub struct KernelFeatures: u8 {
/// No flags /// No flags
const DEFAULT_KERNEL = 0b00000000, const DEFAULT_KERNEL = 0b00000000;
/// Kernel matching a coinbase output /// Kernel matching a coinbase output
const COINBASE_KERNEL = 0b00000001, const COINBASE_KERNEL = 0b00000001;
} }
} }
@ -348,7 +348,7 @@ impl Transaction {
/// Builds a transaction kernel /// Builds a transaction kernel
pub fn build_kernel(&self, excess: Commitment) -> TxKernel { pub fn build_kernel(&self, excess: Commitment) -> TxKernel {
TxKernel { TxKernel {
features: DEFAULT_KERNEL, features: KernelFeatures::DEFAULT_KERNEL,
excess: excess, excess: excess,
excess_sig: self.excess_sig.clone(), excess_sig: self.excess_sig.clone(),
fee: self.fee, fee: self.fee,
@ -408,7 +408,7 @@ impl Writeable for Input {
writer.write_u8(self.features.bits())?; writer.write_u8(self.features.bits())?;
writer.write_fixed_bytes(&self.commit)?; writer.write_fixed_bytes(&self.commit)?;
if self.features.contains(COINBASE_OUTPUT) { if self.features.contains(OutputFeatures::COINBASE_OUTPUT) {
writer.write_fixed_bytes(&self.out_block.unwrap_or(ZERO_HASH))?; writer.write_fixed_bytes(&self.out_block.unwrap_or(ZERO_HASH))?;
} }
@ -426,7 +426,7 @@ impl Readable for Input {
let commit = Commitment::read(reader)?; let commit = Commitment::read(reader)?;
let out_block = if features.contains(COINBASE_OUTPUT) { let out_block = if features.contains(OutputFeatures::COINBASE_OUTPUT) {
Some(Hash::read(reader)?) Some(Hash::read(reader)?)
} else { } else {
None None
@ -469,11 +469,11 @@ impl Input {
bitflags! { bitflags! {
/// Options for block validation /// Options for block validation
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub flags OutputFeatures: u8 { pub struct OutputFeatures: u8 {
/// No flags /// No flags
const DEFAULT_OUTPUT = 0b00000000, const DEFAULT_OUTPUT = 0b00000000;
/// Output is a coinbase output, must not be spent until maturity /// Output is a coinbase output, must not be spent until maturity
const COINBASE_OUTPUT = 0b00000001, const COINBASE_OUTPUT = 0b00000001;
} }
} }
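
The `Input` serialization hunks above rely on the raw-bits accessors that bitflags 1.0 keeps: `bits()` to write the feature byte, and (presumably, in the unchanged read path) `from_bits` or `from_bits_truncate` to rebuild the flags. A small sketch under that assumption, using the `OutputFeatures` type defined just above:

// Round-trip a feature flag through its raw byte representation.
let features = OutputFeatures::COINBASE_OUTPUT;
let raw: u8 = features.bits();
// from_bits returns None if unknown bits are set; callers must handle that.
let decoded = OutputFeatures::from_bits(raw).expect("valid output feature bits");
assert!(decoded.contains(OutputFeatures::COINBASE_OUTPUT));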
@ -920,7 +920,7 @@ mod test {
let sig = secp::Signature::from_raw_data(&[0;64]).unwrap(); let sig = secp::Signature::from_raw_data(&[0;64]).unwrap();
let kernel = TxKernel { let kernel = TxKernel {
features: DEFAULT_KERNEL, features: KernelFeatures::DEFAULT_KERNEL,
lock_height: 0, lock_height: 0,
excess: commit, excess: commit,
excess_sig: sig.clone(), excess_sig: sig.clone(),
@ -930,7 +930,7 @@ mod test {
let mut vec = vec![]; let mut vec = vec![];
ser::serialize(&mut vec, &kernel).expect("serialized failed"); ser::serialize(&mut vec, &kernel).expect("serialized failed");
let kernel2: TxKernel = ser::deserialize(&mut &vec[..]).unwrap(); let kernel2: TxKernel = ser::deserialize(&mut &vec[..]).unwrap();
assert_eq!(kernel2.features, DEFAULT_KERNEL); assert_eq!(kernel2.features, KernelFeatures::DEFAULT_KERNEL);
assert_eq!(kernel2.lock_height, 0); assert_eq!(kernel2.lock_height, 0);
assert_eq!(kernel2.excess, commit); assert_eq!(kernel2.excess, commit);
assert_eq!(kernel2.excess_sig, sig.clone()); assert_eq!(kernel2.excess_sig, sig.clone());
@ -938,7 +938,7 @@ mod test {
// now check a kernel with lock_height serializes/deserializes correctly // now check a kernel with lock_height serializes/deserializes correctly
let kernel = TxKernel { let kernel = TxKernel {
features: DEFAULT_KERNEL, features: KernelFeatures::DEFAULT_KERNEL,
lock_height: 100, lock_height: 100,
excess: commit, excess: commit,
excess_sig: sig.clone(), excess_sig: sig.clone(),
@ -948,7 +948,7 @@ mod test {
let mut vec = vec![]; let mut vec = vec![];
ser::serialize(&mut vec, &kernel).expect("serialized failed"); ser::serialize(&mut vec, &kernel).expect("serialized failed");
let kernel2: TxKernel = ser::deserialize(&mut &vec[..]).unwrap(); let kernel2: TxKernel = ser::deserialize(&mut &vec[..]).unwrap();
assert_eq!(kernel2.features, DEFAULT_KERNEL); assert_eq!(kernel2.features, KernelFeatures::DEFAULT_KERNEL);
assert_eq!(kernel2.lock_height, 100); assert_eq!(kernel2.lock_height, 100);
assert_eq!(kernel2.excess, commit); assert_eq!(kernel2.excess, commit);
assert_eq!(kernel2.excess_sig, sig.clone()); assert_eq!(kernel2.excess_sig, sig.clone());
@ -970,7 +970,7 @@ mod test {
let proof = keychain.range_proof(5, &key_id, commit, msg).unwrap(); let proof = keychain.range_proof(5, &key_id, commit, msg).unwrap();
let out = Output { let out = Output {
features: DEFAULT_OUTPUT, features: OutputFeatures::DEFAULT_OUTPUT,
commit: commit, commit: commit,
switch_commit_hash: switch_commit_hash, switch_commit_hash: switch_commit_hash,
proof: proof, proof: proof,
@ -980,7 +980,7 @@ mod test {
ser::serialize(&mut vec, &out).expect("serialized failed"); ser::serialize(&mut vec, &out).expect("serialized failed");
let dout: Output = ser::deserialize(&mut &vec[..]).unwrap(); let dout: Output = ser::deserialize(&mut &vec[..]).unwrap();
assert_eq!(dout.features, DEFAULT_OUTPUT); assert_eq!(dout.features, OutputFeatures::DEFAULT_OUTPUT);
assert_eq!(dout.commit, out.commit); assert_eq!(dout.commit, out.commit);
assert_eq!(dout.proof, out.proof); assert_eq!(dout.proof, out.proof);
} }
@ -1001,7 +1001,7 @@ mod test {
let proof = keychain.range_proof(1003, &key_id, commit, msg).unwrap(); let proof = keychain.range_proof(1003, &key_id, commit, msg).unwrap();
let output = Output { let output = Output {
features: DEFAULT_OUTPUT, features: OutputFeatures::DEFAULT_OUTPUT,
commit: commit, commit: commit,
switch_commit_hash: switch_commit_hash, switch_commit_hash: switch_commit_hash,
proof: proof, proof: proof,
@ -1047,7 +1047,7 @@ mod test {
let commit = keychain.commit(5, &key_id).unwrap(); let commit = keychain.commit(5, &key_id).unwrap();
let input = Input { let input = Input {
features: DEFAULT_OUTPUT, features: OutputFeatures::DEFAULT_OUTPUT,
commit: commit, commit: commit,
out_block: None, out_block: None,
}; };
@ -1062,7 +1062,7 @@ mod test {
// now generate the short_id for a *very* similar output (single feature flag different) // now generate the short_id for a *very* similar output (single feature flag different)
// and check it generates a different short_id // and check it generates a different short_id
let input = Input { let input = Input {
features: COINBASE_OUTPUT, features: OutputFeatures::COINBASE_OUTPUT,
commit: commit, commit: commit,
out_block: None, out_block: None,
}; };

View file

@ -18,7 +18,7 @@ use std::sync::atomic::{AtomicBool, Ordering};
use rand; use rand;
use rand::Rng; use rand::Rng;
use chain::{self, ChainAdapter, Options, MINE}; use chain::{self, ChainAdapter, Options};
use core::core; use core::core;
use core::core::block::BlockHeader; use core::core::block::BlockHeader;
use core::core::hash::{Hash, Hashed}; use core::core::hash::{Hash, Hashed};
@ -350,9 +350,9 @@ impl NetToChainAdapter {
/// Prepare options for the chain pipeline /// Prepare options for the chain pipeline
fn chain_opts(&self) -> chain::Options { fn chain_opts(&self) -> chain::Options {
let opts = if self.currently_syncing.load(Ordering::Relaxed) { let opts = if self.currently_syncing.load(Ordering::Relaxed) {
chain::SYNC chain::Options::SYNC
} else { } else {
chain::NONE chain::Options::NONE
}; };
opts opts
} }
@ -384,7 +384,7 @@ impl ChainAdapter for ChainToPoolAndNetAdapter {
// If block contains txs then broadcast the compact block. // If block contains txs then broadcast the compact block.
// If we received the block from another node then broadcast "header first" // If we received the block from another node then broadcast "header first"
// to minimize network traffic. // to minimize network traffic.
if opts.contains(MINE) { if opts.contains(Options::MINE) {
// propagate compact block out if we mined the block // propagate compact block out if we mined the block
// but broadcast full block if we have no txs // but broadcast full block if we have no txs
let cb = b.as_compact_block(); let cb = b.as_compact_block();

View file

@ -537,7 +537,7 @@ impl Miner {
b.hash() b.hash()
); );
b.header.pow = proof; b.header.pow = proof;
let res = self.chain.process_block(b, chain::MINE); let res = self.chain.process_block(b, chain::Options::MINE);
if let Err(e) = res { if let Err(e) = res {
error!( error!(
LOGGER, LOGGER,

View file

@ -565,7 +565,7 @@ mod test {
.add_blinding_factor(BlindingFactor::new(skey1)) .add_blinding_factor(BlindingFactor::new(skey1))
.add_blinding_factor(BlindingFactor::new(skey2)) .add_blinding_factor(BlindingFactor::new(skey2))
).unwrap(), ).unwrap(),
BlindingFactor::new(skey3), BlindingFactor::new(skey3)
); );
} }
} }

View file

@ -12,7 +12,7 @@ use std::clone::Clone;
use std::sync::RwLock; use std::sync::RwLock;
use core::core::{block, hash, transaction}; use core::core::{block, hash, transaction};
use core::core::{COINBASE_OUTPUT, Input, OutputIdentifier}; use core::core::{OutputFeatures, Input, OutputIdentifier};
use core::global; use core::global;
use core::core::hash::Hashed; use core::core::hash::Hashed;
use types::{BlockChain, PoolError}; use types::{BlockChain, PoolError};
@ -114,7 +114,7 @@ impl BlockChain for DummyChainImpl {
} }
fn is_matured(&self, input: &Input, height: u64) -> Result<(), PoolError> { fn is_matured(&self, input: &Input, height: u64) -> Result<(), PoolError> {
if !input.features.contains(COINBASE_OUTPUT) { if !input.features.contains(OutputFeatures::COINBASE_OUTPUT) {
return Ok(()); return Ok(());
} }
let block_hash = input.out_block.expect("requires a block hash"); let block_hash = input.out_block.expect("requires a block hash");

View file

@ -298,7 +298,7 @@ mod tests {
use util::secp; use util::secp;
use keychain::Keychain; use keychain::Keychain;
use rand; use rand;
use core::core::{DEFAULT_OUTPUT, SwitchCommitHash}; use core::core::{OutputFeatures, SwitchCommitHash};
#[test] #[test]
fn test_add_entry() { fn test_add_entry() {
@ -317,12 +317,12 @@ mod tests {
let inputs = vec![ let inputs = vec![
core::transaction::Input::new( core::transaction::Input::new(
DEFAULT_OUTPUT, OutputFeatures::DEFAULT_OUTPUT,
keychain.commit(50, &key_id2).unwrap(), keychain.commit(50, &key_id2).unwrap(),
None, None,
), ),
core::transaction::Input::new( core::transaction::Input::new(
DEFAULT_OUTPUT, OutputFeatures::DEFAULT_OUTPUT,
keychain.commit(25, &key_id3).unwrap(), keychain.commit(25, &key_id3).unwrap(),
None, None,
), ),
@ -330,7 +330,7 @@ mod tests {
let msg = secp::pedersen::ProofMessage::empty(); let msg = secp::pedersen::ProofMessage::empty();
let output = core::transaction::Output { let output = core::transaction::Output {
features: DEFAULT_OUTPUT, features: OutputFeatures::DEFAULT_OUTPUT,
commit: output_commit, commit: output_commit,
switch_commit_hash: switch_commit_hash, switch_commit_hash: switch_commit_hash,
proof: keychain proof: keychain

View file

@ -1321,7 +1321,7 @@ mod tests {
let proof = keychain.range_proof(value, &key_id, commit, msg).unwrap(); let proof = keychain.range_proof(value, &key_id, commit, msg).unwrap();
transaction::Output { transaction::Output {
features: transaction::DEFAULT_OUTPUT, features: transaction::OutputFeatures::DEFAULT_OUTPUT,
commit: commit, commit: commit,
switch_commit_hash: switch_commit_hash, switch_commit_hash: switch_commit_hash,
proof: proof, proof: proof,
@ -1343,7 +1343,7 @@ mod tests {
let proof = keychain.range_proof(value, &key_id, commit, msg).unwrap(); let proof = keychain.range_proof(value, &key_id, commit, msg).unwrap();
transaction::Output { transaction::Output {
features: transaction::COINBASE_OUTPUT, features: transaction::OutputFeatures::COINBASE_OUTPUT,
commit: commit, commit: commit,
switch_commit_hash: switch_commit_hash, switch_commit_hash: switch_commit_hash,
proof: proof, proof: proof,

View file

@ -18,7 +18,7 @@ use util::secp::pedersen;
use api; use api;
use core::global; use core::global;
use core::core::{Output, SwitchCommitHash}; use core::core::{Output, SwitchCommitHash};
use core::core::transaction::{COINBASE_OUTPUT, DEFAULT_OUTPUT}; use core::core::transaction::OutputFeatures;
use types::{BlockIdentifier, WalletConfig, WalletData, OutputData, OutputStatus, Error}; use types::{BlockIdentifier, WalletConfig, WalletData, OutputData, OutputStatus, Error};
use byteorder::{BigEndian, ByteOrder}; use byteorder::{BigEndian, ByteOrder};
@ -87,8 +87,8 @@ fn retrieve_amount_and_coinbase_status(
let core_output = Output { let core_output = Output {
features: match output.output_type { features: match output.output_type {
api::OutputType::Coinbase => COINBASE_OUTPUT, api::OutputType::Coinbase => OutputFeatures::COINBASE_OUTPUT,
api::OutputType::Transaction => DEFAULT_OUTPUT, api::OutputType::Transaction => OutputFeatures::DEFAULT_OUTPUT,
}, },
proof: output.range_proof()?, proof: output.range_proof()?,
switch_commit_hash: output.switch_commit_hash()?, switch_commit_hash: output.switch_commit_hash()?,