hash (features|commitment) in output mmr (#615)

* experiment with lock_heights on outputs

* playing around with lock_height as part of the switch commitment hash

* cleanup

* include features in the switch commit hash key

* commit

* rebase off master

* commit

* cleanup

* missing docs

* rework coinbase maturity test to build valid tx

* pool and chain tests passing (inputs have switch commitments)

* commit

* cleanup

* check inputs spending coinbase outputs have valid lock_heights

* wip - got it building (tests still failing)

* use zero key for non coinbase switch commit hash

* fees and height wrong order...

* send output lock_height over to wallet via api

* no more header by height index
workaround this for wallet refresh and wallet restore

* refresh heights for unspent wallet outputs where missing

* TODO - might be slow?

* simplify - do not pass around lock_height for non coinbase outputs

* commit

* fix tests after merge

* build input vs coinbase_input
switch commit hash key encodes lock_height
cleanup output by commit index (currently broken...)

* is_unspent and get_unspent cleanup - we have no outputs, only switch_commit_hashes

* separate concept of utxo vs output in the api
utxos come from the sumtrees (and only the sumtrees, limited info)
outputs come from blocks (and we need to look them up via block height)

* cleanup

* better api support for block outputs with range proofs

* basic wallet operations appear to work
restore is not working fully
refresh refreshes heights correctly (at least appears to)

* wallet refresh and wallet restore appear to be working now

* fix core tests

* fix some mine_simple_chain tests

* fixup chain tests

* rework so pool tests pass

* wallet restore now safely handles duplicate commitments (reused wallet keys)
for coinbase outputs where lock_height is _very_ important

* wip

* validate_coinbase_maturity
got things building
tests are failing

* lite vs full versions of is_unspent

* builds and working locally
zero-conf - what to do here?

* handle zero-conf edge case (use latest block)

* introduce OutputIdentifier, avoid leaking SumCommit everywhere

* fix the bad merge

* pool verifies coinbase maturity via is_matured
this uses sumtree in a consistent way

* cleanup

* add docs, cleanup build warnings

* fix core tests

* fix chain tests

* fix pool tests

* cleanup debug logging that we no longer need

* make out_block optional on an input (only care about it for spending coinbase outputs)

* cleanup

* bump the build
This commit is contained in:
AntiochP 2018-01-16 22:03:40 -05:00 committed by GitHub
parent 7e7c8e157e
commit cbd3b2ff87
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
31 changed files with 1345 additions and 901 deletions

View file

@ -24,9 +24,8 @@ use serde::Serialize;
use serde_json;
use chain;
use core::core::Transaction;
use core::core::hash::Hash;
use core::core::hash::Hashed;
use core::core::{OutputIdentifier, Transaction, DEFAULT_OUTPUT, COINBASE_OUTPUT};
use core::core::hash::{Hash, Hashed};
use core::ser;
use pool;
use p2p;
@ -54,38 +53,35 @@ impl Handler for IndexHandler {
// Supports retrieval of multiple outputs in a single request -
// GET /v1/chain/utxos/byids?id=xxx,yyy,zzz
// GET /v1/chain/utxos/byids?id=xxx&id=yyy&id=zzz
// GET /v1/chain/utxos/byheight?height=n
// GET /v1/chain/utxos/byheight?start_height=101&end_height=200
struct UtxoHandler {
chain: Arc<chain::Chain>,
}
impl UtxoHandler {
fn get_utxo(&self, id: &str, include_rp: bool, include_switch: bool) -> Result<Output, Error> {
debug!(LOGGER, "getting utxo: {}", id);
fn get_utxo(&self, id: &str) -> Result<Utxo, Error> {
let c = util::from_hex(String::from(id))
.map_err(|_| Error::Argument(format!("Not a valid commitment: {}", id)))?;
let commit = Commitment::from_vec(c);
let out = self.chain
.get_unspent(&commit)
.map_err(|_| Error::NotFound)?;
// We need the features here to be able to generate the necessary hash
// to compare against the hash in the output MMR.
// For now we can just try both (but this probably needs to be part of the api params)
let outputs = [
OutputIdentifier::new(DEFAULT_OUTPUT, &commit),
OutputIdentifier::new(COINBASE_OUTPUT, &commit)
];
let header = self.chain
.get_block_header_by_output_commit(&commit)
.map_err(|_| Error::NotFound)?;
Ok(Output::from_output(
&out,
&header,
include_rp,
include_switch,
))
for x in outputs.iter() {
if let Ok(_) = self.chain.is_unspent(&x) {
return Ok(Utxo::new(&commit))
}
}
Err(Error::NotFound)
}
fn utxos_by_ids(&self, req: &mut Request) -> Vec<Output> {
fn utxos_by_ids(&self, req: &mut Request) -> Vec<Utxo> {
let mut commitments: Vec<&str> = vec![];
let mut rp = false;
let mut switch = false;
if let Ok(params) = req.get_ref::<UrlEncodedQuery>() {
if let Some(ids) = params.get("id") {
for id in ids {
@ -94,23 +90,25 @@ impl UtxoHandler {
}
}
}
if let Some(_) = params.get("include_rp") {
rp = true;
}
if let Some(_) = params.get("include_switch") {
switch = true;
}
}
let mut utxos: Vec<Output> = vec![];
for commit in commitments {
if let Ok(out) = self.get_utxo(commit, rp, switch) {
utxos.push(out);
debug!(LOGGER, "utxos_by_ids: {:?}", commitments);
let mut utxos: Vec<Utxo> = vec![];
for x in commitments {
if let Ok(utxo) = self.get_utxo(x) {
utxos.push(utxo);
}
}
utxos
}
fn utxos_at_height(&self, block_height: u64) -> BlockOutputs {
fn outputs_at_height(
&self,
block_height: u64,
commitments: Vec<Commitment>,
include_proof: bool,
) -> BlockOutputs {
let header = self.chain
.clone()
.get_header_by_height(block_height)
@ -119,8 +117,12 @@ impl UtxoHandler {
let outputs = block
.outputs
.iter()
.filter(|c| self.chain.is_unspent(&c.commit).unwrap())
.map(|k| OutputSwitch::from_output(k, &header))
.filter(|output| {
commitments.is_empty() || commitments.contains(&output.commit)
})
.map(|output| {
OutputPrintable::from_output(output, self.chain.clone(), include_proof)
})
.collect();
BlockOutputs {
header: BlockHeaderInfo::from_header(&header),
@ -128,11 +130,23 @@ impl UtxoHandler {
}
}
// returns utxos for a specified range of blocks
fn utxo_block_batch(&self, req: &mut Request) -> Vec<BlockOutputs> {
// returns outputs for a specified range of blocks
fn outputs_block_batch(&self, req: &mut Request) -> Vec<BlockOutputs> {
let mut commitments: Vec<Commitment> = vec![];
let mut start_height = 1;
let mut end_height = 1;
let mut include_rp = false;
if let Ok(params) = req.get_ref::<UrlEncodedQuery>() {
if let Some(ids) = params.get("id") {
for id in ids {
for id in id.split(",") {
if let Ok(x) = util::from_hex(String::from(id)) {
commitments.push(Commitment::from_vec(x));
}
}
}
}
if let Some(heights) = params.get("start_height") {
for height in heights {
start_height = height.parse().unwrap();
@ -143,11 +157,28 @@ impl UtxoHandler {
end_height = height.parse().unwrap();
}
}
if let Some(_) = params.get("include_rp") {
include_rp = true;
}
}
debug!(
LOGGER,
"outputs_block_batch: {}-{}, {:?}, {:?}",
start_height,
end_height,
commitments,
include_rp,
);
let mut return_vec = vec![];
for i in start_height..end_height + 1 {
return_vec.push(self.utxos_at_height(i));
let res = self.outputs_at_height(i, commitments.clone(), include_rp);
if res.outputs.len() > 0 {
return_vec.push(res);
}
}
return_vec
}
}
@ -161,7 +192,7 @@ impl Handler for UtxoHandler {
}
match *path_elems.last().unwrap() {
"byids" => json_response(&self.utxos_by_ids(req)),
"byheight" => json_response(&self.utxo_block_batch(req)),
"byheight" => json_response(&self.outputs_block_batch(req)),
_ => Ok(Response::with((status::BadRequest, ""))),
}
}
@ -363,11 +394,8 @@ pub struct BlockHandler {
impl BlockHandler {
fn get_block(&self, h: &Hash) -> Result<BlockPrintable, Error> {
let block = self.chain
.clone()
.get_block(h)
.map_err(|_| Error::NotFound)?;
Ok(BlockPrintable::from_block(&block))
let block = self.chain.clone().get_block(h).map_err(|_| Error::NotFound)?;
Ok(BlockPrintable::from_block(&block, self.chain.clone(), false))
}
// Try to decode the string as a height or a hash.
@ -460,11 +488,17 @@ where
tx.outputs.len()
);
let res = self.tx_pool.write().unwrap().add_to_memory_pool(source, tx);
let res = self.tx_pool
.write()
.unwrap()
.add_to_memory_pool(source, tx);
match res {
Ok(()) => Ok(Response::with(status::Ok)),
Err(e) => Err(IronError::from(Error::Argument(format!("{:?}", e)))),
Err(e) => {
debug!(LOGGER, "error - {:?}", e);
Err(IronError::from(Error::Argument(format!("{:?}", e))))
}
}
}
}

View file

@ -13,13 +13,15 @@
// limitations under the License.
use std::sync::Arc;
use core::{core, global};
use core::{core, ser};
use core::core::hash::Hashed;
use core::core::SumCommit;
use chain;
use p2p;
use util::secp::pedersen;
use rest::*;
use util;
use util::secp::pedersen;
use util::secp::constants::MAX_PROOF_SIZE;
/// The state of the current fork tip
#[derive(Serialize, Deserialize, Debug, Clone)]
@ -86,10 +88,10 @@ impl SumTrees {
pub fn from_head(head: Arc<chain::Chain>) -> SumTrees {
let roots = head.get_sumtree_roots();
SumTrees {
utxo_root_hash: util::to_hex(roots.0.hash.to_vec()),
utxo_root_sum: util::to_hex(roots.0.sum.commit.0.to_vec()),
range_proof_root_hash: util::to_hex(roots.1.hash.to_vec()),
kernel_root_hash: util::to_hex(roots.2.hash.to_vec()),
utxo_root_hash: roots.0.hash.to_hex(),
utxo_root_sum: roots.0.sum.to_hex(),
range_proof_root_hash: roots.1.hash.to_hex(),
kernel_root_hash: roots.2.hash.to_hex(),
}
}
}
@ -100,26 +102,18 @@ impl SumTrees {
pub struct SumTreeNode {
// The hash
pub hash: String,
// Output (if included)
pub output: Option<OutputPrintable>,
// SumCommit (features|commitment), optional (only for utxos)
pub sum: Option<SumCommit>,
}
impl SumTreeNode {
pub fn get_last_n_utxo(chain: Arc<chain::Chain>, distance: u64) -> Vec<SumTreeNode> {
let mut return_vec = Vec::new();
let last_n = chain.get_last_n_utxo(distance);
for elem_output in last_n {
let header = chain
.get_block_header_by_output_commit(&elem_output.1.commit)
.map_err(|_| Error::NotFound);
// Need to call further method to check if output is spent
let mut output = OutputPrintable::from_output(&elem_output.1, &header.unwrap(), true);
if let Ok(_) = chain.get_unspent(&elem_output.1.commit) {
output.spent = false;
}
for x in last_n {
return_vec.push(SumTreeNode {
hash: util::to_hex(elem_output.0.to_vec()),
output: Some(output),
hash: util::to_hex(x.hash.to_vec()),
sum: Some(x.sum),
});
}
return_vec
@ -131,7 +125,7 @@ impl SumTreeNode {
for elem in last_n {
return_vec.push(SumTreeNode {
hash: util::to_hex(elem.hash.to_vec()),
output: None,
sum: None,
});
}
return_vec
@ -143,7 +137,7 @@ impl SumTreeNode {
for elem in last_n {
return_vec.push(SumTreeNode {
hash: util::to_hex(elem.hash.to_vec()),
output: None,
sum: None,
});
}
return_vec
@ -157,50 +151,14 @@ pub enum OutputType {
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Output {
/// The type of output Coinbase|Transaction
pub output_type: OutputType,
/// The homomorphic commitment representing the output's amount
pub struct Utxo {
/// The output commitment representing the amount
pub commit: pedersen::Commitment,
/// switch commit hash
pub switch_commit_hash: Option<core::SwitchCommitHash>,
/// A proof that the commitment is in the right range
pub proof: Option<pedersen::RangeProof>,
/// The height of the block creating this output
pub height: u64,
/// The lock height (earliest block this output can be spent)
pub lock_height: u64,
}
impl Output {
pub fn from_output(
output: &core::Output,
block_header: &core::BlockHeader,
include_proof: bool,
include_switch: bool,
) -> Output {
let (output_type, lock_height) = match output.features {
x if x.contains(core::transaction::COINBASE_OUTPUT) => (
OutputType::Coinbase,
block_header.height + global::coinbase_maturity(),
),
_ => (OutputType::Transaction, 0),
};
Output {
output_type: output_type,
commit: output.commit,
switch_commit_hash: match include_switch {
true => Some(output.switch_commit_hash),
false => None,
},
proof: match include_proof {
true => Some(output.proof),
false => None,
},
height: block_header.height,
lock_height: lock_height,
}
impl Utxo {
pub fn new(commit: &pedersen::Commitment) -> Utxo {
Utxo { commit: commit.clone() }
}
}
@ -209,66 +167,73 @@ impl Output {
pub struct OutputPrintable {
/// The type of output Coinbase|Transaction
pub output_type: OutputType,
/// The homomorphic commitment representing the output's amount (as hex
/// string)
/// The homomorphic commitment representing the output's amount
/// (as hex string)
pub commit: String,
/// switch commit hash
pub switch_commit_hash: String,
/// The height of the block creating this output
pub height: u64,
/// The lock height (earliest block this output can be spent)
pub lock_height: u64,
/// Whether the output has been spent
pub spent: bool,
/// Rangeproof hash (as hex string)
pub proof_hash: Option<String>,
/// Rangeproof (as hex string)
pub proof: Option<String>,
/// Rangeproof hash (as hex string)
pub proof_hash: String,
}
impl OutputPrintable {
pub fn from_output(
output: &core::Output,
block_header: &core::BlockHeader,
include_proof_hash: bool,
chain: Arc<chain::Chain>,
include_proof: bool,
) -> OutputPrintable {
let (output_type, lock_height) = match output.features {
x if x.contains(core::transaction::COINBASE_OUTPUT) => (
OutputType::Coinbase,
block_header.height + global::coinbase_maturity(),
),
_ => (OutputType::Transaction, 0),
let output_type =
if output.features.contains(core::transaction::COINBASE_OUTPUT) {
OutputType::Coinbase
} else {
OutputType::Transaction
};
let out_id = core::OutputIdentifier::from_output(&output);
let spent = chain.is_unspent(&out_id).is_err();
let proof = if include_proof {
Some(util::to_hex(output.proof.bytes().to_vec()))
} else {
None
};
OutputPrintable {
output_type: output_type,
commit: util::to_hex(output.commit.0.to_vec()),
switch_commit_hash: util::to_hex(output.switch_commit_hash.hash.to_vec()),
height: block_header.height,
lock_height: lock_height,
spent: true,
proof_hash: match include_proof_hash {
true => Some(util::to_hex(output.proof.hash().to_vec())),
false => None,
},
switch_commit_hash: output.switch_commit_hash.to_hex(),
spent: spent,
proof: proof,
proof_hash: util::to_hex(output.proof.hash().to_vec()),
}
}
}
// As above, except just the info needed for wallet reconstruction
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct OutputSwitch {
/// the commit
pub commit: String,
/// switch commit hash
pub switch_commit_hash: [u8; core::SWITCH_COMMIT_HASH_SIZE],
/// The height of the block creating this output
pub height: u64,
}
// Convert the hex string back into a switch_commit_hash instance
pub fn switch_commit_hash(&self) -> Result<core::SwitchCommitHash, ser::Error> {
core::SwitchCommitHash::from_hex(&self.switch_commit_hash)
}
impl OutputSwitch {
pub fn from_output(output: &core::Output, block_header: &core::BlockHeader) -> OutputSwitch {
OutputSwitch {
commit: util::to_hex(output.commit.0.to_vec()),
switch_commit_hash: output.switch_commit_hash.hash,
height: block_header.height,
pub fn commit(&self) -> Result<pedersen::Commitment, ser::Error> {
let vec = util::from_hex(self.commit.clone())
.map_err(|_| ser::Error::HexError(format!("output commit hex_error")))?;
Ok(pedersen::Commitment::from_vec(vec))
}
pub fn range_proof(&self) -> Result<pedersen::RangeProof, ser::Error> {
if let Some(ref proof) = self.proof {
let vec = util::from_hex(proof.clone())
.map_err(|_| ser::Error::HexError(format!("output range_proof hex_error")))?;
let mut bytes = [0; MAX_PROOF_SIZE];
for i in 0..vec.len() {
bytes[i] = vec[i];
}
Ok(pedersen::RangeProof { proof: bytes, plen: vec.len() })
} else {
Err(ser::Error::HexError(format!("output range_proof missing")))
}
}
}
@ -374,16 +339,19 @@ pub struct BlockPrintable {
}
impl BlockPrintable {
pub fn from_block(block: &core::Block) -> BlockPrintable {
let inputs = block
.inputs
pub fn from_block(
block: &core::Block,
chain: Arc<chain::Chain>,
include_proof: bool,
) -> BlockPrintable {
let inputs = block.inputs
.iter()
.map(|input| util::to_hex((input.0).0.to_vec()))
.map(|x| util::to_hex(x.commitment().0.to_vec()))
.collect();
let outputs = block
.outputs
.iter()
.map(|output| OutputPrintable::from_output(output, &block.header, true))
.map(|output| OutputPrintable::from_output(output, chain.clone(), include_proof))
.collect();
let kernels = block
.kernels
@ -406,7 +374,7 @@ pub struct BlockOutputs {
/// The block header
pub header: BlockHeaderInfo,
/// A printable version of the outputs
pub outputs: Vec<OutputSwitch>,
pub outputs: Vec<OutputPrintable>,
}
#[derive(Serialize, Deserialize)]

View file

@ -19,12 +19,12 @@ use std::collections::HashMap;
use std::sync::{Arc, Mutex, RwLock};
use std::time::{Duration, Instant};
use util::secp::pedersen::{Commitment, RangeProof};
use util::secp::pedersen::RangeProof;
use core::core::SumCommit;
use core::core::{Input, OutputIdentifier, SumCommit};
use core::core::pmmr::{HashSum, NoSum};
use core::core::{Block, BlockHeader, Output, TxKernel};
use core::core::{Block, BlockHeader, TxKernel};
use core::core::target::Difficulty;
use core::core::hash::Hash;
use grin_store::Error::NotFoundErr;
@ -34,6 +34,7 @@ use sumtree;
use types::*;
use util::LOGGER;
const MAX_ORPHAN_AGE_SECS: u64 = 30;
#[derive(Debug, Clone)]
@ -331,31 +332,23 @@ impl Chain {
}
}
/// Gets an unspent output from its commitment. With return None if the
/// output doesn't exist or has been spent. This querying is done in a
/// way that's consistent with the current chain state and more
/// specifically the current winning fork.
pub fn get_unspent(&self, output_ref: &Commitment) -> Result<Output, Error> {
match self.store.get_output_by_commit(output_ref) {
Ok(out) => {
let mut sumtrees = self.sumtrees.write().unwrap();
if sumtrees.is_unspent(output_ref)? {
Ok(out)
} else {
Err(Error::OutputNotFound)
}
}
Err(NotFoundErr) => Err(Error::OutputNotFound),
Err(e) => Err(Error::StoreErr(e, "chain get unspent".to_owned())),
}
}
/// Checks whether an output is unspent
pub fn is_unspent(&self, output_ref: &Commitment) -> Result<bool, Error> {
/// For the given commitment find the unspent output and return the associated
/// Return an error if the output does not exist or has been spent.
/// This querying is done in a way that is consistent with the current chain state,
/// specifically the current winning (valid, most work) fork.
pub fn is_unspent(&self, output_ref: &OutputIdentifier) -> Result<(), Error> {
let mut sumtrees = self.sumtrees.write().unwrap();
sumtrees.is_unspent(output_ref)
}
/// Check if the input has matured sufficiently for the given block height.
/// This only applies to inputs spending coinbase outputs.
/// An input spending a non-coinbase output will always pass this check.
pub fn is_matured(&self, input: &Input, height: u64) -> Result<(), Error> {
let mut sumtrees = self.sumtrees.write().unwrap();
sumtrees.is_matured(input, height)
}
/// Sets the sumtree roots on a brand new block by applying the block on the
/// current sumtree state.
pub fn set_sumtree_roots(&self, b: &mut Block, is_fork: bool) -> Result<(), Error> {
@ -391,17 +384,9 @@ impl Chain {
}
/// returns the last n nodes inserted into the utxo sum tree
/// returns sum tree hash plus output itself (as the sum is contained
/// in the output anyhow)
pub fn get_last_n_utxo(&self, distance: u64) -> Vec<(Hash, Output)> {
pub fn get_last_n_utxo(&self, distance: u64) -> Vec<HashSum<SumCommit>> {
let mut sumtrees = self.sumtrees.write().unwrap();
let mut return_vec = Vec::new();
let sum_nodes = sumtrees.last_n_utxo(distance);
for sum_commit in sum_nodes {
let output = self.store.get_output_by_commit(&sum_commit.sum.commit);
return_vec.push((sum_commit.hash, output.unwrap()));
}
return_vec
sumtrees.last_n_utxo(distance)
}
/// as above, for rangeproofs
@ -469,16 +454,6 @@ impl Chain {
})
}
/// Gets the block header by the provided output commitment
pub fn get_block_header_by_output_commit(
&self,
commit: &Commitment,
) -> Result<BlockHeader, Error> {
self.store
.get_block_header_by_output_commit(commit)
.map_err(|e| Error::StoreErr(e, "chain get commitment".to_owned()))
}
/// Get the tip of the current "sync" header chain.
/// This may be significantly different to current header chain.
pub fn get_sync_head(&self) -> Result<Tip, Error> {

View file

@ -22,7 +22,6 @@ use core::consensus;
use core::core::hash::{Hash, Hashed};
use core::core::{Block, BlockHeader};
use core::core::target::Difficulty;
use core::core::transaction;
use grin_store;
use types::*;
use store;
@ -291,21 +290,6 @@ fn validate_block(
return Err(Error::InvalidRoot);
}
// check for any outputs with lock_heights greater than current block height
for input in &b.inputs {
if let Ok(output) = ctx.store.get_output_by_commit(&input.commitment()) {
if output.features.contains(transaction::COINBASE_OUTPUT) {
if let Ok(output_header) = ctx.store
.get_block_header_by_output_commit(&input.commitment())
{
if b.header.height <= output_header.height + global::coinbase_maturity() {
return Err(Error::ImmatureCoinbase);
}
};
};
};
}
Ok(())
}

View file

@ -20,7 +20,7 @@ use util::secp::pedersen::Commitment;
use types::*;
use core::core::hash::{Hash, Hashed};
use core::core::{Block, BlockHeader, Output};
use core::core::{Block, BlockHeader};
use core::consensus::TargetError;
use core::core::target::Difficulty;
use grin_store::{self, option_to_not_found, to_key, Error, u64_to_key};
@ -33,8 +33,6 @@ const HEAD_PREFIX: u8 = 'H' as u8;
const HEADER_HEAD_PREFIX: u8 = 'I' as u8;
const SYNC_HEAD_PREFIX: u8 = 's' as u8;
const HEADER_HEIGHT_PREFIX: u8 = '8' as u8;
const OUTPUT_COMMIT_PREFIX: u8 = 'o' as u8;
const HEADER_BY_OUTPUT_PREFIX: u8 = 'p' as u8;
const COMMIT_POS_PREFIX: u8 = 'c' as u8;
const KERNEL_POS_PREFIX: u8 = 'k' as u8;
@ -106,70 +104,21 @@ impl ChainStore for ChainKVStore {
)
}
fn check_block_exists(&self, h: &Hash) -> Result<bool, Error> {
self.db.exists(&to_key(BLOCK_PREFIX, &mut h.to_vec()))
}
/// Save the block and its header
fn save_block(&self, b: &Block) -> Result<(), Error> {
// saving the block and its header
let mut batch = self.db
let batch = self.db
.batch()
.put_ser(&to_key(BLOCK_PREFIX, &mut b.hash().to_vec())[..], b)?
.put_ser(
&to_key(BLOCK_PREFIX, &mut b.hash().to_vec())[..],
b,
)?
.put_ser(
&to_key(BLOCK_HEADER_PREFIX, &mut b.hash().to_vec())[..],
&b.header,
)?;
// saving the full output under its hash, as well as a commitment to hash index
for out in &b.outputs {
batch = batch
.put_ser(
&to_key(
OUTPUT_COMMIT_PREFIX,
&mut out.commitment().as_ref().to_vec(),
)[..],
out,
)?
.put_ser(
&to_key(
HEADER_BY_OUTPUT_PREFIX,
&mut out.commitment().as_ref().to_vec(),
)[..],
&b.hash(),
)?;
}
batch.write()
}
// lookup the block header hash by output commitment
// lookup the block header based on this hash
// to check the chain is correct compare this block header to
// the block header currently indexed at the relevant block height (tbd if
// actually necessary)
//
// NOTE: This index is not exhaustive.
// This node may not have seen this full block, so may not have populated the
// index.
// Block headers older than some threshold (2 months?) will not necessarily be
// included
// in this index.
//
fn get_block_header_by_output_commit(&self, commit: &Commitment) -> Result<BlockHeader, Error> {
let block_hash = self.db.get_ser(&to_key(
HEADER_BY_OUTPUT_PREFIX,
&mut commit.as_ref().to_vec(),
))?;
match block_hash {
Some(hash) => {
let block_header = self.get_block_header(&hash)?;
self.is_on_current_chain(&block_header)?;
Ok(block_header)
}
None => Err(Error::NotFoundErr),
}
}
fn is_on_current_chain(&self, header: &BlockHeader) -> Result<(), Error> {
let header_at_height = self.get_header_by_height(header.height)?;
if header.hash() == header_at_height.hash() {
@ -194,13 +143,6 @@ impl ChainStore for ChainKVStore {
self.db.delete(&u64_to_key(HEADER_HEIGHT_PREFIX, height))
}
fn get_output_by_commit(&self, commit: &Commitment) -> Result<Output, Error> {
option_to_not_found(
self.db
.get_ser(&to_key(OUTPUT_COMMIT_PREFIX, &mut commit.as_ref().to_vec())),
)
}
fn save_output_pos(&self, commit: &Commitment, pos: u64) -> Result<(), Error> {
self.db.put_ser(
&to_key(COMMIT_POS_PREFIX, &mut commit.as_ref().to_vec())[..],

View file

@ -20,9 +20,7 @@ use std::collections::HashMap;
use std::path::Path;
use std::sync::Arc;
use util::secp::pedersen::{RangeProof, Commitment};
use core::core::{Block, SumCommit, Input, Output, TxKernel, COINBASE_OUTPUT};
use core::core::{Block, SumCommit, Input, Output, OutputIdentifier, TxKernel, COINBASE_OUTPUT};
use core::core::pmmr::{HashSum, NoSum, Summable, PMMR};
use core::core::hash::Hashed;
use grin_store;
@ -30,6 +28,7 @@ use grin_store::sumtree::PMMRBackend;
use types::ChainStore;
use types::Error;
use util::LOGGER;
use util::secp::pedersen::{RangeProof, Commitment};
const SUMTREES_SUBDIR: &'static str = "sumtrees";
const UTXO_SUBDIR: &'static str = "utxo";
@ -89,30 +88,62 @@ impl SumTrees {
})
}
/// Whether a given commitment exists in the Output MMR and it's unspent
pub fn is_unspent(&mut self, commit: &Commitment) -> Result<bool, Error> {
let rpos = self.commit_index.get_output_pos(commit);
match rpos {
/// Check is an output is unspent.
/// We look in the index to find the output MMR pos.
/// Then we check the entry in the output MMR and confirm the hash matches.
pub fn is_unspent(&mut self, output: &OutputIdentifier) -> Result<(), Error> {
match self.commit_index.get_output_pos(&output.commit) {
Ok(pos) => {
let output_pmmr = PMMR::at(
&mut self.output_pmmr_h.backend,
self.output_pmmr_h.last_pos
self.output_pmmr_h.last_pos,
);
if let Some(hs) = output_pmmr.get(pos) {
let hashsum = HashSum::from_summable(
pos,
&SumCommit::from_commit(&commit),
);
Ok(hs.hash == hashsum.hash)
if let Some(HashSum { hash, sum: _ }) = output_pmmr.get(pos) {
let sum_commit = output.as_sum_commit();
let hash_sum = HashSum::from_summable(pos, &sum_commit);
if hash == hash_sum.hash {
Ok(())
} else {
Err(Error::SumTreeErr(format!("sumtree hash mismatch")))
}
} else {
Ok(false)
Err(Error::OutputNotFound)
}
}
Err(grin_store::Error::NotFoundErr) => Ok(false),
Err(e) => Err(Error::StoreErr(e, "sumtree unspent check".to_owned())),
Err(grin_store::Error::NotFoundErr) => Err(Error::OutputNotFound),
Err(e) => Err(Error::StoreErr(e, format!("sumtree unspent check"))),
}
}
/// Check the output being spent by the input has sufficiently matured.
/// This only applies for coinbase outputs being spent (1,000 blocks).
/// Non-coinbase outputs will always pass this check.
/// For a coinbase output we find the block by the block hash provided in the input
/// and check coinbase maturity based on the height of this block.
pub fn is_matured(
&mut self,
input: &Input,
height: u64,
) -> Result<(), Error> {
// We should never be in a situation where we are checking maturity rules
// if the output is already spent (this should have already been checked).
let output = OutputIdentifier::from_input(&input);
assert!(self.is_unspent(&output).is_ok());
// At this point we can be sure the input is spending the output
// it claims to be spending, and that it is coinbase or non-coinbase.
// If we are spending a coinbase output then go find the block
// and check the coinbase maturity rule is being met.
if input.features.contains(COINBASE_OUTPUT) {
let block_hash = &input.out_block
.expect("input spending coinbase output must have a block hash");
let block = self.commit_index.get_block(&block_hash)?;
block.verify_coinbase_maturity(&input, height)
.map_err(|_| Error::ImmatureCoinbase)?;
}
Ok(())
}
/// returns the last N nodes inserted into the tree (i.e. the 'bottom'
/// nodes at level 0
pub fn last_n_utxo(&mut self, distance: u64) -> Vec<HashSum<SumCommit>> {
@ -253,8 +284,8 @@ impl<'a> Extension<'a> {
self.apply_output(out)?;
}
}
// then doing inputsm guarantees an input can't spend an output in the
// then doing inputs guarantees an input can't spend an output in the
// same block, enforcing block cut-through
for input in &b.inputs {
self.apply_input(input, b.header.height)?;
@ -266,7 +297,7 @@ impl<'a> Extension<'a> {
self.apply_output(out)?;
}
}
// finally, applying all kernels
for kernel in &b.kernels {
self.apply_kernel(kernel)?;
@ -275,24 +306,10 @@ impl<'a> Extension<'a> {
}
fn save_pos_index(&self) -> Result<(), Error> {
debug!(
LOGGER,
"sumtree: save_pos_index: outputs: {}, {:?}",
self.new_output_commits.len(),
self.new_output_commits.values().collect::<Vec<_>>(),
);
for (commit, pos) in &self.new_output_commits {
self.commit_index.save_output_pos(commit, *pos)?;
}
debug!(
LOGGER,
"sumtree: save_pos_index: kernels: {}, {:?}",
self.new_kernel_excesses.len(),
self.new_kernel_excesses.values().collect::<Vec<_>>(),
);
for (excess, pos) in &self.new_kernel_excesses {
self.commit_index.save_kernel_pos(excess, *pos)?;
}
@ -304,6 +321,32 @@ impl<'a> Extension<'a> {
let commit = input.commitment();
let pos_res = self.get_output_pos(&commit);
if let Ok(pos) = pos_res {
if let Some(HashSum { hash, sum: _ }) = self.output_pmmr.get(pos) {
let sum_commit = SumCommit::from_input(&input);
// check hash from pmmr matches hash from input
// if not then the input is not being honest about
// what it is attempting to spend...
let hash_sum = HashSum::from_summable(pos, &sum_commit);
if hash != hash_sum.hash {
return Err(Error::SumTreeErr(format!("output pmmr hash mismatch")));
}
// At this point we can be sure the input is spending the output
// it claims to be spending, and it is coinbase or non-coinbase.
// If we are spending a coinbase output then go find the block
// and check the coinbase maturity rule is being met.
if input.features.contains(COINBASE_OUTPUT) {
let block_hash = &input.out_block
.expect("input spending coinbase output must have a block hash");
let block = self.commit_index.get_block(&block_hash)?;
block.verify_coinbase_maturity(&input, height)
.map_err(|_| Error::ImmatureCoinbase)?;
}
}
// Now prune the output_pmmr and rproof_pmmr.
// Input is not valid if we cannot prune successfully (to spend an unspent output).
match self.output_pmmr.prune(pos, height as u32) {
Ok(true) => {
self.rproof_pmmr
@ -321,11 +364,7 @@ impl<'a> Extension<'a> {
fn apply_output(&mut self, out: &Output) -> Result<(), Error> {
let commit = out.commitment();
let switch_commit_hash = out.switch_commit_hash();
let sum_commit = SumCommit {
commit,
switch_commit_hash,
};
let sum_commit = SumCommit::from_output(out);
if let Ok(pos) = self.get_output_pos(&commit) {
// we need to check whether the commitment is in the current MMR view
@ -334,12 +373,12 @@ impl<'a> Extension<'a> {
// note that this doesn't show the commitment *never* existed, just
// that this is not an existing unspent commitment right now
if let Some(c) = self.output_pmmr.get(pos) {
let hashsum = HashSum::from_summable(pos, &sum_commit);
let hash_sum = HashSum::from_summable(pos, &sum_commit);
// processing a new fork so we may get a position on the old
// fork that exists but matches a different node
// filtering that case out
if c.hash == hashsum.hash {
if c.hash == hash_sum.hash {
return Err(Error::DuplicateCommitment(commit));
}
}

View file

@ -19,7 +19,7 @@ use std::io;
use util::secp::pedersen::Commitment;
use grin_store as store;
use core::core::{block, Block, BlockHeader, Output};
use core::core::{Block, BlockHeader, block, transaction};
use core::core::hash::{Hash, Hashed};
use core::core::target::Difficulty;
use core::ser;
@ -58,6 +58,8 @@ pub enum Error {
InvalidBlockHeight,
/// One of the root hashes in the block is invalid
InvalidRoot,
/// Something does not look right with the switch commitment
InvalidSwitchCommit,
/// One of the inputs in the block has already been spent
AlreadySpent(Commitment),
/// An output with that commitment already exists (should be unique)
@ -76,10 +78,12 @@ pub enum Error {
StoreErr(grin_store::Error, String),
/// Error serializing or deserializing a type
SerErr(ser::Error),
/// Error while updating the sum trees
/// Error with the sumtrees
SumTreeErr(String),
/// No chain exists and genesis block is required
GenesisBlockRequired,
/// Error from underlying tx handling
Transaction(transaction::Error),
/// Anything else
Other(String),
}
@ -117,6 +121,12 @@ impl Error {
}
}
impl From<transaction::Error> for Error {
fn from(e: transaction::Error) -> Error {
Error::Transaction(e)
}
}
/// The tip of a fork. A handle to the fork ancestry from its leaf in the
/// blockchain tree. References the max height and the latest and previous
/// blocks
@ -202,9 +212,6 @@ pub trait ChainStore: Send + Sync {
/// Gets a block header by hash
fn get_block_header(&self, h: &Hash) -> Result<BlockHeader, store::Error>;
/// Checks whether a block has been been processed and saved
fn check_block_exists(&self, h: &Hash) -> Result<bool, store::Error>;
/// Save the provided block in store
fn save_block(&self, b: &Block) -> Result<(), store::Error>;
@ -236,15 +243,6 @@ pub trait ChainStore: Send + Sync {
/// Use the header_by_height index to verify the block header is where we think it is.
fn is_on_current_chain(&self, header: &BlockHeader) -> Result<(), store::Error>;
/// Gets an output by its commitment
fn get_output_by_commit(&self, commit: &Commitment) -> Result<Output, store::Error>;
/// Gets a block_header for the given input commit
fn get_block_header_by_output_commit(
&self,
commit: &Commitment,
) -> Result<BlockHeader, store::Error>;
/// Saves the position of an output, represented by its commitment, in the
/// UTXO MMR. Used as an index for spending and pruning.
fn save_output_pos(&self, commit: &Commitment, pos: u64) -> Result<(), store::Error>;

View file

@ -26,7 +26,7 @@ use std::sync::Arc;
use chain::Chain;
use chain::types::*;
use core::core::{Block, BlockHeader, Transaction, build};
use core::core::{Block, BlockHeader, Transaction, OutputIdentifier, build};
use core::core::hash::Hashed;
use core::core::target::Difficulty;
use core::consensus;
@ -81,7 +81,7 @@ fn mine_empty_chain() {
vec![],
&keychain,
&pk,
difficulty.clone()
difficulty.clone(),
).unwrap();
b.header.timestamp = prev.timestamp + time::Duration::seconds(60);
@ -117,13 +117,6 @@ fn mine_empty_chain() {
// now check the block height index
let header_by_height = chain.get_header_by_height(n).unwrap();
assert_eq!(header_by_height.hash(), bhash);
// now check the header output index
let output = block.outputs[0];
let header_by_output_commit = chain
.get_block_header_by_output_commit(&output.commitment())
.unwrap();
assert_eq!(header_by_output_commit.hash(), bhash);
}
}
@ -250,17 +243,28 @@ fn spend_in_fork() {
let prev = chain.head_header().unwrap();
let kc = Keychain::from_random_seed().unwrap();
// mine 4 blocks, the 4th will be the root of the fork
let mut fork_head = prev;
for n in 2..6 {
// mine the first block and keep track of the block_hash
// so we can spend the coinbase later
let b = prepare_block(&kc, &fork_head, &chain, 2);
let block_hash = b.hash();
fork_head = b.header.clone();
chain.process_block(b, chain::SKIP_POW).unwrap();
// now mine three further blocks
for n in 3..6 {
let b = prepare_block(&kc, &fork_head, &chain, n);
fork_head = b.header.clone();
chain.process_block(b, chain::SKIP_POW).unwrap();
}
let lock_height = 1 + global::coinbase_maturity();
assert_eq!(lock_height, 4);
let (tx1, _) = build::transaction(
vec![
build::input(consensus::REWARD, kc.derive_key_id(2).unwrap()),
build::coinbase_input(consensus::REWARD, block_hash, kc.derive_key_id(2).unwrap()),
build::output(consensus::REWARD - 20000, kc.derive_key_id(30).unwrap()),
build::with_fee(20000),
],
@ -269,11 +273,11 @@ fn spend_in_fork() {
let next = prepare_block_tx(&kc, &fork_head, &chain, 7, vec![&tx1]);
let prev_main = next.header.clone();
chain.process_block(next, chain::SKIP_POW).unwrap();
chain.process_block(next.clone(), chain::SKIP_POW).unwrap();
let (tx2, _) = build::transaction(
vec![
build::input(consensus::REWARD - 20000, kc.derive_key_id(30).unwrap()),
build::input(consensus::REWARD - 20000, next.hash(), kc.derive_key_id(30).unwrap()),
build::output(consensus::REWARD - 40000, kc.derive_key_id(31).unwrap()),
build::with_fee(20000),
],
@ -297,9 +301,8 @@ fn spend_in_fork() {
let head = chain.head_header().unwrap();
assert_eq!(head.height, 6);
assert_eq!(head.hash(), prev_main.hash());
assert!(chain.is_unspent(&tx2.outputs[0].commitment()).unwrap());
let res = chain.is_unspent(&tx1.outputs[0].commitment());
assert!(!res.unwrap());
assert!(chain.is_unspent(&OutputIdentifier::from_output(&tx2.outputs[0])).is_ok());
assert!(chain.is_unspent(&OutputIdentifier::from_output(&tx1.outputs[0])).is_err());
// make the fork win
let fork_next = prepare_fork_block(&kc, &prev_fork, &chain, 10);
@ -310,8 +313,8 @@ fn spend_in_fork() {
let head = chain.head_header().unwrap();
assert_eq!(head.height, 7);
assert_eq!(head.hash(), prev_fork.hash());
assert!(chain.is_unspent(&tx2.outputs[0].commitment()).unwrap());
assert!(!chain.is_unspent(&tx1.outputs[0].commitment()).unwrap());
assert!(chain.is_unspent(&OutputIdentifier::from_output(&tx2.outputs[0])).is_ok());
assert!(chain.is_unspent(&OutputIdentifier::from_output(&tx1.outputs[0])).is_err());
}
fn prepare_block(kc: &Keychain, prev: &BlockHeader, chain: &Chain, diff: u64) -> Block {

View file

@ -55,7 +55,6 @@ fn test_various_store_indices() {
&key_id,
Difficulty::minimum()
).unwrap();
let commit = block.outputs[0].commitment();
let block_hash = block.hash();
chain_store.save_block(&block).unwrap();
@ -66,9 +65,4 @@ fn test_various_store_indices() {
let block_header = chain_store.get_header_by_height(1).unwrap();
assert_eq!(block_header.hash(), block_hash);
let block_header = chain_store
.get_block_header_by_output_commit(&commit)
.unwrap();
assert_eq!(block_header.hash(), block_hash);
}

View file

@ -99,18 +99,27 @@ fn test_coinbase_maturity() {
assert!(
block.outputs[0]
.features
.contains(transaction::COINBASE_OUTPUT,)
.contains(transaction::COINBASE_OUTPUT)
);
// we will need this later when we want to spend the coinbase output
let block_hash = block.hash();
chain.process_block(block, chain::NONE).unwrap();
let prev = chain.head_header().unwrap();
let amount = consensus::REWARD;
let lock_height = 1 + global::coinbase_maturity();
assert_eq!(lock_height, 4);
// here we build a tx that attempts to spend the earlier coinbase output
// this is not a valid tx as the coinbase output cannot be spent yet
let (coinbase_txn, _) = build::transaction(
vec![
build::input(amount, key_id1.clone()),
build::output(amount - 2, key_id2),
build::coinbase_input(amount, block_hash, key_id1.clone()),
build::output(amount - 2, key_id2.clone()),
build::with_fee(2),
],
&keychain,
@ -122,13 +131,17 @@ fn test_coinbase_maturity() {
vec![&coinbase_txn],
&keychain,
&key_id3,
Difficulty::minimum()
Difficulty::minimum(),
).unwrap();
block.header.timestamp = prev.timestamp + time::Duration::seconds(60);
let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
block.header.difficulty = difficulty.clone();
chain.set_sumtree_roots(&mut block, false).unwrap();
match chain.set_sumtree_roots(&mut block, false) {
Err(Error::ImmatureCoinbase) => (),
_ => panic!("expected ImmatureCoinbase error here"),
}
pow::pow_size(
&mut cuckoo_miner,
@ -137,14 +150,8 @@ fn test_coinbase_maturity() {
global::sizeshift() as u32,
).unwrap();
let result = chain.process_block(block, chain::NONE);
match result {
Err(Error::ImmatureCoinbase) => (),
_ => panic!("expected ImmatureCoinbase error here"),
};
// mine enough blocks to increase the height sufficiently for
// coinbase to reach maturity and be spendable in the next block
// coinbase to reach maturity and be spendable in the next block
for _ in 0..3 {
let prev = chain.head_header().unwrap();
@ -176,14 +183,22 @@ fn test_coinbase_maturity() {
let prev = chain.head_header().unwrap();
let mut block =
core::core::Block::new(
&prev,
vec![&coinbase_txn],
&keychain,
&key_id4,
Difficulty::minimum()
).unwrap();
let (coinbase_txn, _) = build::transaction(
vec![
build::coinbase_input(amount, block_hash, key_id1.clone()),
build::output(amount - 2, key_id2.clone()),
build::with_fee(2),
],
&keychain,
).unwrap();
let mut block = core::core::Block::new(
&prev,
vec![&coinbase_txn],
&keychain,
&key_id4,
Difficulty::minimum(),
).unwrap();
block.header.timestamp = prev.timestamp + time::Duration::seconds(60);

View file

@ -19,15 +19,25 @@ use util;
use util::{secp, static_secp_instance};
use std::collections::HashSet;
use core::Committed;
use core::{Input, Output, Proof, SwitchCommitHash, Transaction, TxKernel, COINBASE_KERNEL,
COINBASE_OUTPUT};
use core::{
Committed,
Input,
Output,
OutputIdentifier,
SwitchCommitHash,
Proof,
TxKernel,
Transaction,
COINBASE_KERNEL,
COINBASE_OUTPUT
};
use consensus;
use consensus::{exceeds_weight, reward, MINIMUM_DIFFICULTY, REWARD, VerifySortOrder};
use core::hash::{Hash, Hashed, ZERO_HASH};
use core::target::Difficulty;
use core::transaction;
use ser::{self, read_and_verify_sorted, Readable, Reader, Writeable, WriteableSorted, Writer};
use ser::{self, Readable, Reader, Writeable, Writer, WriteableSorted, read_and_verify_sorted};
use util::kernel_sig_msg;
use util::LOGGER;
use global;
use keychain;
@ -45,10 +55,7 @@ pub enum Error {
/// Too many inputs, outputs or kernels in the block
WeightExceeded,
/// Kernel not valid due to lock_height exceeding block header height
KernelLockHeight {
/// The lock_height causing this validation error
lock_height: u64,
},
KernelLockHeight(u64),
/// Underlying tx related error
Transaction(transaction::Error),
/// Underlying Secp256k1 error (signature validation or invalid public key typically)
@ -57,6 +64,15 @@ pub enum Error {
Keychain(keychain::Error),
/// Underlying consensus error (sort order currently)
Consensus(consensus::Error),
/// Coinbase has not yet matured and cannot be spent (1,000 blocks)
ImmatureCoinbase {
/// The height of the block containing the input spending the coinbase output
height: u64,
/// The lock_height needed to be reached for the coinbase output to mature
lock_height: u64,
},
/// Other unspecified error condition
Other(String)
}
impl From<transaction::Error> for Error {
@ -295,7 +311,12 @@ impl Block {
difficulty: Difficulty,
) -> Result<Block, Error> {
let fees = txs.iter().map(|tx| tx.fee).sum();
let (reward_out, reward_proof) = Block::reward_output(keychain, key_id, fees)?;
let (reward_out, reward_proof) = Block::reward_output(
keychain,
key_id,
fees,
prev.height + 1,
)?;
let block = Block::with_reward(prev, txs, reward_out, reward_proof, difficulty)?;
Ok(block)
}
@ -454,14 +475,20 @@ impl Block {
/// trees, reward, etc.
///
/// TODO - performs various verification steps - discuss renaming this to "verify"
/// as all the steps within are verify steps.
///
pub fn validate(&self) -> Result<(), Error> {
self.verify_weight()?;
self.verify_sorted()?;
self.verify_coinbase()?;
self.verify_kernels()?;
Ok(())
}
fn verify_weight(&self) -> Result<(), Error> {
if exceeds_weight(self.inputs.len(), self.outputs.len(), self.kernels.len()) {
return Err(Error::WeightExceeded);
}
self.verify_sorted()?;
self.verify_coinbase()?;
self.verify_kernels(false)?;
Ok(())
}
@ -474,15 +501,16 @@ impl Block {
/// Verifies the sum of input/output commitments match the sum in kernels
/// and that all kernel signatures are valid.
/// TODO - when would we skip_sig? Is this needed or used anywhere?
fn verify_kernels(&self, skip_sig: bool) -> Result<(), Error> {
fn verify_kernels(&self) -> Result<(), Error> {
for k in &self.kernels {
if k.fee & 1 != 0 {
return Err(Error::OddKernelFee);
}
// check we have no kernels with lock_heights greater than current height
// no tx can be included in a block earlier than its lock_height
if k.lock_height > self.header.height {
return Err(Error::KernelLockHeight { lock_height: k.lock_height });
return Err(Error::KernelLockHeight(k.lock_height));
}
}
@ -504,11 +532,10 @@ impl Block {
}
// verify all signatures with the commitment as pk
if !skip_sig {
for proof in &self.kernels {
proof.verify()?;
}
for proof in &self.kernels {
proof.verify()?;
}
Ok(())
}
@ -518,19 +545,17 @@ impl Block {
// * That the sum of blinding factors for all coinbase-marked outputs match
// the coinbase-marked kernels.
fn verify_coinbase(&self) -> Result<(), Error> {
let cb_outs = filter_map_vec!(self.outputs, |out| if out.features.contains(
COINBASE_OUTPUT,
)
{
Some(out.commitment())
} else {
None
});
let cb_kerns = filter_map_vec!(self.kernels, |k| if k.features.contains(COINBASE_KERNEL) {
Some(k.excess)
} else {
None
});
let cb_outs = self.outputs
.iter()
.filter(|out| out.features.contains(COINBASE_OUTPUT))
.cloned()
.collect::<Vec<Output>>();
let cb_kerns = self.kernels
.iter()
.filter(|kernel| kernel.features.contains(COINBASE_KERNEL))
.cloned()
.collect::<Vec<TxKernel>>();
let over_commit;
let out_adjust_sum;
@ -539,8 +564,14 @@ impl Block {
let secp = static_secp_instance();
let secp = secp.lock().unwrap();
over_commit = secp.commit_value(reward(self.total_fees()))?;
out_adjust_sum = secp.commit_sum(cb_outs, vec![over_commit])?;
kerns_sum = secp.commit_sum(cb_kerns, vec![])?;
out_adjust_sum = secp.commit_sum(
cb_outs.iter().map(|x| x.commitment()).collect(),
vec![over_commit],
)?;
kerns_sum = secp.commit_sum(
cb_kerns.iter().map(|x| x.excess).collect(),
vec![],
)?;
}
if kerns_sum != out_adjust_sum {
@ -549,15 +580,53 @@ impl Block {
Ok(())
}
/// NOTE: this happens during apply_block (not the earlier validate_block)
///
/// Calculate lock_height as block_height + 1,000
/// Confirm height <= lock_height
pub fn verify_coinbase_maturity(
&self,
input: &Input,
height: u64,
) -> Result<(), Error> {
let output = OutputIdentifier::from_input(&input);
// We should only be calling verify_coinbase_maturity
// if the sender claims we are spending a coinbase output
// _and_ that we trust this claim.
// We should have already confirmed the entry from the MMR exists
// and has the expected hash.
assert!(output.features.contains(COINBASE_OUTPUT));
if let Some(_) = self.outputs
.iter()
.find(|x| OutputIdentifier::from_output(&x) == output)
{
let lock_height = self.header.height + global::coinbase_maturity();
if lock_height > height {
Err(Error::ImmatureCoinbase{
height: height,
lock_height: lock_height,
})
} else {
Ok(())
}
} else {
Err(Error::Other(format!("output not found in block")))
}
}
/// Builds the blinded output and related signature proof for the block reward.
pub fn reward_output(
keychain: &keychain::Keychain,
key_id: &keychain::Identifier,
fees: u64,
height: u64,
) -> Result<(Output, TxKernel), keychain::Error> {
let commit = keychain.commit(reward(fees), key_id)?;
let switch_commit = keychain.switch_commit(key_id)?;
let switch_commit_hash = SwitchCommitHash::from_switch_commit(switch_commit);
trace!(
LOGGER,
"Block reward - Pedersen Commit is: {:?}, Switch Commit is: {:?}",
@ -585,15 +654,22 @@ impl Block {
let out_commit = output.commitment();
let excess = secp.commit_sum(vec![out_commit], vec![over_commit])?;
let msg = util::secp::Message::from_slice(&[0; secp::constants::MESSAGE_SIZE])?;
let sig = keychain.aggsig_sign_from_key_id(&msg, &key_id).unwrap();
// NOTE: Remember we sign the fee *and* the lock_height.
// For a coinbase output the fee is 0 and the lock_height is
// the lock_height of the coinbase output itself,
// not the lock_height of the tx (there is no tx for a coinbase output).
// This output will not be spendable earlier than lock_height (and we sign this here).
let msg = secp::Message::from_slice(&kernel_sig_msg(0, height))?;
let sig = keychain.aggsig_sign_from_key_id(&msg, &key_id)?;
let proof = TxKernel {
features: COINBASE_KERNEL,
excess: excess,
excess_sig: sig,
fee: 0,
lock_height: 0,
// lock_height here is the height of the block (tx should be valid immediately)
// *not* the lock_height of the coinbase output (only spendable 1,000 blocks later)
lock_height: height,
};
Ok((output, proof))
}
@ -602,6 +678,7 @@ impl Block {
#[cfg(test)]
mod test {
use super::*;
use core::hash::ZERO_HASH;
use core::Transaction;
use core::build::{self, input, output, with_fee};
use core::test::tx2i1o;
@ -633,7 +710,7 @@ mod test {
key_id2: Identifier,
) -> Transaction {
build::transaction(
vec![input(v, key_id1), output(3, key_id2), with_fee(2)],
vec![input(v, ZERO_HASH, key_id1), output(3, key_id2), with_fee(2)],
&keychain,
).map(|(tx, _)| tx)
.unwrap()
@ -657,7 +734,7 @@ mod test {
}
let now = Instant::now();
parts.append(&mut vec![input(500000, pks.pop().unwrap()), with_fee(2)]);
parts.append(&mut vec![input(500000, ZERO_HASH, pks.pop().unwrap()), with_fee(2)]);
let mut tx = build::transaction(parts, &keychain)
.map(|(tx, _)| tx)
.unwrap();
@ -677,7 +754,7 @@ mod test {
let mut btx1 = tx2i1o();
let (mut btx2, _) = build::transaction(
vec![input(7, key_id1), output(5, key_id2.clone()), with_fee(2)],
vec![input(7, ZERO_HASH, key_id1), output(5, key_id2.clone()), with_fee(2)],
&keychain,
).unwrap();
@ -705,7 +782,7 @@ mod test {
let mut btx1 = tx2i1o();
let (mut btx2, _) = build::transaction(
vec![input(7, key_id1), output(5, key_id2.clone()), with_fee(2)],
vec![input(7, ZERO_HASH, key_id1), output(5, key_id2.clone()), with_fee(2)],
&keychain,
).unwrap();
@ -767,7 +844,7 @@ mod test {
b.verify_coinbase(),
Err(Error::CoinbaseSumMismatch)
);
assert_eq!(b.verify_kernels(false), Ok(()));
assert_eq!(b.verify_kernels(), Ok(()));
assert_eq!(
b.validate(),
@ -789,7 +866,6 @@ mod test {
b.verify_coinbase(),
Err(Error::Secp(secp::Error::IncorrectCommitSum))
);
assert_eq!(b.verify_kernels(true), Ok(()));
assert_eq!(
b.validate(),

View file

@ -27,10 +27,11 @@
use util::{secp, kernel_sig_msg};
use core::{Input, Output, SwitchCommitHash, Transaction, DEFAULT_OUTPUT};
use util::LOGGER;
use core::{Transaction, Input, Output, OutputFeatures, SwitchCommitHash, COINBASE_OUTPUT, DEFAULT_OUTPUT};
use core::hash::Hash;
use keychain;
use keychain::{BlindSum, BlindingFactor, Identifier, Keychain};
use keychain::{Keychain, BlindSum, BlindingFactor, Identifier};
use util::LOGGER;
/// Context information available to transaction combinators.
pub struct Context<'a> {
@ -43,17 +44,56 @@ pub type Append = for<'a> Fn(&'a mut Context, (Transaction, BlindSum)) -> (Trans
/// Adds an input with the provided value and blinding key to the transaction
/// being built.
pub fn input(value: u64, key_id: Identifier) -> Box<Append> {
fn build_input(
value: u64,
features: OutputFeatures,
out_block: Option<Hash>,
key_id: Identifier,
) -> Box<Append> {
Box::new(move |build, (tx, sum)| -> (Transaction, BlindSum) {
let commit = build.keychain.commit(value, &key_id).unwrap();
(tx.with_input(Input(commit)), sum.sub_key_id(key_id.clone()))
let input = Input::new(
features,
commit,
out_block,
);
(tx.with_input(input), sum.sub_key_id(key_id.clone()))
})
}
/// Adds an input with the provided value and blinding key to the transaction
/// being built.
pub fn input(
value: u64,
out_block: Hash,
key_id: Identifier,
) -> Box<Append> {
debug!(LOGGER, "Building input (spending regular output): {}, {}", value, key_id);
build_input(value, DEFAULT_OUTPUT, Some(out_block), key_id)
}
/// Adds a coinbase input spending a coinbase output.
/// We will use the block hash to verify coinbase maturity.
pub fn coinbase_input(
value: u64,
out_block: Hash,
key_id: Identifier,
) -> Box<Append> {
debug!(LOGGER, "Building input (spending coinbase): {}, {}", value, key_id);
build_input(value, COINBASE_OUTPUT, Some(out_block), key_id)
}
/// Adds an output with the provided value and key identifier from the
/// keychain.
pub fn output(value: u64, key_id: Identifier) -> Box<Append> {
Box::new(move |build, (tx, sum)| -> (Transaction, BlindSum) {
debug!(
LOGGER,
"Building an output: {}, {}",
value,
key_id,
);
let commit = build.keychain.commit(value, &key_id).unwrap();
let switch_commit = build.keychain.switch_commit(&key_id).unwrap();
let switch_commit_hash = SwitchCommitHash::from_switch_commit(switch_commit);
@ -61,7 +101,7 @@ pub fn output(value: u64, key_id: Identifier) -> Box<Append> {
LOGGER,
"Builder - Pedersen Commit is: {:?}, Switch Commit is: {:?}",
commit,
switch_commit
switch_commit,
);
trace!(
LOGGER,
@ -145,6 +185,7 @@ pub fn transaction(
#[cfg(test)]
mod test {
use super::*;
use core::hash::ZERO_HASH;
#[test]
fn blind_simple_tx() {
@ -155,8 +196,8 @@ mod test {
let (tx, _) = transaction(
vec![
input(10, key_id1),
input(11, key_id2),
input(10, ZERO_HASH, key_id1),
input(11, ZERO_HASH, key_id2),
output(20, key_id3),
with_fee(1),
],
@ -173,7 +214,7 @@ mod test {
let key_id2 = keychain.derive_key_id(2).unwrap();
let (tx, _) = transaction(
vec![input(6, key_id1), output(2, key_id2), with_fee(4)],
vec![input(6, ZERO_HASH, key_id1), output(2, key_id2), with_fee(4)],
&keychain,
).unwrap();

View file

@ -25,6 +25,7 @@ use blake2::blake2b::Blake2b;
use consensus;
use ser::{self, AsFixedBytes, Error, Readable, Reader, Writeable, Writer};
use util;
use util::LOGGER;
/// A hash consisting of all zeroes, used as a sentinel. No known preimage.
@ -65,6 +66,22 @@ impl Hash {
pub fn to_vec(&self) -> Vec<u8> {
self.0.to_vec()
}
/// The "zero" hash. No known preimage.
pub fn zero() -> Hash {
ZERO_HASH
}
/// Convert a hash to hex string format.
pub fn to_hex(&self) -> String {
util::to_hex(self.to_vec())
}
/// Convert hex string back to hash.
pub fn from_hex(hex: &str) -> Result<Hash, Error> {
let bytes = util::from_hex(hex.to_string()).unwrap();
Ok(Hash::from_vec(bytes))
}
}
impl ops::Index<usize> for Hash {

View file

@ -28,7 +28,6 @@ use std::cmp::Ordering;
use std::num::ParseFloatError;
use consensus::GRIN_BASE;
use core::target::Difficulty;
use util::{secp, static_secp_instance};
use util::secp::pedersen::*;
@ -211,6 +210,7 @@ pub fn amount_to_hr_string(amount: u64) -> String {
#[cfg(test)]
mod test {
use super::*;
use core::target::Difficulty;
use core::hash::ZERO_HASH;
use core::build::{initial_tx, input, output, with_excess, with_fee, with_lock_height};
use core::block::Error::KernelLockHeight;
@ -248,7 +248,7 @@ mod test {
// blinding should fail as signing with a zero r*G shouldn't work
build::transaction(
vec![
input(10, key_id1.clone()),
input(10, ZERO_HASH, key_id1.clone()),
output(9, key_id1.clone()),
with_fee(1),
],
@ -260,10 +260,9 @@ mod test {
fn simple_tx_ser() {
let tx = tx2i1o();
let mut vec = Vec::new();
ser::serialize(&mut vec, &tx).expect("serialized failed");
ser::serialize(&mut vec, &tx).expect("serialization failed");
println!("{}", vec.len());
assert!(vec.len() > 5340);
assert!(vec.len() < 5370);
assert!(vec.len() == 5352);
}
#[test]
@ -304,7 +303,7 @@ mod test {
let (tx, _) = build::transaction(
vec![
input(75, key_id1),
input(75, ZERO_HASH, key_id1),
output(42, key_id2),
output(32, key_id3),
with_fee(1),
@ -359,7 +358,7 @@ mod test {
{
// Alice gets 2 of her pre-existing outputs to send 5 coins to Bob, they
// become inputs in the new transaction
let (in1, in2) = (input(4, key_id1), input(3, key_id2));
let (in1, in2) = (input(4, ZERO_HASH, key_id1), input(3, ZERO_HASH, key_id2));
// Alice builds her transaction, with change, which also produces the sum
// of blinding factors before they're obscured.
@ -448,7 +447,7 @@ mod test {
// and that the resulting block is valid
let tx1 = build::transaction(
vec![
input(5, key_id1.clone()),
input(5, ZERO_HASH, key_id1.clone()),
output(3, key_id2.clone()),
with_fee(2),
with_lock_height(1),
@ -469,7 +468,7 @@ mod test {
// now try adding a timelocked tx where lock height is greater than current block height
let tx1 = build::transaction(
vec![
input(5, key_id1.clone()),
input(5, ZERO_HASH, key_id1.clone()),
output(3, key_id2.clone()),
with_fee(2),
with_lock_height(2),
@ -486,7 +485,7 @@ mod test {
Difficulty::minimum(),
).unwrap();
match b.validate() {
Err(KernelLockHeight { lock_height: height }) => {
Err(KernelLockHeight(height)) => {
assert_eq!(height, 2);
}
_ => panic!("expecting KernelLockHeight error here"),
@ -514,8 +513,8 @@ mod test {
build::transaction(
vec![
input(10, key_id1),
input(11, key_id2),
input(10, ZERO_HASH, key_id1),
input(11, ZERO_HASH, key_id2),
output(19, key_id3),
with_fee(2),
],
@ -531,7 +530,7 @@ mod test {
let key_id2 = keychain.derive_key_id(2).unwrap();
build::transaction(
vec![input(5, key_id1), output(3, key_id2), with_fee(2)],
vec![input(5, ZERO_HASH, key_id1), output(3, key_id2), with_fee(2)],
&keychain,
).map(|(tx, _)| tx)
.unwrap()

View file

@ -17,20 +17,25 @@ use blake2::blake2b::blake2b;
use util::secp::{self, Message, Signature};
use util::{static_secp_instance, kernel_sig_msg};
use util::secp::pedersen::{Commitment, RangeProof};
use std::cmp::min;
use std::cmp::Ordering;
use std::ops;
use consensus;
use consensus::VerifySortOrder;
use core::Committed;
use core::hash::Hashed;
use core::hash::{Hash, Hashed, ZERO_HASH};
use core::pmmr::Summable;
use keychain::{Identifier, Keychain};
use ser::{self, read_and_verify_sorted, Readable, Reader, Writeable, WriteableSorted, Writer};
use util;
/// The size to use for the stored blake2 hash of a switch_commitment
pub const SWITCH_COMMIT_HASH_SIZE: usize = 20;
/// The size of the secret key used to generate the switch commitment hash (blake2)
pub const SWITCH_COMMIT_KEY_SIZE: usize = 20;
bitflags! {
/// Options for a kernel's structure or use
pub flags KernelFeatures: u8 {
@ -69,13 +74,16 @@ macro_rules! hashable_ord {
pub enum Error {
/// Transaction fee can't be odd, due to half fee burning
OddFee,
/// Underlying Secp256k1 error (signature validation or invalid public
/// key typically)
/// Underlying Secp256k1 error (signature validation or invalid public key typically)
Secp(secp::Error),
/// Restrict number of incoming inputs
TooManyInputs,
/// Underlying consensus error (currently for sort order)
ConsensusError(consensus::Error),
/// Error originating from an invalid lock-height
LockHeight(u64),
/// Error originating from an invalid switch commitment (coinbase lock_height related)
SwitchCommitment,
}
impl From<secp::Error> for Error {
@ -149,14 +157,12 @@ impl TxKernel {
/// as a public key and checking the signature verifies with the fee as
/// message.
pub fn verify(&self) -> Result<(), secp::Error> {
let msg = try!(Message::from_slice(
&kernel_sig_msg(self.fee, self.lock_height),
));
let msg = Message::from_slice(&kernel_sig_msg(self.fee, self.lock_height))?;
let secp = static_secp_instance();
let secp = secp.lock().unwrap();
let sig = &self.excess_sig;
let valid = Keychain::aggsig_verify_single_from_commit(&secp, &sig, &msg, &self.excess);
if !valid{
if !valid {
return Err(secp::Error::IncorrectSignature);
}
Ok(())
@ -172,8 +178,7 @@ pub struct Transaction {
pub outputs: Vec<Output>,
/// Fee paid by the transaction.
pub fee: u64,
/// Transaction is not valid before this block height.
/// It is invalid for this to be less than the lock_height of any UTXO being spent.
/// Transaction is not valid before this chain height.
pub lock_height: u64,
/// The signature proving the excess is a valid public key, which signs
/// the transaction fee.
@ -184,7 +189,6 @@ pub struct Transaction {
/// write the transaction as binary.
impl Writeable for Transaction {
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ser::Error> {
println!("Excess sig write: {:?}", self.excess_sig);
ser_multiwrite!(
writer,
[write_u64, self.fee],
@ -335,7 +339,7 @@ impl Transaction {
// pretend the sum is a public key (which it is, being of the form r.G) and
// verify the transaction sig with it
let valid = Keychain::aggsig_verify_single_from_commit(&secp, &sig, &msg, &rsum);
if !valid{
if !valid {
return Err(secp::Error::IncorrectSignature);
}
Ok(rsum)
@ -377,10 +381,23 @@ impl Transaction {
}
}
/// A transaction input, mostly a reference to an output being spent by the
/// transaction.
#[derive(Debug, Copy, Clone)]
pub struct Input(pub Commitment);
/// A transaction input.
///
/// Primarily a reference to an output being spent by the transaction.
/// But also information required to verify coinbase maturity through
/// the lock_height hashed in the switch_commit_hash.
#[derive(Debug, Clone, Copy)]
pub struct Input{
/// The features of the output being spent.
/// We will check maturity for coinbase output.
pub features: OutputFeatures,
/// The commit referencing the output being spent.
pub commit: Commitment,
/// The hash of the block the output originated from.
/// Currently we only care about this for coinbase outputs.
/// TODO - include the merkle proof here once we support these.
pub out_block: Option<Hash>,
}
hashable_ord!(Input);
@ -388,7 +405,14 @@ hashable_ord!(Input);
/// an Input as binary.
impl Writeable for Input {
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ser::Error> {
writer.write_fixed_bytes(&self.0)
writer.write_u8(self.features.bits())?;
writer.write_fixed_bytes(&self.commit)?;
if self.features.contains(COINBASE_OUTPUT) {
writer.write_fixed_bytes(&self.out_block.unwrap_or(ZERO_HASH))?;
}
Ok(())
}
}
@ -396,16 +420,49 @@ impl Writeable for Input {
/// an Input from a binary stream.
impl Readable for Input {
fn read(reader: &mut Reader) -> Result<Input, ser::Error> {
Ok(Input(Commitment::read(reader)?))
let features = OutputFeatures::from_bits(reader.read_u8()?).ok_or(
ser::Error::CorruptedData,
)?;
let commit = Commitment::read(reader)?;
let out_block = if features.contains(COINBASE_OUTPUT) {
Some(Hash::read(reader)?)
} else {
None
};
Ok(Input::new(
features,
commit,
out_block,
))
}
}
/// The input for a transaction, which spends a pre-existing output. The input
/// commitment is a reproduction of the commitment of the output it's spending.
/// The input for a transaction, which spends a pre-existing unspent output.
/// The input commitment is a reproduction of the commitment of the output being spent.
/// Input must also provide the original output features and the hash of the block
/// the output originated from.
impl Input {
/// Extracts the referenced commitment from a transaction output
/// Build a new input from the data required to identify and verify an output beng spent.
pub fn new(
features: OutputFeatures,
commit: Commitment,
out_block: Option<Hash>,
) -> Input {
Input {
features,
commit,
out_block,
}
}
/// The input commitment which _partially_ identifies the output being spent.
/// In the presence of a fork we need additional info to uniquely identify the output.
/// Specifically the block hash (so correctly calculate lock_height for coinbase outputs).
pub fn commitment(&self) -> Commitment {
self.0
self.commit
}
}
@ -422,15 +479,23 @@ bitflags! {
/// Definition of the switch commitment hash
#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)]
pub struct SwitchCommitHash {
/// simple hash
pub hash: [u8; SWITCH_COMMIT_HASH_SIZE],
pub struct SwitchCommitHashKey ([u8; SWITCH_COMMIT_KEY_SIZE]);
impl SwitchCommitHashKey {
/// We use a zero value key for regular transactions.
pub fn zero() -> SwitchCommitHashKey {
SwitchCommitHashKey([0; SWITCH_COMMIT_KEY_SIZE])
}
}
/// Definition of the switch commitment hash
#[derive(Copy, Clone, PartialEq, Serialize, Deserialize)]
pub struct SwitchCommitHash([u8; SWITCH_COMMIT_HASH_SIZE]);
/// Implementation of Writeable for a switch commitment hash
impl Writeable for SwitchCommitHash {
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ser::Error> {
writer.write_fixed_bytes(&self.hash)?;
writer.write_fixed_bytes(&self.0)?;
Ok(())
}
}
@ -444,31 +509,62 @@ impl Readable for SwitchCommitHash {
for i in 0..SWITCH_COMMIT_HASH_SIZE {
c[i] = a[i];
}
Ok(SwitchCommitHash { hash: c })
Ok(SwitchCommitHash(c))
}
}
// As Ref for AsFixedBytes
impl AsRef<[u8]> for SwitchCommitHash {
fn as_ref(&self) -> &[u8] {
&self.hash
&self.0
}
}
impl ::std::fmt::Debug for SwitchCommitHash {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
try!(write!(f, "{}(", stringify!(SwitchCommitHash)));
try!(write!(f, "{}", self.to_hex()));
write!(f, ")")
}
}
impl SwitchCommitHash {
/// Builds a switch commitment hash from a switch commit using blake2
/// Builds a switch commit hash from a switch commit using blake2
pub fn from_switch_commit(switch_commit: Commitment) -> SwitchCommitHash {
let switch_commit_hash = blake2b(SWITCH_COMMIT_HASH_SIZE, &[], &switch_commit.0);
// always use the "zero" key for now
let key = SwitchCommitHashKey::zero();
let switch_commit_hash = blake2b(SWITCH_COMMIT_HASH_SIZE, &key.0, &switch_commit.0);
let switch_commit_hash = switch_commit_hash.as_bytes();
let mut h = [0; SWITCH_COMMIT_HASH_SIZE];
for i in 0..SWITCH_COMMIT_HASH_SIZE {
h[i] = switch_commit_hash[i];
}
SwitchCommitHash { hash: h }
SwitchCommitHash(h)
}
/// Reconstructs a switch commit hash from an array of bytes.
pub fn from_bytes(bytes: &[u8]) -> SwitchCommitHash {
let mut hash = [0; SWITCH_COMMIT_HASH_SIZE];
for i in 0..min(SWITCH_COMMIT_HASH_SIZE, bytes.len()) {
hash[i] = bytes[i];
}
SwitchCommitHash(hash)
}
/// Hex string represenation of a switch commitment hash.
pub fn to_hex(&self) -> String {
util::to_hex(self.0.to_vec())
}
/// Reconstrcuts a switch commit hash from a hex string.
pub fn from_hex(hex: &str) -> Result<SwitchCommitHash, ser::Error> {
let bytes = util::from_hex(hex.to_string())
.map_err(|_| ser::Error::HexError(format!("switch_commit_hash from_hex error")))?;
Ok(SwitchCommitHash::from_bytes(&bytes))
}
/// Build an "zero" switch commitment hash
pub fn zero() -> SwitchCommitHash {
SwitchCommitHash { hash: [0; SWITCH_COMMIT_HASH_SIZE] }
SwitchCommitHash([0; SWITCH_COMMIT_HASH_SIZE])
}
}
@ -476,18 +572,18 @@ impl SwitchCommitHash {
/// transferred. The commitment is a blinded value for the output while the
/// range proof guarantees the commitment includes a positive value without
/// overflow and the ownership of the private key. The switch commitment hash
/// provides future-proofing against quantum-based attacks, as well as provides
/// provides future-proofing against quantum-based attacks, as well as providing
/// wallet implementations with a way to identify their outputs for wallet
/// reconstruction
/// reconstruction.
///
/// The hash of an output only covers its features, lock_height, commitment,
/// The hash of an output only covers its features, commitment,
/// and switch commitment. The range proof is expected to have its own hash
/// and is stored and committed to separately.
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub struct Output {
/// Options for an output's structure or use
pub features: OutputFeatures,
/// The homomorphic commitment representing the output's amount
/// The homomorphic commitment representing the output amount
pub commit: Commitment,
/// The switch commitment hash, a 160 bit length blake2 hash of blind*J
pub switch_commit_hash: SwitchCommitHash,
@ -569,24 +665,127 @@ impl Output {
}
}
/// Wrapper to Output commitments to provide the Summable trait.
#[derive(Clone, Debug)]
pub struct SumCommit {
/// An output_identifier can be build from either an input _or_ and output and
/// contains everything we need to uniquely identify an output being spent.
/// Needed because it is not sufficient to pass a commitment around.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
pub struct OutputIdentifier {
/// Output features (coinbase vs. regular transaction output)
/// We need to include this when hashing to ensure coinbase maturity can be enforced.
pub features: OutputFeatures,
/// Output commitment
pub commit: Commitment,
}
impl OutputIdentifier {
/// Build a new output_identifier.
pub fn new(features: OutputFeatures, commit: &Commitment) -> OutputIdentifier {
OutputIdentifier {
features: features.clone(),
commit: commit.clone(),
}
}
/// Build an output_identifier from an existing output.
pub fn from_output(output: &Output) -> OutputIdentifier {
OutputIdentifier {
features: output.features,
commit: output.commit,
}
}
/// Build an output_identifier from an existing input.
pub fn from_input(input: &Input) -> OutputIdentifier {
OutputIdentifier {
features: input.features,
commit: input.commit,
}
}
/// convert an output_identifier to hex string format.
pub fn to_hex(&self) -> String {
format!(
"{:b}{}",
self.features.bits(),
util::to_hex(self.commit.0.to_vec()),
)
}
/// Convert an output_indentifier to a sum_commit representation
/// so we can use it to query the the output MMR
pub fn as_sum_commit(&self) -> SumCommit {
SumCommit::new(self.features, &self.commit)
}
/// Convert a sum_commit back to an output_identifier.
pub fn from_sum_commit(sum_commit: &SumCommit) -> OutputIdentifier {
OutputIdentifier::new(sum_commit.features, &sum_commit.commit)
}
}
impl Writeable for OutputIdentifier {
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ser::Error> {
writer.write_u8(self.features.bits())?;
self.commit.write(writer)?;
Ok(())
}
}
impl Readable for OutputIdentifier {
fn read(reader: &mut Reader) -> Result<OutputIdentifier, ser::Error> {
let features = OutputFeatures::from_bits(reader.read_u8()?).ok_or(
ser::Error::CorruptedData,
)?;
Ok(OutputIdentifier {
commit: Commitment::read(reader)?,
features: features,
})
}
}
/// Wrapper to Output commitments to provide the Summable trait.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct SumCommit {
/// Output features (coinbase vs. regular transaction output)
/// We need to include this when hashing to ensure coinbase maturity can be enforced.
pub features: OutputFeatures,
/// Output commitment
pub commit: Commitment,
/// The corresponding "switch commit hash"
pub switch_commit_hash: SwitchCommitHash,
}
impl SumCommit {
/// For when we do not care about the switch_commit_hash
/// for example when comparing sum_commit hashes
pub fn from_commit(commit: &Commitment) -> SumCommit {
/// Build a new sum_commit.
pub fn new(features: OutputFeatures, commit: &Commitment) -> SumCommit {
SumCommit {
features: features.clone(),
commit: commit.clone(),
switch_commit_hash: SwitchCommitHash::zero(),
}
}
/// Build a new sum_commit from an existing output.
pub fn from_output(output: &Output) -> SumCommit {
SumCommit {
features: output.features,
commit: output.commit,
}
}
/// Build a new sum_commit from an existing input.
pub fn from_input(input: &Input) -> SumCommit {
SumCommit {
features: input.features,
commit: input.commit,
}
}
/// Convert a sum_commit to hex string.
pub fn to_hex(&self) -> String {
format!(
"{:b}{}",
self.features.bits(),
util::to_hex(self.commit.0.to_vec()),
)
}
}
/// Outputs get summed through their commitments.
@ -596,33 +795,31 @@ impl Summable for SumCommit {
fn sum(&self) -> SumCommit {
SumCommit {
commit: self.commit.clone(),
switch_commit_hash: self.switch_commit_hash.clone(),
features: self.features.clone(),
}
}
fn sum_len() -> usize {
secp::constants::PEDERSEN_COMMITMENT_SIZE + SWITCH_COMMIT_HASH_SIZE
secp::constants::PEDERSEN_COMMITMENT_SIZE + 1
}
}
impl Writeable for SumCommit {
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ser::Error> {
writer.write_u8(self.features.bits())?;
self.commit.write(writer)?;
if writer.serialization_mode() == ser::SerializationMode::Full {
self.switch_commit_hash.write(writer)?;
}
Ok(())
}
}
impl Readable for SumCommit {
fn read(reader: &mut Reader) -> Result<SumCommit, ser::Error> {
let commit = Commitment::read(reader)?;
let switch_commit_hash = SwitchCommitHash::read(reader)?;
let features = OutputFeatures::from_bits(reader.read_u8()?).ok_or(
ser::Error::CorruptedData,
)?;
Ok(SumCommit {
commit: commit,
switch_commit_hash: switch_commit_hash,
commit: Commitment::read(reader)?,
features: features,
})
}
}
@ -642,7 +839,10 @@ impl ops::Add for SumCommit {
Ok(s) => s,
Err(_) => Commitment::from_vec(vec![1; 33]),
};
SumCommit::from_commit(&sum)
SumCommit::new(
self.features | other.features,
&sum,
)
}
}

View file

@ -50,6 +50,8 @@ pub enum Error {
TooLargeReadErr,
/// Consensus rule failure (currently sort order)
ConsensusError(consensus::Error),
/// Error from from_hex deserialization
HexError(String),
}
impl From<io::Error> for Error {
@ -75,6 +77,7 @@ impl fmt::Display for Error {
Error::CorruptedData => f.write_str("corrupted data"),
Error::TooLargeReadErr => f.write_str("too large read"),
Error::ConsensusError(ref e) => write!(f, "consensus error {:?}", e),
Error::HexError(ref e) => write!(f, "hex error {:?}", e),
}
}
}
@ -97,6 +100,7 @@ impl error::Error for Error {
Error::CorruptedData => "corrupted data",
Error::TooLargeReadErr => "too large read",
Error::ConsensusError(_) => "consensus error (sort order)",
Error::HexError(_) => "hex error",
}
}
}

View file

@ -17,13 +17,13 @@ use std::sync::{Arc, RwLock};
use std::sync::atomic::{AtomicBool, Ordering};
use chain::{self, ChainAdapter};
use core::core::{self, Output};
use core::core;
use core::core::block::BlockHeader;
use core::core::hash::{Hash, Hashed};
use core::core::target::Difficulty;
use core::core::transaction::{Input, OutputIdentifier};
use p2p;
use pool;
use util::secp::pedersen::Commitment;
use util::OneTime;
use store;
use util::LOGGER;
@ -332,10 +332,10 @@ impl PoolToChainAdapter {
}
impl pool::BlockChain for PoolToChainAdapter {
fn get_unspent(&self, output_ref: &Commitment) -> Result<Output, pool::PoolError> {
fn is_unspent(&self, output_ref: &OutputIdentifier) -> Result<(), pool::PoolError> {
self.chain
.borrow()
.get_unspent(output_ref)
.is_unspent(output_ref)
.map_err(|e| match e {
chain::types::Error::OutputNotFound => pool::PoolError::OutputNotFound,
chain::types::Error::OutputSpent => pool::PoolError::OutputSpent,
@ -343,15 +343,15 @@ impl pool::BlockChain for PoolToChainAdapter {
})
}
fn get_block_header_by_output_commit(
&self,
commit: &Commitment,
) -> Result<BlockHeader, pool::PoolError> {
fn is_matured(&self, input: &Input, height: u64) -> Result<(), pool::PoolError> {
self.chain
.borrow()
.get_block_header_by_output_commit(commit)
.map_err(|_| pool::PoolError::GenericPoolError)
}
.is_matured(input, height)
.map_err(|e| match e {
chain::types::Error::OutputNotFound => pool::PoolError::OutputNotFound,
_ => pool::PoolError::GenericPoolError,
})
}
fn head_header(&self) -> Result<BlockHeader, pool::PoolError> {
self.chain

View file

@ -590,7 +590,6 @@ impl Miner {
b.header.nonce = rng.gen();
b.header.difficulty = difficulty;
b.header.timestamp = time::at_utc(time::Timespec::new(now_sec, 0));
trace!(LOGGER, "Block: {:?}", b);
let roots_result = self.chain.set_sumtree_roots(&mut b, false);
@ -621,8 +620,12 @@ impl Miner {
) -> Result<(core::Output, core::TxKernel, BlockFees), Error> {
let keychain = Keychain::from_random_seed().unwrap();
let key_id = keychain.derive_key_id(1).unwrap();
let (out, kernel) =
core::Block::reward_output(&keychain, &key_id, block_fees.fees).unwrap();
let (out, kernel) = core::Block::reward_output(
&keychain,
&key_id,
block_fees.fees,
block_fees.height,
).unwrap();
Ok((out, kernel, block_fees))
}
@ -651,7 +654,7 @@ impl Miner {
..block_fees
};
debug!(LOGGER, "block_fees here: {:?}", block_fees);
debug!(LOGGER, "get_coinbase: {:?}", block_fees);
Ok((output, kernel, block_fees))
}

View file

@ -343,8 +343,11 @@ impl Keychain {
Ok(sig)
}
/// Helper function to calculate final public key
pub fn aggsig_calculate_final_pubkey(&self, their_public_key: &PublicKey) -> Result<PublicKey, Error> {
/// Helper function to calculate final public key
pub fn aggsig_calculate_final_pubkey(
&self,
their_public_key: &PublicKey,
) -> Result<PublicKey, Error> {
let (our_sec_key, _) = self.aggsig_get_private_keys();
let mut pk_sum = their_public_key.clone();
let _ = pk_sum.add_exp_assign(&self.secp, &our_sec_key);
@ -352,20 +355,29 @@ impl Keychain {
}
/// Just a simple sig, creates its own nonce, etc
pub fn aggsig_sign_from_key_id(&self, msg: &Message, key_id: &Identifier) -> Result<Signature, Error> {
pub fn aggsig_sign_from_key_id(
&self,
msg: &Message,
key_id: &Identifier,
) -> Result<Signature, Error> {
let skey = self.derived_key(key_id)?;
let sig = aggsig::sign_single(&self.secp, &msg, &skey, None, None, None)?;
Ok(sig)
}
/// Verifies a sig given a commitment
pub fn aggsig_verify_single_from_commit(secp:&Secp256k1, sig: &Signature, msg: &Message, commit:&Commitment) -> bool {
pub fn aggsig_verify_single_from_commit(
secp:&Secp256k1,
sig: &Signature,
msg: &Message,
commit: &Commitment,
) -> bool {
// Extract the pubkey, unfortunately we need this hack for now, (we just hope one is valid)
// TODO: Create better secp256k1 API to do this
let pubkeys = commit.to_two_pubkeys(secp);
let mut valid=false;
let mut valid = false;
for i in 0..pubkeys.len() {
valid=aggsig::verify_single(secp, &sig, &msg, None, &pubkeys[i], false);
valid = aggsig::verify_single(secp, &sig, &msg, None, &pubkeys[i], false);
if valid {
break;
}
@ -374,7 +386,11 @@ impl Keychain {
}
/// Just a simple sig, creates its own nonce, etc
pub fn aggsig_sign_with_blinding(secp:&Secp256k1, msg: &Message, blinding:&BlindingFactor) -> Result<Signature, Error> {
pub fn aggsig_sign_with_blinding(
secp: &Secp256k1,
msg: &Message,
blinding: &BlindingFactor,
) -> Result<Signature, Error> {
let sig = aggsig::sign_single(secp, &msg, &blinding.secret_key(), None, None, None)?;
Ok(sig)
}
@ -417,7 +433,7 @@ mod test {
let msg = secp::Message::from_slice(&msg_bytes[..]).unwrap();
// now create a zero commitment using the key on the keychain associated with
// the key_id
// the key_id
let commit = keychain.commit(0, &key_id).unwrap();
// now check we can use our key to verify a signature from this zero commitment
@ -439,7 +455,7 @@ mod test {
assert_eq!(proof_info.value, 5);
// now check the recovered message is "empty" (but not truncated) i.e. all
// zeroes
// zeroes
assert_eq!(
proof_info.message,
secp::pedersen::ProofMessage::from_bytes(&[0; secp::constants::PROOF_MSG_SIZE])

View file

@ -12,5 +12,6 @@ grin_p2p = { path = "../p2p" }
grin_util = { path = "../util" }
serde = "~1.0.8"
serde_derive = "~1.0.8"
slog = { version = "^2.0.12", features = ["max_level_trace", "release_max_level_trace"] }
time = "^0.1"
rand = "0.3"

View file

@ -7,39 +7,17 @@
// Notably, UtxoDiff has been left off, and the question of how to handle
// abstract return types has been deferred.
use core::core::hash;
use core::core::block;
use core::core::transaction;
use std::collections::HashMap;
use std::clone::Clone;
use util::secp::pedersen::Commitment;
use std::sync::RwLock;
use core::core::{block, hash, transaction};
use core::core::{COINBASE_OUTPUT, Input, OutputIdentifier};
use core::global;
use core::core::hash::Hashed;
use types::{BlockChain, PoolError};
use util::secp::pedersen::Commitment;
#[derive(Debug)]
pub struct DummyBlockHeaderIndex {
block_headers: HashMap<Commitment, block::BlockHeader>,
}
impl DummyBlockHeaderIndex {
pub fn insert(&mut self, commit: Commitment, block_header: block::BlockHeader) {
self.block_headers.insert(commit, block_header);
}
pub fn get_block_header_by_output_commit(
&self,
commit: Commitment,
) -> Result<&block::BlockHeader, PoolError> {
match self.block_headers.get(&commit) {
Some(h) => Ok(h),
None => Err(PoolError::GenericPoolError),
}
}
}
/// A DummyUtxoSet for mocking up the chain
pub struct DummyUtxoSet {
@ -53,21 +31,25 @@ impl DummyUtxoSet {
outputs: HashMap::new(),
}
}
pub fn root(&self) -> hash::Hash {
hash::ZERO_HASH
}
pub fn apply(&self, b: &block::Block) -> DummyUtxoSet {
let mut new_hashmap = self.outputs.clone();
let mut new_outputs = self.outputs.clone();
for input in &b.inputs {
new_hashmap.remove(&input.commitment());
new_outputs.remove(&input.commitment());
}
for output in &b.outputs {
new_hashmap.insert(output.commitment(), output.clone());
new_outputs.insert(output.commitment(), output.clone());
}
DummyUtxoSet {
outputs: new_hashmap,
outputs: new_outputs,
}
}
pub fn with_block(&mut self, b: &block::Block) {
for input in &b.inputs {
self.outputs.remove(&input.commitment());
@ -76,11 +58,13 @@ impl DummyUtxoSet {
self.outputs.insert(output.commitment(), output.clone());
}
}
pub fn rewind(&self, _: &block::Block) -> DummyUtxoSet {
DummyUtxoSet {
outputs: HashMap::new(),
}
}
pub fn get_output(&self, output_ref: &Commitment) -> Option<&transaction::Output> {
self.outputs.get(output_ref)
}
@ -92,14 +76,12 @@ impl DummyUtxoSet {
}
// only for testing: add an output to the map
pub fn add_output(&mut self, output: transaction::Output) {
self.outputs.insert(output.commitment(), output);
}
// like above, but doesn't modify in-place so no mut ref needed
pub fn with_output(&self, output: transaction::Output) -> DummyUtxoSet {
let mut new_map = self.outputs.clone();
new_map.insert(output.commitment(), output);
DummyUtxoSet { outputs: new_map }
let mut new_outputs = self.outputs.clone();
new_outputs.insert(output.commitment(), output);
DummyUtxoSet {
outputs: new_outputs,
}
}
}
@ -108,8 +90,7 @@ impl DummyUtxoSet {
#[allow(dead_code)]
pub struct DummyChainImpl {
utxo: RwLock<DummyUtxoSet>,
block_headers: RwLock<DummyBlockHeaderIndex>,
head_header: RwLock<Vec<block::BlockHeader>>,
block_headers: RwLock<Vec<block::BlockHeader>>,
}
#[allow(dead_code)]
@ -119,39 +100,38 @@ impl DummyChainImpl {
utxo: RwLock::new(DummyUtxoSet {
outputs: HashMap::new(),
}),
block_headers: RwLock::new(DummyBlockHeaderIndex {
block_headers: HashMap::new(),
}),
head_header: RwLock::new(vec![]),
block_headers: RwLock::new(vec![]),
}
}
}
impl BlockChain for DummyChainImpl {
fn get_unspent(&self, commitment: &Commitment) -> Result<transaction::Output, PoolError> {
let output = self.utxo.read().unwrap().get_output(commitment).cloned();
match output {
Some(o) => Ok(o),
fn is_unspent(&self, output_ref: &OutputIdentifier) -> Result<(), PoolError> {
match self.utxo.read().unwrap().get_output(&output_ref.commit) {
Some(_) => Ok(()),
None => Err(PoolError::GenericPoolError),
}
}
fn get_block_header_by_output_commit(
&self,
commit: &Commitment,
) -> Result<block::BlockHeader, PoolError> {
match self.block_headers
.read()
.unwrap()
.get_block_header_by_output_commit(*commit)
{
Ok(h) => Ok(h.clone()),
Err(e) => Err(e),
fn is_matured(&self, input: &Input, height: u64) -> Result<(), PoolError> {
if !input.features.contains(COINBASE_OUTPUT) {
return Ok(());
}
let block_hash = input.out_block.expect("requires a block hash");
let headers = self.block_headers.read().unwrap();
if let Some(h) = headers
.iter()
.find(|x| x.hash() == block_hash)
{
if h.height + global::coinbase_maturity() < height {
return Ok(());
}
}
Err(PoolError::ImmatureCoinbase)
}
fn head_header(&self) -> Result<block::BlockHeader, PoolError> {
let headers = self.head_header.read().unwrap();
let headers = self.block_headers.read().unwrap();
if headers.len() > 0 {
Ok(headers[0].clone())
} else {
@ -164,33 +144,20 @@ impl DummyChain for DummyChainImpl {
fn update_utxo_set(&mut self, new_utxo: DummyUtxoSet) {
self.utxo = RwLock::new(new_utxo);
}
fn apply_block(&self, b: &block::Block) {
self.utxo.write().unwrap().with_block(b);
self.store_head_header(&b.header)
}
fn store_header_by_output_commitment(
&self,
commitment: Commitment,
block_header: &block::BlockHeader,
) {
self.block_headers
.write()
.unwrap()
.insert(commitment, block_header.clone());
}
fn store_head_header(&self, block_header: &block::BlockHeader) {
let mut h = self.head_header.write().unwrap();
h.clear();
h.insert(0, block_header.clone());
let mut headers = self.block_headers.write().unwrap();
headers.insert(0, block_header.clone());
}
}
pub trait DummyChain: BlockChain {
fn update_utxo_set(&mut self, new_utxo: DummyUtxoSet);
fn apply_block(&self, b: &block::Block);
fn store_header_by_output_commitment(
&self,
commitment: Commitment,
block_header: &block::BlockHeader,
);
fn store_head_header(&self, block_header: &block::BlockHeader);
}

View file

@ -25,6 +25,7 @@ use std::fmt;
use core::core;
use core::core::hash::Hashed;
use core::core::OutputIdentifier;
/// An entry in the transaction pool.
/// These are the vertices of both of the graph structures
@ -68,7 +69,7 @@ pub struct Edge {
// Output is the output hash which this input/output pairing corresponds
// to.
output: Commitment,
output: OutputIdentifier,
}
impl Edge {
@ -76,7 +77,7 @@ impl Edge {
pub fn new(
source: Option<core::hash::Hash>,
destination: Option<core::hash::Hash>,
output: Commitment,
output: OutputIdentifier,
) -> Edge {
Edge {
source: source,
@ -90,7 +91,7 @@ impl Edge {
Edge {
source: src,
destination: self.destination,
output: self.output,
output: self.output.clone(),
}
}
@ -99,13 +100,18 @@ impl Edge {
Edge {
source: self.source,
destination: dst,
output: self.output,
output: self.output.clone(),
}
}
/// The output_identifier of the edge.
pub fn output(&self) -> OutputIdentifier {
self.output.clone()
}
/// The output commitment of the edge
pub fn output_commitment(&self) -> Commitment {
self.output
self.output.commit
}
/// The destination hash of the edge
@ -292,7 +298,7 @@ mod tests {
use util::secp;
use keychain::Keychain;
use rand;
use core::core::SwitchCommitHash;
use core::core::{DEFAULT_OUTPUT, SwitchCommitHash};
#[test]
fn test_add_entry() {
@ -304,21 +310,30 @@ mod tests {
let output_commit = keychain.commit(70, &key_id1).unwrap();
let switch_commit = keychain.switch_commit(&key_id1).unwrap();
let switch_commit_hash = SwitchCommitHash::from_switch_commit(switch_commit);
let inputs = vec![
core::transaction::Input(keychain.commit(50, &key_id2).unwrap()),
core::transaction::Input(keychain.commit(25, &key_id3).unwrap()),
core::transaction::Input::new(
DEFAULT_OUTPUT,
keychain.commit(50, &key_id2).unwrap(),
None,
),
core::transaction::Input::new(
DEFAULT_OUTPUT,
keychain.commit(25, &key_id3).unwrap(),
None,
),
];
let msg = secp::pedersen::ProofMessage::empty();
let outputs = vec![
core::transaction::Output {
features: core::transaction::DEFAULT_OUTPUT,
commit: output_commit,
switch_commit_hash: switch_commit_hash,
proof: keychain
.range_proof(100, &key_id1, output_commit, msg)
.unwrap(),
},
];
let output = core::transaction::Output {
features: DEFAULT_OUTPUT,
commit: output_commit,
switch_commit_hash: switch_commit_hash,
proof: keychain
.range_proof(100, &key_id1, output_commit, msg)
.unwrap(),
};
let outputs = vec![output];
let test_transaction = core::transaction::Transaction::new(inputs, outputs, 5, 0);
let test_pool_entry = PoolEntry::new(&test_transaction);
@ -326,7 +341,7 @@ mod tests {
let incoming_edge_1 = Edge::new(
Some(random_hash()),
Some(core::hash::ZERO_HASH),
output_commit,
OutputIdentifier::from_output(&output),
);
let mut test_graph = DirectedGraph::empty();

View file

@ -34,6 +34,8 @@ extern crate rand;
extern crate serde;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate slog;
extern crate time;
pub use pool::TransactionPool;

View file

@ -14,20 +14,17 @@
//! Top-level Pool type, methods, and tests
use types::*;
pub use graph;
use core::core::transaction;
use core::core::block;
use core::core::hash;
use core::core::target::Difficulty;
use core::global;
use util::secp::pedersen::Commitment;
use std::sync::Arc;
use std::collections::{HashMap, HashSet};
use core::core::transaction;
use core::core::OutputIdentifier;
use core::core::{block, hash};
use util::secp::pedersen::Commitment;
use types::*;
pub use graph;
/// The pool itself.
/// The transactions HashMap holds ownership of all transactions in the pool,
/// keyed by their transaction hash.
@ -69,35 +66,35 @@ where
/// Detects double spends and unknown references from the pool and
/// blockchain only; any conflicts with entries in the orphans set must
/// be accounted for separately, if relevant.
pub fn search_for_best_output(&self, output_commitment: &Commitment) -> Parent {
pub fn search_for_best_output(&self, output_ref: &OutputIdentifier) -> Parent {
// The current best unspent set is:
// Pool unspent + (blockchain unspent - pool->blockchain spent)
// Pool unspents are unconditional so we check those first
// Pool unspent + (blockchain unspent - pool->blockchain spent)
// Pool unspents are unconditional so we check those first
self.pool
.get_available_output(output_commitment)
.get_available_output(&output_ref.commit)
.map(|x| {
Parent::PoolTransaction {
tx_ref: x.source_hash().unwrap(),
}
let tx_ref = x.source_hash().unwrap();
Parent::PoolTransaction { tx_ref }
})
.or(self.search_blockchain_unspents(output_commitment))
.or(self.search_pool_spents(output_commitment))
.or(self.search_blockchain_unspents(output_ref))
.or(self.search_pool_spents(&output_ref.commit))
.unwrap_or(Parent::Unknown)
}
// search_blockchain_unspents searches the current view of the blockchain
// unspent set, represented by blockchain unspents - pool spents, for an
// output designated by output_commitment.
fn search_blockchain_unspents(&self, output_commitment: &Commitment) -> Option<Parent> {
// unspent set, represented by blockchain unspents - pool spents, for an
// output designated by output_commitment.
fn search_blockchain_unspents(&self, output_ref: &OutputIdentifier) -> Option<Parent> {
self.blockchain
.get_unspent(output_commitment)
.is_unspent(output_ref)
.ok()
.map(|output| {
match self.pool.get_blockchain_spent(output_commitment) {
Some(x) => Parent::AlreadySpent {
other_tx: x.destination_hash().unwrap(),
},
None => Parent::BlockTransaction { output },
.map(|_| {
match self.pool.get_blockchain_spent(&output_ref.commit) {
Some(x) => {
let other_tx = x.destination_hash().unwrap();
Parent::AlreadySpent { other_tx }
}
None => Parent::BlockTransaction,
}
})
}
@ -169,35 +166,24 @@ where
}
// The next issue is to identify all unspent outputs that
// this transaction will consume and make sure they exist in the set.
// this transaction will consume and make sure they exist in the set.
let mut pool_refs: Vec<graph::Edge> = Vec::new();
let mut orphan_refs: Vec<graph::Edge> = Vec::new();
let mut blockchain_refs: Vec<graph::Edge> = Vec::new();
for input in &tx.inputs {
let base = graph::Edge::new(None, Some(tx_hash), input.commitment());
let output = OutputIdentifier::from_input(&input);
let base = graph::Edge::new(None, Some(tx_hash), output.clone());
// Note that search_for_best_output does not examine orphans, by
// design. If an incoming transaction consumes pool outputs already
// spent by the orphans set, this does not preclude its inclusion
// into the pool.
match self.search_for_best_output(&input.commitment()) {
// design. If an incoming transaction consumes pool outputs already
// spent by the orphans set, this does not preclude its inclusion
// into the pool.
match self.search_for_best_output(&output) {
Parent::PoolTransaction { tx_ref: x } => pool_refs.push(base.with_source(Some(x))),
Parent::BlockTransaction { output } => {
// TODO - pull this out into a separate function?
if output.features.contains(transaction::COINBASE_OUTPUT) {
if let Ok(out_header) = self.blockchain
.get_block_header_by_output_commit(&output.commitment())
{
let lock_height = out_header.height + global::coinbase_maturity();
if head_header.height < lock_height {
return Err(PoolError::ImmatureCoinbase {
header: out_header,
output: output.commitment(),
});
};
};
};
Parent::BlockTransaction => {
let height = head_header.height + 1;
self.blockchain.is_matured(&input, height)?;
blockchain_refs.push(base);
}
Parent::Unknown => orphan_refs.push(base),
@ -223,24 +209,27 @@ where
}
// Assertion: we have exactly as many resolved spending references as
// inputs to the transaction.
// inputs to the transaction.
assert_eq!(
tx.inputs.len(),
blockchain_refs.len() + pool_refs.len() + orphan_refs.len()
);
// At this point we know if we're spending all known unspents and not
// creating any duplicate unspents.
// creating any duplicate unspents.
let pool_entry = graph::PoolEntry::new(&tx);
let new_unspents = tx.outputs
.iter()
.map(|x| graph::Edge::new(Some(tx_hash), None, x.commitment()))
.map(|x| {
let output = OutputIdentifier::from_output(&x);
graph::Edge::new(Some(tx_hash), None, output)
})
.collect();
if !is_orphan {
// In the non-orphan (pool) case, we've ensured that every input
// maps one-to-one with an unspent (available) output, and each
// output is unique. No further checks are necessary.
// maps one-to-one with an unspent (available) output, and each
// output is unique. No further checks are necessary.
self.pool
.add_pool_transaction(pool_entry, blockchain_refs, pool_refs, new_unspents);
@ -303,13 +292,14 @@ where
is_orphan: bool,
) -> Result<(), PoolError> {
// Checking against current blockchain unspent outputs
// We want outputs even if they're spent by pool txs, so we ignore
// consumed_blockchain_outputs
if self.blockchain.get_unspent(&output.commitment()).is_ok() {
// We want outputs even if they're spent by pool txs, so we ignore
// consumed_blockchain_outputs
let out = OutputIdentifier::from_output(&output);
if self.blockchain.is_unspent(&out).is_ok() {
return Err(PoolError::DuplicateOutput {
other_tx: None,
in_chain: true,
output: output.commitment(),
output: out.commit,
});
}
@ -320,7 +310,7 @@ where
return Err(PoolError::DuplicateOutput {
other_tx: Some(x),
in_chain: false,
output: output.commitment(),
output: output.commit,
})
}
None => {}
@ -517,7 +507,7 @@ where
for output in &tx_ref.unwrap().outputs {
match self.pool.get_internal_spent_output(&output.commitment()) {
Some(x) => if self.blockchain.get_unspent(&x.output_commitment()).is_err() {
Some(x) => if self.blockchain.is_unspent(&x.output()).is_err() {
self.mark_transaction(x.destination_hash().unwrap(), marked_txs);
},
None => {}
@ -604,6 +594,7 @@ where
mod tests {
use super::*;
use core::core::build;
use core::global;
use blockchain::{DummyChain, DummyChainImpl, DummyUtxoSet};
use util::secp;
use keychain::Keychain;
@ -611,11 +602,14 @@ mod tests {
use blake2;
use core::global::ChainTypes;
use core::core::SwitchCommitHash;
use core::core::hash::ZERO_HASH;
use core::core::hash::{Hash, Hashed};
use core::core::target::Difficulty;
macro_rules! expect_output_parent {
($pool:expr, $expected:pat, $( $output:expr ),+ ) => {
$(
match $pool.search_for_best_output(&test_output($output).commitment()) {
match $pool.search_for_best_output(&OutputIdentifier::from_output(&test_output($output))) {
$expected => {},
x => panic!(
"Unexpected result from output search for {:?}, got {:?}",
@ -651,7 +645,7 @@ mod tests {
dummy_chain.update_utxo_set(new_utxo);
// To mirror how this construction is intended to be used, the pool
// is placed inside a RwLock.
// is placed inside a RwLock.
let pool = RwLock::new(test_setup(&Arc::new(dummy_chain)));
// Take the write lock and add a pool entry
@ -676,14 +670,13 @@ mod tests {
}
}
// Now take the read lock and use a few exposed methods to check
// consistency
// Now take the read lock and use a few exposed methods to check consistency
{
let read_pool = pool.read().unwrap();
assert_eq!(read_pool.total_size(), 2);
expect_output_parent!(read_pool, Parent::PoolTransaction{tx_ref: _}, 12);
expect_output_parent!(read_pool, Parent::AlreadySpent{other_tx: _}, 11, 5);
expect_output_parent!(read_pool, Parent::BlockTransaction{output: _}, 8);
expect_output_parent!(read_pool, Parent::BlockTransaction, 8);
expect_output_parent!(read_pool, Parent::Unknown, 20);
}
}
@ -804,6 +797,10 @@ mod tests {
fn test_immature_coinbase() {
global::set_mining_mode(ChainTypes::AutomatedTesting);
let mut dummy_chain = DummyChainImpl::new();
let lock_height = 1 + global::coinbase_maturity();
assert_eq!(lock_height, 4);
let coinbase_output = test_coinbase_output(15);
dummy_chain.update_utxo_set(DummyUtxoSet::empty().with_output(coinbase_output));
@ -817,8 +814,7 @@ mod tests {
height: 1,
..block::BlockHeader::default()
};
chain_ref
.store_header_by_output_commitment(coinbase_output.commitment(), &coinbase_header);
chain_ref.store_head_header(&coinbase_header);
let head_header = block::BlockHeader {
height: 2,
@ -826,43 +822,28 @@ mod tests {
};
chain_ref.store_head_header(&head_header);
let txn = test_transaction(vec![15], vec![10, 3]);
let txn = test_transaction_with_coinbase_input(
15,
coinbase_header.hash(),
vec![10, 3],
);
let result = write_pool.add_to_memory_pool(test_source(), txn);
match result {
Err(PoolError::ImmatureCoinbase {
header: _,
output: out,
}) => {
assert_eq!(out, coinbase_output.commitment());
}
Err(PoolError::ImmatureCoinbase) => {},
_ => panic!("expected ImmatureCoinbase error here"),
};
let head_header = block::BlockHeader {
height: 3,
height: 4,
..block::BlockHeader::default()
};
chain_ref.store_head_header(&head_header);
let txn = test_transaction(vec![15], vec![10, 3]);
let result = write_pool.add_to_memory_pool(test_source(), txn);
match result {
Err(PoolError::ImmatureCoinbase {
header: _,
output: out,
}) => {
assert_eq!(out, coinbase_output.commitment());
}
_ => panic!("expected ImmatureCoinbase error here"),
};
let head_header = block::BlockHeader {
height: 5,
..block::BlockHeader::default()
};
chain_ref.store_head_header(&head_header);
let txn = test_transaction(vec![15], vec![10, 3]);
let txn = test_transaction_with_coinbase_input(
15,
coinbase_header.hash(),
vec![10, 3],
);
let result = write_pool.add_to_memory_pool(test_source(), txn);
match result {
Ok(_) => {}
@ -1089,7 +1070,7 @@ mod tests {
assert_eq!(read_pool.total_size(), 4);
// We should have available blockchain outputs
expect_output_parent!(read_pool, Parent::BlockTransaction{output: _}, 9, 1);
expect_output_parent!(read_pool, Parent::BlockTransaction, 9, 1);
// We should have spent blockchain outputs
expect_output_parent!(read_pool, Parent::AlreadySpent{other_tx: _}, 5, 6);
@ -1229,23 +1210,29 @@ mod tests {
/// Every output is given a blinding key equal to its value, so that the
/// entire commitment can be derived deterministically from just the value.
///
/// Fees are the remainder between input and output values, so the numbers
/// should make sense.
/// Fees are the remainder between input and output values,
/// so the numbers should make sense.
fn test_transaction(
input_values: Vec<u64>,
output_values: Vec<u64>,
) -> transaction::Transaction {
let keychain = keychain_for_tests();
let fees: i64 =
input_values.iter().sum::<u64>() as i64 - output_values.iter().sum::<u64>() as i64;
let input_sum = input_values
.iter()
.sum::<u64>() as i64;
let output_sum = output_values
.iter()
.sum::<u64>() as i64;
let fees: i64 = input_sum - output_sum;
assert!(fees >= 0);
let mut tx_elements = Vec::new();
for input_value in input_values {
let key_id = keychain.derive_key_id(input_value as u32).unwrap();
tx_elements.push(build::input(input_value, key_id));
tx_elements.push(build::input(input_value, ZERO_HASH, key_id));
}
for output_value in output_values {
@ -1258,6 +1245,38 @@ mod tests {
tx
}
fn test_transaction_with_coinbase_input(
input_value: u64,
input_block_hash: Hash,
output_values: Vec<u64>,
) -> transaction::Transaction {
let keychain = keychain_for_tests();
let output_sum = output_values
.iter()
.sum::<u64>() as i64;
let fees: i64 = input_value as i64 - output_sum;
assert!(fees >= 0);
let mut tx_elements = Vec::new();
// for input_value in input_values {
let key_id = keychain.derive_key_id(input_value as u32).unwrap();
tx_elements.push(build::coinbase_input(input_value, input_block_hash, key_id));
for output_value in output_values {
let key_id = keychain.derive_key_id(output_value as u32).unwrap();
tx_elements.push(build::output(output_value, key_id));
}
tx_elements.push(build::with_fee(fees as u64));
let (tx, _) = build::transaction(tx_elements, &keychain).unwrap();
tx
}
/// Very un-dry way of building a vanilla tx and adding a lock_height to it.
/// TODO - rethink this.
fn timelocked_transaction(
input_values: Vec<u64>,
output_values: Vec<u64>,
@ -1273,7 +1292,7 @@ mod tests {
for input_value in input_values {
let key_id = keychain.derive_key_id(input_value as u32).unwrap();
tx_elements.push(build::input(input_value, key_id));
tx_elements.push(build::input(input_value, ZERO_HASH, key_id));
}
for output_value in output_values {

View file

@ -25,9 +25,8 @@ use util::secp::pedersen::Commitment;
pub use graph;
use core::consensus;
use core::core::block;
use core::core::transaction;
use core::core::hash;
use core::core::{block, hash, transaction};
use core::core::transaction::{Input, OutputIdentifier};
/// Tranasction pool configuration
#[derive(Clone, Debug, Serialize, Deserialize)]
@ -78,7 +77,7 @@ pub struct TxSource {
#[derive(Clone)]
pub enum Parent {
Unknown,
BlockTransaction { output: transaction::Output },
BlockTransaction,
PoolTransaction { tx_ref: hash::Hash },
AlreadySpent { other_tx: hash::Hash },
}
@ -87,11 +86,15 @@ impl fmt::Debug for Parent {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
&Parent::Unknown => write!(f, "Parent: Unknown"),
&Parent::BlockTransaction { output: _ } => write!(f, "Parent: Block Transaction"),
&Parent::BlockTransaction => {
write!(f, "Parent: Block Transaction")
}
&Parent::PoolTransaction { tx_ref: x } => {
write!(f, "Parent: Pool Transaction ({:?})", x)
}
&Parent::AlreadySpent { other_tx: x } => write!(f, "Parent: Already Spent By {:?}", x),
&Parent::AlreadySpent { other_tx: x } => {
write!(f, "Parent: Already Spent By {:?}", x)
}
}
}
}
@ -122,12 +125,7 @@ pub enum PoolError {
},
/// Attempt to spend an output before it matures
/// lock_height must not exceed current block height
ImmatureCoinbase {
/// The block header of the block containing the output
header: block::BlockHeader,
/// The unspent output
output: Commitment,
},
ImmatureCoinbase,
/// Attempt to add a transaction to the pool with lock_height
/// greater than height of current block
ImmatureTransaction {
@ -151,18 +149,18 @@ pub enum PoolError {
/// Interface that the pool requires from a blockchain implementation.
pub trait BlockChain {
/// Get an unspent output by its commitment. Will return None if the output
/// Get an unspent output by its commitment. Will return an error if the output
/// is spent or if it doesn't exist. The blockchain is expected to produce
/// a result with its current view of the most worked chain, ignoring
/// orphans, etc.
fn get_unspent(&self, output_ref: &Commitment) -> Result<transaction::Output, PoolError>;
/// We do not maintain outputs themselves. The only information we have is the
/// hash from the output MMR.
fn is_unspent(&self, output_ref: &OutputIdentifier) -> Result<(), PoolError>;
/// Get the block header by output commitment (needed for spending coinbase
/// after n blocks)
fn get_block_header_by_output_commit(
&self,
commit: &Commitment,
) -> Result<block::BlockHeader, PoolError>;
/// Check if an output being spent by the input has sufficiently matured.
/// This is only applicable for coinbase outputs (1,000 blocks).
/// Non-coinbase outputs will always pass this check.
fn is_matured(&self, input: &Input, height: u64) -> Result<(), PoolError>;
/// Get the block header at the head
fn head_header(&self) -> Result<block::BlockHeader, PoolError>;

View file

@ -1,4 +1,4 @@
// Copyright 2016 The Grin Developers
// Copyright 2017 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -19,22 +19,18 @@ use std::collections::hash_map::Entry;
use std::collections::HashMap;
use api;
use core::core::hash::Hash;
use types::*;
use keychain::{Identifier, Keychain};
use util::secp::pedersen;
use util;
use util::LOGGER;
// Transitions a local wallet output from Unconfirmed -> Unspent.
// Also updates the height and lock_height based on latest from the api.
fn refresh_output(out: &mut OutputData, api_out: &api::Output) {
out.height = api_out.height;
out.lock_height = api_out.lock_height;
// Transitions a local wallet output from Unconfirmed -> Unspent.
fn mark_unspent_output(out: &mut OutputData) {
match out.status {
OutputStatus::Unconfirmed => {
out.status = OutputStatus::Unspent;
}
OutputStatus::Unconfirmed => out.status = OutputStatus::Unspent,
_ => (),
}
}
@ -45,79 +41,92 @@ fn refresh_output(out: &mut OutputData, api_out: &api::Output) {
// Locked -> Spent
fn mark_spent_output(out: &mut OutputData) {
match out.status {
OutputStatus::Unspent | OutputStatus::Locked => out.status = OutputStatus::Spent,
OutputStatus::Unspent => out.status = OutputStatus::Spent,
OutputStatus::Locked => out.status = OutputStatus::Spent,
_ => (),
}
}
/// Builds multiple api queries to retrieve the latest output data from the node.
/// So we can refresh the local wallet outputs.
pub fn refresh_outputs(config: &WalletConfig, keychain: &Keychain) -> Result<(), Error> {
debug!(LOGGER, "Refreshing wallet outputs");
let mut wallet_outputs: HashMap<pedersen::Commitment, Identifier> = HashMap::new();
let mut commits: Vec<pedersen::Commitment> = vec![];
refresh_output_state(config, keychain)?;
refresh_missing_block_hashes(config, keychain)?;
Ok(())
}
// build a local map of wallet outputs by commits
// TODO - this might be slow if we have really old outputs that have never been refreshed
fn refresh_missing_block_hashes(config: &WalletConfig, keychain: &Keychain) -> Result<(), Error> {
// build a local map of wallet outputs keyed by commit
// and a list of outputs we want to query the node for
let mut wallet_outputs: HashMap<pedersen::Commitment, Identifier> = HashMap::new();
let _ = WalletData::read_wallet(&config.data_file_dir, |wallet_data| {
for out in wallet_data
.outputs
.values()
.filter(|out| out.root_key_id == keychain.root_key_id())
.filter(|out| out.status != OutputStatus::Spent)
.filter(|x| {
x.root_key_id == keychain.root_key_id() &&
x.block_hash == Hash::zero() &&
x.status == OutputStatus::Unspent
})
{
let commit = keychain
.commit_with_key_index(out.value, out.n_child)
.unwrap();
commits.push(commit);
let commit = keychain.commit_with_key_index(out.value, out.n_child).unwrap();
wallet_outputs.insert(commit, out.key_id.clone());
}
});
// build the necessary query params -
// ?id=xxx&id=yyy&id=zzz
let query_params: Vec<String> = commits
.iter()
// nothing to do so return (otherwise we hit the api with a monster query...)
if wallet_outputs.is_empty() {
return Ok(());
}
debug!(
LOGGER,
"Refreshing missing block hashes (and heights) for {} outputs",
wallet_outputs.len(),
);
let mut id_params: Vec<String> = wallet_outputs
.keys()
.map(|commit| {
let id = util::to_hex(commit.as_ref().to_vec());
format!("id={}", id)
})
.collect();
// build a map of api outputs by commit so we can look them up efficiently
let mut api_outputs: HashMap<pedersen::Commitment, api::Output> = HashMap::new();
let tip = get_tip_from_node(config)?;
// size of the batch size for the utxo query
let batch_query_size = 500;
let height_params = format!(
"start_height={}&end_height={}",
0,
tip.height,
);
let mut query_params = vec![height_params];
query_params.append(&mut id_params);
let mut index_id = 0;
while index_id < query_params.len() {
let batch_query: Vec<String>;
if index_id + batch_query_size > query_params.len() {
batch_query = query_params[index_id..query_params.len()].to_vec();
index_id = query_params.len();
} else {
batch_query = query_params[index_id..index_id + batch_query_size].to_vec();
index_id = index_id + batch_query_size;
}
let url =
format!(
"{}/v1/chain/utxos/byheight?{}",
config.check_node_api_http_addr,
query_params.join("&"),
);
debug!(LOGGER, "{:?}", url);
let query_string = batch_query.join("&");
let url = format!(
"{}/v1/chain/utxos/byids?{}",
config.check_node_api_http_addr, query_string,
);
match api::client::get::<Vec<api::Output>>(url.as_str()) {
Ok(outputs) => for out in outputs {
api_outputs.insert(out.commit, out);
},
Err(e) => {
// if we got anything other than 200 back from server, don't attempt to refresh
// the wallet data after
return Err(Error::Node(e));
let mut api_blocks: HashMap<pedersen::Commitment, api::BlockHeaderInfo> = HashMap::new();
match api::client::get::<Vec<api::BlockOutputs>>(url.as_str()) {
Ok(blocks) => {
for block in blocks {
for out in block.outputs {
if let Ok(c) = util::from_hex(String::from(out.commit)) {
let commit = pedersen::Commitment::from_vec(c);
api_blocks.insert(commit, block.header.clone());
}
}
}
};
}
Err(e) => {
// if we got anything other than 200 back from server, bye
error!(LOGGER, "Refresh failed... unable to contact node: {}", e);
return Err(Error::Node(e));
}
}
// now for each commit, find the output in the wallet and
@ -125,18 +134,87 @@ pub fn refresh_outputs(config: &WalletConfig, keychain: &Keychain) -> Result<(),
// and refresh it in-place in the wallet.
// Note: minimizing the time we spend holding the wallet lock.
WalletData::with_wallet(&config.data_file_dir, |wallet_data| {
for commit in commits {
for commit in wallet_outputs.keys() {
let id = wallet_outputs.get(&commit).unwrap();
if let Entry::Occupied(mut output) = wallet_data.outputs.entry(id.to_hex()) {
match api_outputs.get(&commit) {
Some(api_output) => refresh_output(&mut output.get_mut(), api_output),
None => mark_spent_output(&mut output.get_mut()),
};
if let Some(b) = api_blocks.get(&commit) {
let output = output.get_mut();
output.block_hash = Hash::from_hex(&b.hash).unwrap();
output.height = b.height;
}
}
}
})
}
/// Builds a single api query to retrieve the latest output data from the node.
/// So we can refresh the local wallet outputs.
fn refresh_output_state(config: &WalletConfig, keychain: &Keychain) -> Result<(), Error> {
debug!(LOGGER, "Refreshing wallet outputs");
// build a local map of wallet outputs keyed by commit
// and a list of outputs we want to query the node for
let mut wallet_outputs: HashMap<pedersen::Commitment, Identifier> = HashMap::new();
let _ = WalletData::read_wallet(&config.data_file_dir, |wallet_data| {
for out in wallet_data
.outputs
.values()
.filter(|x| {
x.root_key_id == keychain.root_key_id() &&
x.status != OutputStatus::Spent
})
{
let commit = keychain.commit_with_key_index(out.value, out.n_child).unwrap();
wallet_outputs.insert(commit, out.key_id.clone());
}
});
// build the necessary query params -
// ?id=xxx&id=yyy&id=zzz
let query_params: Vec<String> = wallet_outputs
.keys()
.map(|commit| {
let id = util::to_hex(commit.as_ref().to_vec());
format!("id={}", id)
})
.collect();
// build a map of api outputs by commit so we can look them up efficiently
let mut api_utxos: HashMap<pedersen::Commitment, api::Utxo> = HashMap::new();
let query_string = query_params.join("&");
let url = format!(
"{}/v1/chain/utxos/byids?{}",
config.check_node_api_http_addr, query_string,
);
match api::client::get::<Vec<api::Utxo>>(url.as_str()) {
Ok(outputs) => for out in outputs {
api_utxos.insert(out.commit, out);
},
Err(e) => {
// if we got anything other than 200 back from server, don't attempt to refresh
// the wallet data after
return Err(Error::Node(e));
}
};
// now for each commit, find the output in the wallet and
// the corresponding api output (if it exists)
// and refresh it in-place in the wallet.
// Note: minimizing the time we spend holding the wallet lock.
WalletData::with_wallet(&config.data_file_dir, |wallet_data| for commit in wallet_outputs.keys() {
let id = wallet_outputs.get(&commit).unwrap();
if let Entry::Occupied(mut output) = wallet_data.outputs.entry(id.to_hex()) {
match api_utxos.get(&commit) {
Some(_) => mark_unspent_output(&mut output.get_mut()),
None => mark_spent_output(&mut output.get_mut()),
};
}
})
}
pub fn get_tip_from_node(config: &WalletConfig) -> Result<api::Tip, Error> {
let url = format!("{}/v1/chain", config.check_node_api_http_addr);
api::client::get::<api::Tip>(url.as_str()).map_err(|e| Error::Node(e))

View file

@ -21,7 +21,6 @@ use prettytable;
pub fn show_info(config: &WalletConfig, keychain: &Keychain) {
let result = checker::refresh_outputs(&config, &keychain);
let _ = WalletData::read_wallet(&config.data_file_dir, |wallet_data| {
let (current_height, from) = match checker::get_tip_from_node(config) {
Ok(tip) => (tip.height, "from server node"),
@ -30,26 +29,26 @@ pub fn show_info(config: &WalletConfig, keychain: &Keychain) {
None => (0, "node/wallet unavailable"),
},
};
let mut unspent_total=0;
let mut unspent_but_locked_total=0;
let mut unconfirmed_total=0;
let mut locked_total=0;
let mut unspent_total = 0;
let mut unspent_but_locked_total = 0;
let mut unconfirmed_total = 0;
let mut locked_total = 0;
for out in wallet_data
.outputs
.values()
.filter(|out| out.root_key_id == keychain.root_key_id())
{
if out.status == OutputStatus::Unspent {
unspent_total+=out.value;
unspent_total += out.value;
if out.lock_height > current_height {
unspent_but_locked_total+=out.value;
unspent_but_locked_total += out.value;
}
}
if out.status == OutputStatus::Unconfirmed && !out.is_coinbase {
unconfirmed_total+=out.value;
unconfirmed_total += out.value;
}
if out.status == OutputStatus::Locked {
locked_total+=out.value;
locked_total += out.value;
}
};

View file

@ -25,7 +25,8 @@ use serde_json;
use api;
use core::consensus::reward;
use core::core::{build, Block, Output, Transaction, TxKernel, amount_to_hr_string};
use core::ser;
use core::core::hash::Hash;
use core::{global, ser};
use keychain::{Identifier, Keychain};
use types::*;
use util::{LOGGER, to_hex, secp};
@ -68,7 +69,7 @@ fn handle_sender_initiation(
if fee > amount {
info!(
LOGGER,
LOGGER,
"Rejected the transfer because transaction fee ({}) exceeds received amount ({}).",
amount_to_hr_string(fee),
amount_to_hr_string(amount)
@ -96,6 +97,7 @@ fn handle_sender_initiation(
height: 0,
lock_height: 0,
is_coinbase: false,
block_hash: Hash::zero(),
});
key_id
@ -259,6 +261,9 @@ pub fn receive_coinbase(
) -> Result<(Output, TxKernel, BlockFees), Error> {
let root_key_id = keychain.root_key_id();
let height = block_fees.height;
let lock_height = height + global::coinbase_maturity();
// Now acquire the wallet lock and write the new output.
let (key_id, derivation) = WalletData::with_wallet(&config.data_file_dir, |wallet_data| {
let key_id = block_fees.key_id();
@ -274,9 +279,10 @@ pub fn receive_coinbase(
n_child: derivation,
value: reward(block_fees.fees),
status: OutputStatus::Unconfirmed,
height: 0,
lock_height: 0,
height: height,
lock_height: lock_height,
is_coinbase: true,
block_hash: Hash::zero(),
});
(key_id, derivation)
@ -284,20 +290,22 @@ pub fn receive_coinbase(
debug!(
LOGGER,
"Received coinbase and built candidate output - {:?}, {:?}, {}",
root_key_id.clone(),
"receive_coinbase: built candidate output - {:?}, {}",
key_id.clone(),
derivation,
);
debug!(LOGGER, "block_fees - {:?}", block_fees);
let mut block_fees = block_fees.clone();
block_fees.key_id = Some(key_id.clone());
debug!(LOGGER, "block_fees updated - {:?}", block_fees);
debug!(LOGGER, "receive_coinbase: {:?}", block_fees);
let (out, kern) = Block::reward_output(&keychain, &key_id, block_fees.fees)?;
let (out, kern) = Block::reward_output(
&keychain,
&key_id,
block_fees.fees,
block_fees.height,
)?;
Ok((out, kern, block_fees))
}
@ -313,8 +321,8 @@ fn build_final_transaction(
let root_key_id = keychain.root_key_id();
// double check the fee amount included in the partial tx
// we don't necessarily want to just trust the sender
// we could just overwrite the fee here (but we won't) due to the ecdsa sig
// we don't necessarily want to just trust the sender
// we could just overwrite the fee here (but we won't) due to the ecdsa sig
let fee = tx_fee(tx.inputs.len(), tx.outputs.len() + 1, None);
if fee != tx.fee {
return Err(Error::FeeDispute {
@ -325,7 +333,7 @@ fn build_final_transaction(
if fee > amount {
info!(
LOGGER,
LOGGER,
"Rejected the transfer because transaction fee ({}) exceeds received amount ({}).",
amount_to_hr_string(fee),
amount_to_hr_string(amount)
@ -355,6 +363,7 @@ fn build_final_transaction(
height: 0,
lock_height: 0,
is_coinbase: false,
block_hash: Hash::zero(),
});
(key_id, derivation)

View file

@ -16,11 +16,14 @@ use keychain::{Keychain, Identifier};
use util::{LOGGER, from_hex};
use util::secp::pedersen;
use api;
use core::global;
use core::core::{Output, SwitchCommitHash};
use core::core::transaction::{COINBASE_OUTPUT, DEFAULT_OUTPUT, SWITCH_COMMIT_HASH_SIZE};
use core::core::hash::Hash;
use core::core::transaction::{COINBASE_OUTPUT, DEFAULT_OUTPUT};
use types::{WalletConfig, WalletData, OutputData, OutputStatus, Error};
use byteorder::{BigEndian, ByteOrder};
pub fn get_chain_height(config: &WalletConfig) -> Result<u64, Error> {
let url = format!("{}/v1/chain", config.check_node_api_http_addr);
@ -39,18 +42,28 @@ pub fn get_chain_height(config: &WalletConfig) -> Result<u64, Error> {
}
}
fn output_with_range_proof(config: &WalletConfig, commit_id: &str) -> Result<api::Output, Error> {
fn output_with_range_proof(
config: &WalletConfig,
commit_id: &str,
height: u64,
) -> Result<api::OutputPrintable, Error> {
let url =
format!(
"{}/v1/chain/utxos/byids?id={}&include_rp&include_switch",
"{}/v1/chain/utxos/byheight?start_height={}&end_height={}&id={}&include_rp",
config.check_node_api_http_addr,
height,
height,
commit_id,
);
match api::client::get::<Vec<api::Output>>(url.as_str()) {
Ok(outputs) => {
if let Some(output) = outputs.first() {
Ok(output.clone())
match api::client::get::<Vec<api::BlockOutputs>>(url.as_str()) {
Ok(block_outputs) => {
if let Some(block_output) = block_outputs.first() {
if let Some(output) = block_output.outputs.first() {
Ok(output.clone())
} else {
Err(Error::Node(api::Error::NotFound))
}
} else {
Err(Error::Node(api::Error::NotFound))
}
@ -69,17 +82,20 @@ fn retrieve_amount_and_coinbase_status(
keychain: &Keychain,
key_id: Identifier,
commit_id: &str,
height: u64,
) -> Result<(u64, bool), Error> {
let output = output_with_range_proof(config, commit_id)?;
let output = output_with_range_proof(config, commit_id, height)?;
let core_output = Output {
features: match output.output_type {
api::OutputType::Coinbase => COINBASE_OUTPUT,
api::OutputType::Transaction => DEFAULT_OUTPUT,
},
proof: output.proof.expect("output with proof"),
switch_commit_hash: output.switch_commit_hash.expect("output with switch_commit_hash"),
commit: output.commit,
proof: output.range_proof()?,
switch_commit_hash: output.switch_commit_hash()?,
commit: output.commit()?,
};
if let Some(amount) = core_output.recover_value(keychain, &key_id) {
let is_coinbase = match output.output_type {
api::OutputType::Coinbase => true,
@ -96,8 +112,6 @@ pub fn utxos_batch_block(
start_height: u64,
end_height: u64,
) -> Result<Vec<api::BlockOutputs>, Error> {
// build the necessary query param -
// ?height=x
let query_param = format!("start_height={}&end_height={}", start_height, end_height);
let url =
@ -122,15 +136,16 @@ pub fn utxos_batch_block(
}
}
// TODO - wrap the many return values in a struct
fn find_utxos_with_key(
config: &WalletConfig,
keychain: &Keychain,
switch_commit_cache: &Vec<[u8; SWITCH_COMMIT_HASH_SIZE]>,
switch_commit_cache: &Vec<pedersen::Commitment>,
block_outputs: api::BlockOutputs,
key_iterations: &mut usize,
padding: &mut usize,
) -> Vec<(pedersen::Commitment, Identifier, u32, u64, u64, bool)> {
let mut wallet_outputs: Vec<(pedersen::Commitment, Identifier, u32, u64, u64, bool)> =
) -> Vec<(pedersen::Commitment, Identifier, u32, u64, u64, u64, bool)> {
let mut wallet_outputs: Vec<(pedersen::Commitment, Identifier, u32, u64, u64, u64, bool)> =
Vec::new();
info!(
@ -141,55 +156,69 @@ fn find_utxos_with_key(
*key_iterations,
);
for output in block_outputs.outputs {
for output in block_outputs.outputs.iter().filter(|x| !x.spent) {
for i in 0..*key_iterations {
if switch_commit_cache[i as usize] == output.switch_commit_hash {
info!(
LOGGER,
"Output found: {:?}, key_index: {:?}",
output.switch_commit_hash,
i,
);
let expected_hash = SwitchCommitHash::from_switch_commit(switch_commit_cache[i as usize]);
// add it to result set here
let commit_id = from_hex(output.commit.clone()).unwrap();
let key_id = keychain.derive_key_id(i as u32).unwrap();
let res = retrieve_amount_and_coinbase_status(
config,
keychain,
key_id.clone(),
&output.commit,
);
if let Ok((amount, is_coinbase)) = res {
info!(LOGGER, "Amount: {}", amount);
let commit = keychain
.commit_with_key_index(BigEndian::read_u64(&commit_id), i as u32)
.expect("commit with key index");
wallet_outputs.push((
commit,
key_id.clone(),
i as u32,
amount,
output.height,
is_coinbase,
));
// probably don't have to look for indexes greater than this now
*key_iterations = i + *padding;
if *key_iterations > switch_commit_cache.len() {
*key_iterations = switch_commit_cache.len();
}
info!(LOGGER, "Setting max key index to: {}", *key_iterations);
break;
} else {
if let Ok(x) = output.switch_commit_hash() {
if x == expected_hash {
info!(
LOGGER,
"Unable to retrieve the amount (needs investigating)",
"Output found: {:?}, key_index: {:?}",
output,
i,
);
// add it to result set here
let commit_id = from_hex(output.commit.clone()).unwrap();
let key_id = keychain.derive_key_id(i as u32).unwrap();
let res = retrieve_amount_and_coinbase_status(
config,
keychain,
key_id.clone(),
&output.commit,
block_outputs.header.height,
);
if let Ok((amount, is_coinbase)) = res {
info!(LOGGER, "Amount: {}", amount);
let commit = keychain
.commit_with_key_index(BigEndian::read_u64(&commit_id), i as u32)
.expect("commit with key index");
let height = block_outputs.header.height;
let lock_height = if is_coinbase {
height + global::coinbase_maturity()
} else {
0
};
wallet_outputs.push((
commit,
key_id.clone(),
i as u32,
amount,
height,
lock_height,
is_coinbase,
));
// probably don't have to look for indexes greater than this now
*key_iterations = i + *padding;
if *key_iterations > switch_commit_cache.len() {
*key_iterations = switch_commit_cache.len();
}
info!(LOGGER, "Setting max key index to: {}", *key_iterations);
break;
} else {
info!(
LOGGER,
"Unable to retrieve the amount (needs investigating) {:?}",
res,
);
}
}
}
}
@ -229,7 +258,7 @@ pub fn restore(
chain_height
);
let mut switch_commit_cache: Vec<[u8; SWITCH_COMMIT_HASH_SIZE]> = vec![];
let mut switch_commit_cache: Vec<pedersen::Commitment> = vec![];
info!(
LOGGER,
"Building key derivation cache ({}) ...",
@ -237,8 +266,7 @@ pub fn restore(
);
for i in 0..key_derivations {
let switch_commit = keychain.switch_commit_from_index(i as u32).unwrap();
let switch_commit_hash = SwitchCommitHash::from_switch_commit(switch_commit);
switch_commit_cache.push(switch_commit_hash.hash);
switch_commit_cache.push(switch_commit);
}
debug!(LOGGER, "... done");
@ -281,8 +309,9 @@ pub fn restore(
value: output.3,
status: OutputStatus::Unconfirmed,
height: output.4,
lock_height: 0,
is_coinbase: output.5,
lock_height: output.5,
is_coinbase: output.6,
block_hash: Hash::zero(),
});
};
}

View file

@ -16,6 +16,7 @@ use api;
use client;
use checker;
use core::core::{build, Transaction, amount_to_hr_string};
use core::core::hash::Hash;
use core::ser;
use keychain::{BlindingFactor, Identifier, Keychain};
use receiver::TxWrapper;
@ -27,7 +28,6 @@ use util;
/// wallet
/// UTXOs. The destination can be "stdout" (for command line) (currently disabled) or a URL to the
/// recipients wallet receiver (to be implemented).
pub fn issue_send_tx(
config: &WalletConfig,
keychain: &Keychain,
@ -98,9 +98,9 @@ pub fn issue_send_tx(
let res = client::send_partial_tx(&url, &partial_tx);
if let Err(e) = res {
match e {
Error::FeeExceedsAmount {sender_amount, recipient_fee} =>
Error::FeeExceedsAmount {sender_amount, recipient_fee} =>
error!(
LOGGER,
LOGGER,
"Recipient rejected the transfer because transaction fee ({}) exceeded amount ({}).",
amount_to_hr_string(recipient_fee),
amount_to_hr_string(sender_amount)
@ -126,7 +126,7 @@ pub fn issue_send_tx(
let sig_part=keychain.aggsig_calculate_partial_sig(&recp_pub_nonce, tx.fee, tx.lock_height).unwrap();
// Build the next stage, containing sS (and our pubkeys again, for the recipient's convenience)
// Build the next stage, containing sS (and our pubkeys again, for the recipient's convenience)
let mut partial_tx = build_partial_tx(keychain, amount, Some(sig_part), tx);
partial_tx.phase = PartialTxPhase::SenderConfirmation;
@ -134,9 +134,9 @@ pub fn issue_send_tx(
let res = client::send_partial_tx(&url, &partial_tx);
if let Err(e) = res {
match e {
Error::FeeExceedsAmount {sender_amount, recipient_fee} =>
Error::FeeExceedsAmount {sender_amount, recipient_fee} =>
error!(
LOGGER,
LOGGER,
"Recipient rejected the transfer because transaction fee ({}) exceeded amount ({}).",
amount_to_hr_string(recipient_fee),
amount_to_hr_string(sender_amount)
@ -266,7 +266,11 @@ fn inputs_and_change(
// build inputs using the appropriate derived key_ids
for coin in coins {
let key_id = keychain.derive_key_id(coin.n_child)?;
parts.push(build::input(coin.value, key_id));
if coin.is_coinbase {
parts.push(build::coinbase_input(coin.value, coin.block_hash, key_id));
} else {
parts.push(build::input(coin.value, coin.block_hash, key_id));
}
}
// track the output representing our change
@ -284,6 +288,7 @@ fn inputs_and_change(
height: 0,
lock_height: 0,
is_coinbase: false,
block_hash: Hash::zero(),
});
change_key
@ -297,8 +302,10 @@ fn inputs_and_change(
#[cfg(test)]
mod test {
use core::core::build::{input, output, transaction};
use core::core::hash::ZERO_HASH;
use keychain::Keychain;
#[test]
// demonstrate that input.commitment == referenced output.commitment
// based on the public key and amount begin spent
@ -307,8 +314,9 @@ mod test {
let key_id1 = keychain.derive_key_id(1).unwrap();
let (tx1, _) = transaction(vec![output(105, key_id1.clone())], &keychain).unwrap();
let (tx2, _) = transaction(vec![input(105, key_id1.clone())], &keychain).unwrap();
let (tx2, _) = transaction(vec![input(105, ZERO_HASH, key_id1.clone())], &keychain).unwrap();
assert_eq!(tx1.outputs[0].features, tx2.inputs[0].features);
assert_eq!(tx1.outputs[0].commitment(), tx2.inputs[0].commitment());
}
}

View file

@ -33,6 +33,7 @@ use tokio_retry::strategy::FibonacciBackoff;
use api;
use core::consensus;
use core::core::{transaction, Transaction};
use core::core::hash::Hash;
use core::ser;
use keychain;
use util;
@ -126,12 +127,20 @@ impl From<serde_json::Error> for Error {
}
}
// TODO - rethink this, would be nice not to have to worry about
// low level hex conversion errors like this
impl From<num::ParseIntError> for Error {
fn from(_: num::ParseIntError) -> Error {
Error::Format("Invalid hex".to_string())
}
}
impl From<ser::Error> for Error {
fn from(e: ser::Error) -> Error {
Error::Format(e.to_string())
}
}
impl From<api::Error> for Error {
fn from(e: api::Error) -> Error {
Error::Node(e)
@ -236,6 +245,8 @@ pub struct OutputData {
pub lock_height: u64,
/// Is this a coinbase output? Is it subject to coinbase locktime?
pub is_coinbase: bool,
/// Hash of the block this output originated from.
pub block_hash: Hash,
}
impl OutputData {
@ -252,7 +263,7 @@ impl OutputData {
pub fn num_confirmations(&self, current_height: u64) -> u64 {
if self.status == OutputStatus::Unconfirmed {
0
} else if self.status == OutputStatus::Spent && self.height == 0 {
} else if self.height == 0 {
0
} else {
// if an output has height n and we are at block n