Mirror of https://github.com/mimblewimble/grin.git (synced 2025-01-20 19:11:08 +03:00)
Merkle Proofs (#716)
* family_branch() to recursively call family() up the branch; todo: we hit a peak, then we need to get to the root somehow; actually get the hashes to build the proof
* wip
* some additional testing around merkle tree branches
* track left/right branch for each sibling as we build the merkle path up
* MerkleProof and basic (incomplete) verify fn
* I think a MerkleProof verifies correctly now; need to test on a test case with multiple peaks
* basic pmmr merkle proof working
* MerkleProof now serializable/deserializable
* coinbase maturity via merkle proof basically working
* ser/deser merkle proof into hex in api and wallet.dat
* cleanup
* wip - temporarily saving merkle proofs to the commit index
* assert merkle proof in store matches the rewound version; there are cases where it does not...
* commit
* commit
* can successfully rewind the output PMMR and generate a Merkle proof; need to fix the tests up now, clean up the code, and add docs for functions etc.
* core tests passing
* fixup chain tests using merkle proofs
* pool tests working with merkle proofs
* api tests working with merkle proof
* fix the broken compact block hashing behavior; made nonce for short_ids explicit to help with this
* cleanup and comment as necessary
* cleanup variety of TODOs
This commit is contained in:
parent f2d709cb01
commit cc12798d7a

23 changed files with 1260 additions and 528 deletions
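Taken together, the changes below mean a wallet spending a coinbase output must now attach a Merkle proof obtained from the chain. A minimal sketch of the new end-to-end flow, assembled from the test code in this diff (chain, block, keychain, key ids and amount assumed to be in scope; values illustrative):

    // Identify the coinbase output and ask the chain for its Merkle proof
    // (internally this rewinds the output PMMR to the output's block).
    let out_id = OutputIdentifier::from_output(&block.outputs[0]);
    let merkle_proof = chain.get_merkle_proof(&out_id, &block).unwrap();

    // The proof (and the block hash) now travel with the coinbase input.
    let tx = build::transaction(
        vec![
            build::coinbase_input(amount, block.hash(), merkle_proof, key_id1.clone()),
            build::output(amount - 2, key_id2.clone()),
            build::with_fee(2),
        ],
        &keychain,
    ).unwrap();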
@@ -128,7 +128,7 @@ impl UtxoHandler {
 				commitments.is_empty() || commitments.contains(&output.commit)
 			})
 			.map(|output| {
-				OutputPrintable::from_output(output, w(&self.chain), include_proof)
+				OutputPrintable::from_output(output, w(&self.chain), &block, include_proof)
 			})
 			.collect();
 		BlockOutputs {
@@ -1,4 +1,4 @@
-// Copyright 2016 The Grin Developers
+// Copyright 2018 The Grin Developers
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.

@@ -16,11 +16,11 @@ use std::sync::Arc;

 use core::{core, ser};
 use core::core::hash::Hashed;
+use core::core::pmmr::MerkleProof;
 use core::core::SwitchCommitHash;
 use chain;
 use p2p;
 use util;
 use util::LOGGER;
 use util::secp::pedersen;
-use util::secp::constants::MAX_PROOF_SIZE;
 use serde;

@@ -226,12 +226,15 @@ pub struct OutputPrintable {
 	pub proof: Option<pedersen::RangeProof>,
 	/// Rangeproof hash (as hex string)
 	pub proof_hash: String,
+
+	pub merkle_proof: Option<MerkleProof>,
 }

 impl OutputPrintable {
 	pub fn from_output(
 		output: &core::Output,
 		chain: Arc<chain::Chain>,
+		block: &core::Block,
 		include_proof: bool,
 	) -> OutputPrintable {
 		let output_type =

@@ -250,6 +253,17 @@ impl OutputPrintable {
 			None
 		};

+		// Get the Merkle proof for all unspent coinbase outputs (to verify maturity on spend).
+		// We obtain the Merkle proof by rewinding the PMMR.
+		// We require the rewind() to be stable even after the PMMR is pruned and compacted
+		// so we can still recreate the necessary proof.
+		let mut merkle_proof = None;
+		if output.features.contains(core::transaction::OutputFeatures::COINBASE_OUTPUT)
+			&& !spent
+		{
+			merkle_proof = chain.get_merkle_proof(&out_id, &block).ok()
+		};
+
 		OutputPrintable {
 			output_type,
 			commit: output.commit,

@@ -257,6 +271,7 @@
 			spent,
 			proof,
 			proof_hash: util::to_hex(output.proof.hash().to_vec()),
+			merkle_proof,
 		}
 	}

@@ -277,13 +292,17 @@ impl OutputPrintable {
 impl serde::ser::Serialize for OutputPrintable {
 	fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where
 		S: serde::ser::Serializer {
-		let mut state = serializer.serialize_struct("OutputPrintable", 6)?;
+		let mut state = serializer.serialize_struct("OutputPrintable", 7)?;
 		state.serialize_field("output_type", &self.output_type)?;
 		state.serialize_field("commit", &util::to_hex(self.commit.0.to_vec()))?;
 		state.serialize_field("switch_commit_hash", &self.switch_commit_hash.to_hex())?;
 		state.serialize_field("spent", &self.spent)?;
 		state.serialize_field("proof", &self.proof)?;
 		state.serialize_field("proof_hash", &self.proof_hash)?;
+
+		let hex_merkle_proof = &self.merkle_proof.clone().map(|x| x.to_hex());
+		state.serialize_field("merkle_proof", &hex_merkle_proof)?;
+
 		state.end()
 	}
 }

@@ -299,7 +318,8 @@ impl<'de> serde::de::Deserialize<'de> for OutputPrintable {
 			SwitchCommitHash,
 			Spent,
 			Proof,
-			ProofHash
+			ProofHash,
+			MerkleProof
 		}

 		struct OutputPrintableVisitor;

@@ -319,6 +339,7 @@ impl<'de> serde::de::Deserialize<'de> for OutputPrintable {
 				let mut spent = None;
 				let mut proof = None;
 				let mut proof_hash = None;
+				let mut merkle_proof = None;

 				while let Some(key) = map.next_key()? {
 					match key {

@@ -365,6 +386,16 @@ impl<'de> serde::de::Deserialize<'de> for OutputPrintable {
 						Field::ProofHash => {
 							no_dup!(proof_hash);
 							proof_hash = Some(map.next_value()?)
 						},
+						Field::MerkleProof => {
+							no_dup!(merkle_proof);
+							if let Some(hex) = map.next_value::<Option<String>>()? {
+								if let Ok(res) = MerkleProof::from_hex(&hex) {
+									merkle_proof = Some(res);
+								} else {
+									merkle_proof = Some(MerkleProof::empty());
+								}
+							}
+						}
 					}
 				}

@@ -375,7 +406,8 @@ impl<'de> serde::de::Deserialize<'de> for OutputPrintable {
 					switch_commit_hash: switch_commit_hash.unwrap(),
 					spent: spent.unwrap(),
 					proof: proof,
-					proof_hash: proof_hash.unwrap()
+					proof_hash: proof_hash.unwrap(),
+					merkle_proof: merkle_proof,
 				})
 			}
 		}

@@ -498,7 +530,7 @@ impl BlockPrintable {
 		let outputs = block
 			.outputs
 			.iter()
-			.map(|output| OutputPrintable::from_output(output, chain.clone(), include_proof))
+			.map(|output| OutputPrintable::from_output(output, chain.clone(), &block, include_proof))
 			.collect();
 		let kernels = block
 			.kernels

@@ -532,10 +564,11 @@ impl CompactBlockPrintable {
 		cb: &core::CompactBlock,
 		chain: Arc<chain::Chain>,
 	) -> CompactBlockPrintable {
+		let block = chain.get_block(&cb.hash()).unwrap();
 		let out_full = cb
 			.out_full
 			.iter()
-			.map(|x| OutputPrintable::from_output(x, chain.clone(), false))
+			.map(|x| OutputPrintable::from_output(x, chain.clone(), &block, false))
 			.collect();
 		let kern_full = cb
 			.kern_full

@@ -584,7 +617,8 @@ mod test {
 			\"switch_commit_hash\":\"85daaf11011dc11e52af84ebe78e2f2d19cbdc76000000000000000000000000\",\
 			\"spent\":false,\
 			\"proof\":null,\
-			\"proof_hash\":\"ed6ba96009b86173bade6a9227ed60422916593fa32dd6d78b25b7a4eeef4946\"\
+			\"proof_hash\":\"ed6ba96009b86173bade6a9227ed60422916593fa32dd6d78b25b7a4eeef4946\",\
+			\"merkle_proof\":null\
 			}";
 		let deserialized: OutputPrintable = serde_json::from_str(&hex_output).unwrap();
 		let serialized = serde_json::to_string(&deserialized).unwrap();
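For API clients, the proof does not travel as a raw JSON structure but as a hex string: the serializer above calls to_hex() and the deserializer accepts the same form back via from_hex(). A round-trip sketch, assuming some proof: MerkleProof is in scope:

    let hex = proof.to_hex();
    let restored = MerkleProof::from_hex(&hex).unwrap();
    assert_eq!(restored, proof);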
@@ -20,12 +20,11 @@ use std::fs::File;
 use std::sync::{Arc, Mutex, RwLock};
 use std::time::{Duration, Instant};

-use core::core::{Input, OutputIdentifier, OutputStoreable, TxKernel};
+use core::core::{Block, BlockHeader, Input, OutputFeatures, OutputIdentifier, OutputStoreable, TxKernel};
 use core::core::hash::{Hash, Hashed};
-use core::global;
-
-use core::core::{Block, BlockHeader};
+use core::core::pmmr::MerkleProof;
 use core::core::target::Difficulty;
+use core::global;
 use grin_store::Error::NotFoundErr;
 use pipe;
 use store;

@@ -388,7 +387,7 @@ impl Chain {
 	/// Return an error if the output does not exist or has been spent.
 	/// This querying is done in a way that is consistent with the current chain state,
 	/// specifically the current winning (valid, most work) fork.
-	pub fn is_unspent(&self, output_ref: &OutputIdentifier) -> Result<(), Error> {
+	pub fn is_unspent(&self, output_ref: &OutputIdentifier) -> Result<Hash, Error> {
 		let mut sumtrees = self.sumtrees.write().unwrap();
 		sumtrees.is_unspent(output_ref)
 	}

@@ -406,8 +405,14 @@ impl Chain {
 	/// This only applies to inputs spending coinbase outputs.
 	/// An input spending a non-coinbase output will always pass this check.
 	pub fn is_matured(&self, input: &Input, height: u64) -> Result<(), Error> {
-		let mut sumtrees = self.sumtrees.write().unwrap();
-		sumtrees.is_matured(input, height)
+		if input.features.contains(OutputFeatures::COINBASE_OUTPUT) {
+			let mut sumtrees = self.sumtrees.write().unwrap();
+			let output = OutputIdentifier::from_input(&input);
+			let hash = sumtrees.is_unspent(&output)?;
+			let header = self.get_block_header(&input.block_hash())?;
+			input.verify_maturity(hash, &header, height)?;
+		}
+		Ok(())
 	}

 	/// Sets the sumtree roots on a brand new block by applying the block on the

@@ -432,6 +437,22 @@ impl Chain {
 		Ok(())
 	}

+	/// Return a pre-built Merkle proof for the given commitment from the store.
+	pub fn get_merkle_proof(
+		&self,
+		output: &OutputIdentifier,
+		block: &Block,
+	) -> Result<MerkleProof, Error> {
+		let mut sumtrees = self.sumtrees.write().unwrap();
+
+		let merkle_proof = sumtree::extending(&mut sumtrees, |extension| {
+			extension.force_rollback();
+			extension.merkle_proof_via_rewind(output, block)
+		})?;
+
+		Ok(merkle_proof)
+	}
+
 	/// Returns current sumtree roots
 	pub fn get_sumtree_roots(
 		&self,

@@ -613,8 +634,10 @@ impl Chain {
 		store::DifficultyIter::from(head.last_block_h, self.store.clone())
 	}

-	/// Check whether we have a block without reading it
-	pub fn block_exists(&self, h: Hash) -> Result<bool, Error> {
-		self.store.block_exists(&h).map_err(|e| Error::StoreErr(e, "chain block exists".to_owned()))
-	}
+	/// Check whether we have a block without reading it
+	pub fn block_exists(&self, h: Hash) -> Result<bool, Error> {
+		self.store
+			.block_exists(&h)
+			.map_err(|e| Error::StoreErr(e, "chain block exists".to_owned()))
+	}
 }
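Note the shape of the reworked is_matured: everything it needs now hangs off the input itself (its block_hash() and Merkle proof) plus the output MMR, so the full historical block no longer has to be fetched. A caller-side sketch, assuming a chain and an input at the current chain height:

    // Returns Ok(()) immediately for non-coinbase inputs; for coinbase inputs
    // it confirms the output is unspent and that verify_maturity() passes
    // against the header of the block named by the input.
    chain.is_matured(&input, height)?;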
@@ -1,3 +1,4 @@
+// Copyright 2018 The Grin Developers
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.

@@ -27,7 +28,7 @@ use util::secp::pedersen::{RangeProof, Commitment};
 use core::consensus::reward;
 use core::core::{Block, BlockHeader, Input, Output, OutputIdentifier,
 	OutputFeatures, OutputStoreable, TxKernel};
-use core::core::pmmr::{self, PMMR};
+use core::core::pmmr::{self, PMMR, MerkleProof};
 use core::core::hash::{Hash, Hashed};
 use core::ser::{self, PMMRable};

@@ -106,10 +107,10 @@ impl SumTrees {
 		})
 	}

-	/// Check is an output is unspent.
+	/// Check if an output is unspent.
 	/// We look in the index to find the output MMR pos.
 	/// Then we check the entry in the output MMR and confirm the hash matches.
-	pub fn is_unspent(&mut self, output_id: &OutputIdentifier) -> Result<(), Error> {
+	pub fn is_unspent(&mut self, output_id: &OutputIdentifier) -> Result<Hash, Error> {
 		match self.commit_index.get_output_pos(&output_id.commit) {
 			Ok(pos) => {
 				let output_pmmr:PMMR<OutputStoreable, _> = PMMR::at(

@@ -117,11 +118,9 @@ impl SumTrees {
 					self.utxo_pmmr_h.last_pos,
 				);
 				if let Some((hash, _)) = output_pmmr.get(pos, false) {
-					println!("Getting output ID hash");
 					if hash == output_id.hash() {
-						Ok(())
+						Ok(hash)
 					} else {
-						println!("MISMATCH BECAUSE THE BLOODY THING MISMATCHES");
 						Err(Error::SumTreeErr(format!("sumtree hash mismatch")))
 					}
 				} else {

@@ -133,35 +132,6 @@ impl SumTrees {
 		}
 	}

-	/// Check the output being spent by the input has sufficiently matured.
-	/// This only applies for coinbase outputs being spent (1,000 blocks).
-	/// Non-coinbase outputs will always pass this check.
-	/// For a coinbase output we find the block by the block hash provided in the input
-	/// and check coinbase maturty based on the height of this block.
-	pub fn is_matured(
-		&mut self,
-		input: &Input,
-		height: u64,
-	) -> Result<(), Error> {
-		// We should never be in a situation where we are checking maturity rules
-		// if the output is already spent (this should have already been checked).
-		let output = OutputIdentifier::from_input(&input);
-		assert!(self.is_unspent(&output).is_ok());
-
-		// At this point we can be sure the input is spending the output
-		// it claims to be spending, and that it is coinbase or non-coinbase.
-		// If we are spending a coinbase output then go find the block
-		// and check the coinbase maturity rule is being met.
-		if input.features.contains(OutputFeatures::COINBASE_OUTPUT) {
-			let block_hash = &input.out_block
-				.expect("input spending coinbase output must have a block hash");
-			let block = self.commit_index.get_block(&block_hash)?;
-			block.verify_coinbase_maturity(&input, height)
-				.map_err(|_| Error::ImmatureCoinbase)?;
-		}
-		Ok(())
-	}
-
 	/// returns the last N nodes inserted into the tree (i.e. the 'bottom'
 	/// nodes at level 0
 	/// TODO: These need to return the actual data from the flat-files instead of hashes now

@@ -298,7 +268,6 @@ impl<'a> Extension<'a> {
 			commit_index: commit_index,
 			new_output_commits: HashMap::new(),
 			new_kernel_excesses: HashMap::new(),
-
 			rollback: false,
 		}
 	}

@@ -335,14 +304,17 @@ impl<'a> Extension<'a> {
 		for kernel in &b.kernels {
 			self.apply_kernel(kernel)?;
 		}

 		Ok(())
 	}

 	fn save_pos_index(&self) -> Result<(), Error> {
+		// store all new output pos in the index
 		for (commit, pos) in &self.new_output_commits {
 			self.commit_index.save_output_pos(commit, *pos)?;
 		}
+
+		// store all new kernel pos in the index
 		for (excess, pos) in &self.new_kernel_excesses {
 			self.commit_index.save_kernel_pos(excess, *pos)?;
 		}

@@ -364,16 +336,10 @@ impl<'a> Extension<'a> {
 				return Err(Error::SumTreeErr(format!("output pmmr hash mismatch")));
 			}

-			// At this point we can be sure the input is spending the output
-			// it claims to be spending, and it is coinbase or non-coinbase.
-			// If we are spending a coinbase output then go find the block
-			// and check the coinbase maturity rule is being met.
+			// check coinbase maturity with the Merkle Proof on the input
 			if input.features.contains(OutputFeatures::COINBASE_OUTPUT) {
-				let block_hash = &input.out_block
-					.expect("input spending coinbase output must have a block hash");
-				let block = self.commit_index.get_block(&block_hash)?;
-				block.verify_coinbase_maturity(&input, height)
-					.map_err(|_| Error::ImmatureCoinbase)?;
+				let header = self.commit_index.get_block_header(&input.block_hash())?;
+				input.verify_maturity(read_hash, &header, height)?;
 			}
 		}

@@ -444,6 +410,28 @@ impl<'a> Extension<'a> {
 		Ok(())
 	}

+	/// Build a Merkle proof for the given output and the block by
+	/// rewinding the MMR to the last pos of the block.
+	/// Note: this relies on the MMR being stable even after pruning/compaction.
+	/// We need the hash of each sibling pos from the pos up to the peak
+	/// including the sibling leaf node which may have been removed.
+	pub fn merkle_proof_via_rewind(
+		&mut self,
+		output: &OutputIdentifier,
+		block: &Block,
+	) -> Result<MerkleProof, Error> {
+		debug!(LOGGER, "sumtree: merkle_proof_via_rewind: rewinding to block {:?}", block.hash());
+		// rewind to the specified block
+		self.rewind(block)?;
+		// then calculate the Merkle Proof based on the known pos
+		let pos = self.get_output_pos(&output.commit)?;
+		let merkle_proof = self.utxo_pmmr
+			.merkle_proof(pos)
+			.map_err(&Error::SumTreeErr)?;
+
+		Ok(merkle_proof)
+	}
+
 	/// Rewinds the MMRs to the provided block, using the last output and
 	/// last kernel of the block we want to rewind to.
 	pub fn rewind(&mut self, block: &Block) -> Result<(), Error> {
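One design point worth noting here: merkle_proof_via_rewind mutates the PMMR in place, so chain::get_merkle_proof (in the chain.rs hunk above) runs it inside a sumtree extension with force_rollback() set, guaranteeing the rewind is discarded once the proof has been extracted. Schematically:

    let merkle_proof = sumtree::extending(&mut sumtrees, |extension| {
        // never commit this extension; the rewind is only for reading
        extension.force_rollback();
        extension.merkle_proof_via_rewind(output, block)
    })?;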
@@ -1,4 +1,4 @@
-// Copyright 2016 The Grin Developers
+// Copyright 2018 The Grin Developers
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.

@@ -80,8 +80,6 @@ pub enum Error {
 	DuplicateCommitment(Commitment),
 	/// A kernel with that excess commitment already exists (should be unique)
 	DuplicateKernel(Commitment),
-	/// coinbase can only be spent after it has matured (n blocks)
-	ImmatureCoinbase,
 	/// output not found
 	OutputNotFound,
 	/// output spent
@@ -26,7 +26,7 @@ use std::sync::Arc;

 use chain::Chain;
 use chain::types::*;
-use core::core::{Block, BlockHeader, Transaction, OutputIdentifier, build};
+use core::core::{Block, BlockHeader, Transaction, OutputIdentifier, OutputFeatures, build};
 use core::core::hash::Hashed;
 use core::core::target::Difficulty;
 use core::consensus;

@@ -251,8 +251,12 @@ fn spend_in_fork() {
 	// so we can spend the coinbase later
 	let b = prepare_block(&kc, &fork_head, &chain, 2);
 	let block_hash = b.hash();
+	let out_id = OutputIdentifier::from_output(&b.outputs[0]);
+	assert!(out_id.features.contains(OutputFeatures::COINBASE_OUTPUT));
 	fork_head = b.header.clone();
-	chain.process_block(b, chain::Options::SKIP_POW).unwrap();
+	chain.process_block(b.clone(), chain::Options::SKIP_POW).unwrap();
+
+	let merkle_proof = chain.get_merkle_proof(&out_id, &b).unwrap();

 	println!("First block");

@@ -270,7 +274,12 @@ fn spend_in_fork() {

 	let tx1 = build::transaction(
 		vec![
-			build::coinbase_input(consensus::REWARD, block_hash, kc.derive_key_id(2).unwrap()),
+			build::coinbase_input(
+				consensus::REWARD,
+				block_hash,
+				merkle_proof,
+				kc.derive_key_id(2).unwrap(),
+			),
 			build::output(consensus::REWARD - 20000, kc.derive_key_id(30).unwrap()),
 			build::with_fee(20000),
 		],

@@ -288,7 +297,7 @@ fn spend_in_fork() {

 	let tx2 = build::transaction(
 		vec![
-			build::input(consensus::REWARD - 20000, next.hash(), kc.derive_key_id(30).unwrap()),
+			build::input(consensus::REWARD - 20000, kc.derive_key_id(30).unwrap()),
 			build::output(consensus::REWARD - 40000, kc.derive_key_id(31).unwrap()),
 			build::with_fee(20000),
 		],
@@ -27,6 +27,7 @@ use chain::types::*;
 use core::core::build;
 use core::core::target::Difficulty;
 use core::core::transaction;
+use core::core::OutputIdentifier;
 use core::consensus;
 use core::global;
 use core::global::ChainTypes;

@@ -96,16 +97,21 @@ fn test_coinbase_maturity() {
 	).unwrap();

 	assert_eq!(block.outputs.len(), 1);
+	let coinbase_output = block.outputs[0];
 	assert!(
-		block.outputs[0]
+		coinbase_output
 			.features
 			.contains(transaction::OutputFeatures::COINBASE_OUTPUT)
 	);

+	let out_id = OutputIdentifier::from_output(&coinbase_output);
+
 	// we will need this later when we want to spend the coinbase output
 	let block_hash = block.hash();

-	chain.process_block(block, chain::Options::MINE).unwrap();
+	chain.process_block(block.clone(), chain::Options::MINE).unwrap();
+
+	let merkle_proof = chain.get_merkle_proof(&out_id, &block).unwrap();

 	let prev = chain.head_header().unwrap();

@@ -118,7 +124,12 @@ fn test_coinbase_maturity() {
 	// this is not a valid tx as the coinbase output cannot be spent yet
 	let coinbase_txn = build::transaction(
 		vec![
-			build::coinbase_input(amount, block_hash, key_id1.clone()),
+			build::coinbase_input(
+				amount,
+				block_hash,
+				merkle_proof.clone(),
+				key_id1.clone(),
+			),
 			build::output(amount - 2, key_id2.clone()),
 			build::with_fee(2),
 		],

@@ -139,7 +150,7 @@ fn test_coinbase_maturity() {
 	block.header.difficulty = difficulty.clone();

 	match chain.set_sumtree_roots(&mut block, false) {
-		Err(Error::ImmatureCoinbase) => (),
+		Err(Error::Transaction(transaction::Error::ImmatureCoinbase)) => (),
 		_ => panic!("expected ImmatureCoinbase error here"),
 	}

@@ -185,7 +196,12 @@ fn test_coinbase_maturity() {

 	let coinbase_txn = build::transaction(
 		vec![
-			build::coinbase_input(amount, block_hash, key_id1.clone()),
+			build::coinbase_input(
+				amount,
+				block_hash,
+				merkle_proof.clone(),
+				key_id1.clone(),
+			),
 			build::output(amount - 2, key_id2.clone()),
 			build::with_fee(2),
 		],

@@ -216,7 +232,6 @@ fn test_coinbase_maturity() {
 	let result = chain.process_block(block, chain::Options::MINE);
 	match result {
 		Ok(_) => (),
-		Err(Error::ImmatureCoinbase) => panic!("we should not get an ImmatureCoinbase here"),
 		Err(_) => panic!("we did not expect an error here"),
 	};
 }
@@ -1,4 +1,4 @@
-// Copyright 2016 The Grin Developers
+// Copyright 2018 The Grin Developers
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.

@@ -22,7 +22,6 @@ use core::{
 	Committed,
 	Input,
 	Output,
-	OutputIdentifier,
 	ShortId,
 	SwitchCommitHash,
 	Proof,

@@ -75,6 +74,8 @@ pub enum Error {
 	/// The lock_height needed to be reached for the coinbase output to mature
 	lock_height: u64,
 	},
+	/// Underlying Merkle proof error
+	MerkleProof,
 	/// Other unspecified error condition
 	Other(String)
 }

@@ -613,13 +614,13 @@ impl Block {
 		let new_inputs = self.inputs
 			.iter()
 			.filter(|inp| !to_cut_through.contains(&inp.commitment()))
-			.map(|&inp| inp)
+			.cloned()
 			.collect::<Vec<_>>();

 		let new_outputs = self.outputs
 			.iter()
 			.filter(|out| !to_cut_through.contains(&out.commitment()))
-			.map(|&out| out)
+			.cloned()
 			.collect::<Vec<_>>();

 		Block {

@@ -642,6 +643,7 @@ impl Block {
 		self.verify_weight()?;
 		self.verify_sorted()?;
 		self.verify_coinbase()?;
+		self.verify_inputs()?;
 		self.verify_kernels()?;
 		Ok(())
 	}

@@ -660,6 +662,26 @@ impl Block {
 		Ok(())
 	}

+	/// We can verify the Merkle proof (for coinbase inputs) here in isolation.
+	/// But we cannot check the following as we need data from the index and the PMMR.
+	/// So we must be sure to check these at the appropriate point during block validation.
+	/// * node is in the correct pos in the PMMR
+	/// * block is the correct one (based on utxo_root from block_header via the index)
+	fn verify_inputs(&self) -> Result<(), Error> {
+		let coinbase_inputs = self.inputs
+			.iter()
+			.filter(|x| x.features.contains(OutputFeatures::COINBASE_OUTPUT));
+
+		for input in coinbase_inputs {
+			let merkle_proof = input.merkle_proof();
+			if !merkle_proof.verify() {
+				return Err(Error::MerkleProof);
+			}
+		}
+
+		Ok(())
+	}
+
 	/// Verifies the sum of input/output commitments match the sum in kernels
 	/// and that all kernel signatures are valid.
 	fn verify_kernels(&self) -> Result<(), Error> {

@@ -753,42 +775,6 @@ impl Block {
 		Ok(())
 	}

-	/// NOTE: this happens during apply_block (not the earlier validate_block)
-	///
-	/// Calculate lock_height as block_height + 1,000
-	/// Confirm height <= lock_height
-	pub fn verify_coinbase_maturity(
-		&self,
-		input: &Input,
-		height: u64,
-	) -> Result<(), Error> {
-		let output = OutputIdentifier::from_input(&input);
-
-		// We should only be calling verify_coinbase_maturity
-		// if the sender claims we are spending a coinbase output
-		// _and_ that we trust this claim.
-		// We should have already confirmed the entry from the MMR exists
-		// and has the expected hash.
-		assert!(output.features.contains(OutputFeatures::COINBASE_OUTPUT));
-
-		if let Some(_) = self.outputs
-			.iter()
-			.find(|x| OutputIdentifier::from_output(&x) == output)
-		{
-			let lock_height = self.header.height + global::coinbase_maturity();
-			if lock_height > height {
-				Err(Error::ImmatureCoinbase{
-					height: height,
-					lock_height: lock_height,
-				})
-			} else {
-				Ok(())
-			}
-		} else {
-			Err(Error::Other(format!("output not found in block")))
-		}
-	}
-
 	/// Builds the blinded output and related signature proof for the block reward.
 	pub fn reward_output(
 		keychain: &keychain::Keychain,

@@ -860,7 +846,6 @@ impl Block {
 #[cfg(test)]
 mod test {
 	use super::*;
-	use core::hash::ZERO_HASH;
 	use core::Transaction;
 	use core::build::{self, input, output, with_fee};
 	use core::test::{tx1i2o, tx2i1o};

@@ -892,7 +877,7 @@ mod test {
 		key_id2: Identifier,
 	) -> Transaction {
 		build::transaction(
-			vec![input(v, ZERO_HASH, key_id1), output(3, key_id2), with_fee(2)],
+			vec![input(v, key_id1), output(3, key_id2), with_fee(2)],
 			&keychain,
 		).unwrap()
 	}

@@ -915,7 +900,7 @@ mod test {
 		}

 		let now = Instant::now();
-		parts.append(&mut vec![input(500000, ZERO_HASH, pks.pop().unwrap()), with_fee(2)]);
+		parts.append(&mut vec![input(500000, pks.pop().unwrap()), with_fee(2)]);
 		let mut tx = build::transaction(parts, &keychain)
 			.unwrap();
 		println!("Build tx: {}", now.elapsed().as_secs());

@@ -952,7 +937,7 @@ mod test {

 		let mut btx1 = tx2i1o();
 		let mut btx2 = build::transaction(
-			vec![input(7, ZERO_HASH, key_id1), output(5, key_id2.clone()), with_fee(2)],
+			vec![input(7, key_id1), output(5, key_id2.clone()), with_fee(2)],
 			&keychain,
 		).unwrap();
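The maturity rule itself is plain arithmetic, unchanged by this refactor: a coinbase output mined at block height h has lock_height = h + coinbase_maturity() (1,000 blocks, per the docs removed above), and any spend at a height below lock_height is immature. A worked instance, purely illustrative:

    let block_height = 2_500u64;
    let lock_height = block_height + 1_000; // global::coinbase_maturity()
    assert_eq!(lock_height, 3_500);
    // a spend attempted at height 3,499 fails the maturity check;
    // height 3,500 is the first height at which the output is spendable
    assert!(3_499 < lock_height);
    assert!(3_500 >= lock_height);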
@@ -1,4 +1,4 @@
-// Copyright 2017 The Grin Developers
+// Copyright 2018 The Grin Developers
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.

@@ -29,6 +29,7 @@ use util::{secp, kernel_sig_msg};

 use core::{Transaction, TxKernel, Input, Output, OutputFeatures, ProofMessageElements, SwitchCommitHash};
 use core::hash::Hash;
+use core::pmmr::MerkleProof;
 use keychain;
 use keychain::{Keychain, BlindSum, BlindingFactor, Identifier};
 use util::LOGGER;

@@ -47,7 +48,8 @@ pub type Append = for<'a> Fn(&'a mut Context, (Transaction, TxKernel, BlindSum))
 fn build_input(
 	value: u64,
 	features: OutputFeatures,
-	out_block: Option<Hash>,
+	block_hash: Option<Hash>,
+	merkle_proof: Option<MerkleProof>,
 	key_id: Identifier,
 ) -> Box<Append> {
 	Box::new(move |build, (tx, kern, sum)| -> (Transaction, TxKernel, BlindSum) {

@@ -55,7 +57,8 @@ fn build_input(
 		let input = Input::new(
 			features,
 			commit,
-			out_block,
+			block_hash.clone(),
+			merkle_proof.clone(),
 		);
 		(tx.with_input(input), kern, sum.sub_key_id(key_id.clone()))
 	})

@@ -65,22 +68,22 @@ fn build_input(
 /// being built.
 pub fn input(
 	value: u64,
-	out_block: Hash,
 	key_id: Identifier,
 ) -> Box<Append> {
 	debug!(LOGGER, "Building input (spending regular output): {}, {}", value, key_id);
-	build_input(value, OutputFeatures::DEFAULT_OUTPUT, Some(out_block), key_id)
+	build_input(value, OutputFeatures::DEFAULT_OUTPUT, None, None, key_id)
 }

 /// Adds a coinbase input spending a coinbase output.
 /// We will use the block hash to verify coinbase maturity.
 pub fn coinbase_input(
 	value: u64,
-	out_block: Hash,
+	block_hash: Hash,
+	merkle_proof: MerkleProof,
 	key_id: Identifier,
 ) -> Box<Append> {
 	debug!(LOGGER, "Building input (spending coinbase): {}, {}", value, key_id);
-	build_input(value, OutputFeatures::COINBASE_OUTPUT, Some(out_block), key_id)
+	build_input(value, OutputFeatures::COINBASE_OUTPUT, Some(block_hash), Some(merkle_proof), key_id)
 }

 /// Adds an output with the provided value and key identifier from the

@@ -261,7 +264,6 @@ pub fn transaction_with_offset(
 #[cfg(test)]
 mod test {
 	use super::*;
-	use core::hash::ZERO_HASH;

 	#[test]
 	fn blind_simple_tx() {

@@ -272,8 +274,8 @@ mod test {

 		let tx = transaction(
 			vec![
-				input(10, ZERO_HASH, key_id1),
-				input(12, ZERO_HASH, key_id2),
+				input(10, key_id1),
+				input(12, key_id2),
 				output(20, key_id3),
 				with_fee(2),
 			],

@@ -292,8 +294,8 @@ mod test {

 		let tx = transaction_with_offset(
 			vec![
-				input(10, ZERO_HASH, key_id1),
-				input(12, ZERO_HASH, key_id2),
+				input(10, key_id1),
+				input(12, key_id2),
 				output(20, key_id3),
 				with_fee(2),
 			],

@@ -310,7 +312,7 @@ mod test {
 		let key_id2 = keychain.derive_key_id(2).unwrap();

 		let tx = transaction(
-			vec![input(6, ZERO_HASH, key_id1), output(2, key_id2), with_fee(4)],
+			vec![input(6, key_id1), output(2, key_id2), with_fee(4)],
 			&keychain,
 		).unwrap();
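The knock-on change for ordinary spends: input() has dropped its block-hash parameter entirely, since only coinbase inputs need a block hash and Merkle proof. Building a plain (non-coinbase) transaction under the new signatures, assuming a keychain and derived key ids:

    let tx = build::transaction(
        vec![
            build::input(10, key_id1),
            build::output(8, key_id2),
            build::with_fee(2),
        ],
        &keychain,
    ).unwrap();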
@@ -250,7 +250,7 @@ mod test {
 		// blinding should fail as signing with a zero r*G shouldn't work
 		build::transaction(
 			vec![
-				input(10, ZERO_HASH, key_id1.clone()),
+				input(10, key_id1.clone()),
 				output(9, key_id1.clone()),
 				with_fee(1),
 			],

@@ -309,7 +309,7 @@ mod test {
 		// first build a valid tx with corresponding blinding factor
 		let tx = build::transaction(
 			vec![
-				input(10, ZERO_HASH, key_id1),
+				input(10, key_id1),
 				output(5, key_id2),
 				output(3, key_id3),
 				with_fee(2),

@@ -373,7 +373,7 @@ mod test {

 		let tx = build::transaction(
 			vec![
-				input(75, ZERO_HASH, key_id1),
+				input(75, key_id1),
 				output(42, key_id2),
 				output(32, key_id3),
 				with_fee(1),

@@ -480,7 +480,7 @@ mod test {
 		let (tx_alice, blind_sum) = {
 			// Alice gets 2 of her pre-existing outputs to send 5 coins to Bob, they
 			// become inputs in the new transaction
-			let (in1, in2) = (input(4, ZERO_HASH, key_id1), input(3, ZERO_HASH, key_id2));
+			let (in1, in2) = (input(4, key_id1), input(3, key_id2));

 			// Alice builds her transaction, with change, which also produces the sum
 			// of blinding factors before they're obscured.

@@ -571,7 +571,7 @@ mod test {
 		// and that the resulting block is valid
 		let tx1 = build::transaction(
 			vec![
-				input(5, ZERO_HASH, key_id1.clone()),
+				input(5, key_id1.clone()),
 				output(3, key_id2.clone()),
 				with_fee(2),
 				with_lock_height(1),

@@ -591,7 +591,7 @@ mod test {
 		// now try adding a timelocked tx where lock height is greater than current block height
 		let tx1 = build::transaction(
 			vec![
-				input(5, ZERO_HASH, key_id1.clone()),
+				input(5, key_id1.clone()),
 				output(3, key_id2.clone()),
 				with_fee(2),
 				with_lock_height(2),

@@ -635,8 +635,8 @@ mod test {

 		build::transaction_with_offset(
 			vec![
-				input(10, ZERO_HASH, key_id1),
-				input(11, ZERO_HASH, key_id2),
+				input(10, key_id1),
+				input(11, key_id2),
 				output(19, key_id3),
 				with_fee(2),
 			],

@@ -651,7 +651,7 @@ mod test {
 		let key_id2 = keychain.derive_key_id(2).unwrap();

 		build::transaction_with_offset(
-			vec![input(5, ZERO_HASH, key_id1), output(3, key_id2), with_fee(2)],
+			vec![input(5, key_id1), output(3, key_id2), with_fee(2)],
 			&keychain,
 		).unwrap()
 	}

@@ -667,7 +667,7 @@ mod test {

 		build::transaction_with_offset(
 			vec![
-				input(6, ZERO_HASH, key_id1),
+				input(6, key_id1),
 				output(3, key_id2),
 				output(1, key_id3),
 				with_fee(2),
@@ -1,4 +1,4 @@
-// Copyright 2017 The Grin Developers
+// Copyright 2018 The Grin Developers
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.

@@ -36,11 +36,12 @@
 //! a simple Vec or a database.

 use std::clone::Clone;
-use std::ops::Deref;
 use std::marker::PhantomData;

 use core::hash::{Hash, Hashed};
+use ser;
 use ser::{Readable, Reader, Writeable, Writer};
 use ser::PMMRable;
+use util;
 use util::LOGGER;

 /// Storage backend for the MMR, just needs to be indexed by order of insertion.

@@ -50,7 +51,7 @@
 pub trait Backend<T> where
 	T:PMMRable {
 	/// Append the provided Hashes to the backend storage, and optionally an associated
-	/// data element to flatfile storage (for leaf nodes only). The position of the
+	/// data element to flatfile storage (for leaf nodes only). The position of the
 	/// first element of the Vec in the MMR is provided to help the implementation.
 	fn append(&mut self, position: u64, data: Vec<(Hash, Option<T>)>) -> Result<(), String>;

@@ -60,11 +61,14 @@ pub trait Backend<T> where
 	/// occurred (see remove).
 	fn rewind(&mut self, position: u64, index: u32) -> Result<(), String>;

-	/// Get a Hash/Element by insertion position. If include_data is true, will
+	/// Get a Hash/Element by insertion position. If include_data is true, will
 	/// also return the associated data element
 	fn get(&self, position: u64, include_data: bool) -> Option<(Hash, Option<T>)>;

-	/// Remove Hashes/Data by insertion position. An index is also provided so the
+	/// Get a Hash/Element by original insertion position (ignoring the remove list).
+	fn get_from_file(&self, position: u64) -> Option<Hash>;
+
+	/// Remove HashSums by insertion position. An index is also provided so the
 	/// underlying backend can implement some rollback of positions up to a
 	/// given index (practically the index is a the height of a block that
 	/// triggered removal).
@@ -76,6 +80,169 @@ pub trait Backend<T> where
 	fn get_data_file_path(&self) -> String;
 }

+/// A Merkle proof.
+/// Proves inclusion of an output (node) in the output MMR.
+/// We can use this to prove an output was unspent at the time of a given block
+/// as the root will match the utxo_root of the block header.
+/// The path and left_right can be used to reconstruct the peak hash for a given tree
+/// in the MMR.
+/// The root is the result of hashing all the peaks together.
+#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd, Serialize, Deserialize)]
+pub struct MerkleProof {
+	/// The root hash of the full Merkle tree (in an MMR the hash of all peaks)
+	pub root: Hash,
+	/// The hash of the element in the tree we care about
+	pub node: Hash,
+	/// The full list of peak hashes in the MMR
+	pub peaks: Vec<Hash>,
+	/// The siblings along the path of the tree as we traverse from node to peak
+	pub path: Vec<Hash>,
+	/// Order of siblings (left vs right) matters, so track this here for each path element
+	pub left_right: Vec<bool>,
+}
+
+impl Writeable for MerkleProof {
+	fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ser::Error> {
+		ser_multiwrite!(
+			writer,
+			[write_fixed_bytes, &self.root],
+			[write_fixed_bytes, &self.node],
+			[write_u64, self.peaks.len() as u64],
+
+			// note: path length used for both path and left_right vecs
+			[write_u64, self.path.len() as u64]
+		);
+
+		try!(self.peaks.write(writer));
+		try!(self.path.write(writer));
+
+		// TODO - how to serialize/deserialize these boolean values as bytes?
+		for x in &self.left_right {
+			if *x {
+				try!(writer.write_u8(1));
+			} else {
+				try!(writer.write_u8(0));
+			}
+		}
+
+		Ok(())
+	}
+}
+
+impl Readable for MerkleProof {
+	fn read(reader: &mut Reader) -> Result<MerkleProof, ser::Error> {
+		let root = Hash::read(reader)?;
+		let node = Hash::read(reader)?;
+
+		let (peaks_len, path_len) =
+			ser_multiread!(reader, read_u64, read_u64);
+
+		let mut peaks = Vec::with_capacity(peaks_len as usize);
+		for _ in 0..peaks_len {
+			peaks.push(Hash::read(reader)?);
+		}
+		let mut path = Vec::with_capacity(path_len as usize);
+		for _ in 0..path_len {
+			path.push(Hash::read(reader)?);
+		}
+
+		let left_right_bytes = reader.read_fixed_bytes(path_len as usize)?;
+		let left_right = left_right_bytes.iter().map(|&x| x == 1).collect();
+		Ok(
+			MerkleProof {
+				root,
+				node,
+				peaks,
+				path,
+				left_right,
+			}
+		)
+	}
+}
+
+impl Default for MerkleProof {
+	fn default() -> MerkleProof {
+		MerkleProof::empty()
+	}
+}
+
+impl MerkleProof {
+	/// The "empty" Merkle proof.
+	/// Basically some reasonable defaults. Will not verify successfully.
+	pub fn empty() -> MerkleProof {
+		MerkleProof {
+			root: Hash::zero(),
+			node: Hash::zero(),
+			peaks: vec![],
+			path: vec![],
+			left_right: vec![],
+		}
+	}
+
+	/// Serialize the Merkle proof as a hex string (for api json endpoints)
+	pub fn to_hex(&self) -> String {
+		let mut vec = Vec::new();
+		ser::serialize(&mut vec, &self).expect("serialization failed");
+		util::to_hex(vec)
+	}
+
+	/// Convert hex string representation back to a Merkle proof instance
+	pub fn from_hex(hex: &str) -> Result<MerkleProof, String> {
+		let bytes = util::from_hex(hex.to_string()).unwrap();
+		let res = ser::deserialize(&mut &bytes[..])
+			.map_err(|_| format!("failed to deserialize a Merkle Proof"))?;
+		Ok(res)
+	}
+
+	/// Verify the Merkle proof.
+	/// We do this by verifying the following -
+	/// * inclusion of the node beneath a peak (via the Merkle path/branch of siblings)
+	/// * inclusion of the peak in the "bag of peaks" beneath the root
+	pub fn verify(&self) -> bool {
+		// if we have no further elements in the path
+		// then this proof verifies successfully if our node is
+		// one of the peaks
+		// and the peaks themselves hash to give the root
+		if self.path.len() == 0 {
+			if !self.peaks.contains(&self.node) {
+				return false;
+			}
+
+			let mut bagged = None;
+			for peak in self.peaks.iter().map(|&x| Some(x)) {
+				bagged = match (bagged, peak) {
+					(None, rhs) => rhs,
+					(lhs, None) => lhs,
+					(Some(lhs), Some(rhs)) => Some(lhs.hash_with(rhs)),
+				}
+			}
+			return bagged == Some(self.root);
+		}
+
+		let mut path = self.path.clone();
+		let sibling = path.remove(0);
+		let mut left_right = self.left_right.clone();
+
+		// hash our node and sibling together (noting left/right position of the sibling)
+		let parent = if left_right.remove(0) {
+			self.node.hash_with(sibling)
+		} else {
+			sibling.hash_with(self.node)
+		};
+
+		let proof = MerkleProof {
+			root: self.root,
+			node: parent,
+			peaks: self.peaks.clone(),
+			path,
+			left_right,
+		};
+
+		proof.verify()
+	}
+}
+

 /// Prunable Merkle Mountain Range implementation. All positions within the tree
 /// start at 1 as they're postorder tree traversal positions rather than array
 /// indices.
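The recursion in verify() is easiest to see on a toy instance. Below is a standalone sketch of the same algorithm using u64 "hashes" (hash_with here is an arbitrary stand-in for grin's Hash::hash_with, purely illustrative):

    // Toy stand-in for Hash::hash_with: combine two "hashes".
    fn hash_with(a: u64, b: u64) -> u64 {
        a.wrapping_mul(31).wrapping_add(b)
    }

    // Same shape as MerkleProof::verify(): climb the path to a peak,
    // then fold ("bag") the peaks left-to-right and compare to the root.
    fn verify(root: u64, mut node: u64, peaks: &[u64], path: &[(u64, bool)]) -> bool {
        for &(sibling, node_is_left) in path {
            node = if node_is_left {
                hash_with(node, sibling)
            } else {
                hash_with(sibling, node)
            };
        }
        if !peaks.contains(&node) {
            return false;
        }
        let mut bagged = None;
        for &peak in peaks {
            bagged = Some(match bagged {
                None => peak,
                Some(lhs) => hash_with(lhs, peak),
            });
        }
        bagged == Some(root)
    }

    fn main() {
        // two leaves (1 and 2) under a single peak; the peak is also the root
        let peak = hash_with(1, 2);
        assert!(verify(peak, 1, &[peak], &[(2, true)]));
        assert!(!verify(peak, 2, &[peak], &[(1, true)])); // wrong sibling order fails
    }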
@@ -108,7 +275,7 @@ where
 	}
 }

-	/// Build a new prunable Merkle Mountain Range pre-initlialized until
+	/// Build a new prunable Merkle Mountain Range pre-initialized until
 	/// last_pos
 	/// with the provided backend.
 	pub fn at(backend: &'a mut B, last_pos: u64) -> PMMR<T, B> {

@@ -127,7 +294,6 @@ where
 			.map(|pi| self.backend.get(pi, false))
 			.collect();

-
 		let mut ret = None;
 		for peak in peaks {
 			ret = match (ret, peak) {

@@ -139,6 +305,56 @@ where
 		ret.expect("no root, invalid tree").0
 	}

+	/// Build a Merkle proof for the element at the given position in the MMR
+	pub fn merkle_proof(&self, pos: u64) -> Result<MerkleProof, String> {
+		debug!(LOGGER, "merkle_proof (via rewind) - {}, last_pos {}", pos, self.last_pos);
+
+		if !is_leaf(pos) {
+			return Err(format!("not a leaf at pos {}", pos));
+		}
+
+		let root = self.root();
+
+		let node = self.get(pos, false)
+			.ok_or(format!("no element at pos {}", pos))?
+			.0;
+
+		let family_branch = family_branch(pos, self.last_pos);
+		let left_right = family_branch
+			.iter()
+			.map(|x| x.2)
+			.collect::<Vec<_>>();
+
+		let path = family_branch
+			.iter()
+			.filter_map(|x| {
+				// we want to find siblings here even if they
+				// have been "removed" from the MMR
+				// TODO - pruned/compacted MMR will need to maintain hashes of removed nodes
+				let res = self.get_from_file(x.1);
+				res
+			})
+			.collect::<Vec<_>>();
+
+		let peaks = peaks(self.last_pos)
+			.iter()
+			.filter_map(|&x| {
+				let res = self.get_from_file(x);
+				res
+			})
+			.collect::<Vec<_>>();
+
+		let proof = MerkleProof {
+			root,
+			node,
+			path,
+			peaks,
+			left_right,
+		};
+
+		Ok(proof)
+	}
+
 	/// Push a new element into the MMR. Computes new related peaks at
 	/// the same time if applicable.
 	pub fn push(&mut self, elmt: T) -> Result<u64, String> {
@@ -206,7 +422,7 @@ where
 		let mut to_prune = vec![];
 		let mut current = position;
 		while current + 1 < self.last_pos {
-			let (parent, sibling) = family(current);
+			let (parent, sibling, _) = family(current);
 			if parent > self.last_pos {
 				// can't prune when our parent isn't here yet
 				break;

@@ -236,6 +452,13 @@ where
 		}
 	}

+	fn get_from_file(&self, position: u64) -> Option<Hash> {
+		if position > self.last_pos {
+			None
+		} else {
+			self.backend.get_from_file(position)
+		}
+	}

 	/// Helper function to get the last N nodes inserted, i.e. the last
 	/// n nodes along the bottom of the tree

@@ -275,7 +498,8 @@ where
 		if bintree_postorder_height(n) > 0 {
 			if let Some(hs) = self.get(n, false) {
 				// take the left and right children, if they exist
-				let left_pos = bintree_move_down_left(n).unwrap();
+				let left_pos = bintree_move_down_left(n)
+					.ok_or(format!("left_pos not found"))?;
 				let right_pos = bintree_jump_right_sibling(left_pos);

 				if let Some(left_child_hs) = self.get(left_pos, false) {

@@ -283,7 +507,7 @@ where
 					// add hashes and compare
 					if left_child_hs.0+right_child_hs.0 != hs.0 {
 						return Err(format!("Invalid MMR, hash of parent at {} does \
-							not match children.", n));
+							not match children.", n));
 					}
 				}
 			}

@@ -332,72 +556,6 @@ where
 	}
 }

-/// Simple MMR backend implementation based on a Vector. Pruning does not
-/// compact the Vector itself but still frees the reference to the
-/// underlying Hash.
-#[derive(Clone)]
-pub struct VecBackend<T>
-	where T:PMMRable {
-	/// Backend elements
-	pub elems: Vec<Option<(Hash, Option<T>)>>,
-}
-
-impl <T> Backend <T> for VecBackend<T>
-	where T: PMMRable {
-	#[allow(unused_variables)]
-	fn append(&mut self, position: u64, data: Vec<(Hash, Option<T>)>) -> Result<(), String> {
-		self.elems.append(&mut map_vec!(data, |d| Some(d.clone())));
-		Ok(())
-	}
-	fn get(&self, position: u64, _include_data:bool) -> Option<(Hash, Option<T>)> {
-		self.elems[(position - 1) as usize].clone()
-	}
-	#[allow(unused_variables)]
-	fn remove(&mut self, positions: Vec<u64>, index: u32) -> Result<(), String> {
-		for n in positions {
-			self.elems[(n - 1) as usize] = None
-		}
-		Ok(())
-	}
-	#[allow(unused_variables)]
-	fn rewind(&mut self, position: u64, index: u32) -> Result<(), String> {
-		self.elems = self.elems[0..(position as usize) + 1].to_vec();
-		Ok(())
-	}
-	fn get_data_file_path(&self) -> String {
-		"".to_string()
-	}
-}
-
-impl <T> VecBackend <T>
-	where T:PMMRable {
-	/// Instantiates a new VecBackend<T>
-	pub fn new() -> VecBackend<T> {
-		VecBackend { elems: vec![] }
-	}
-
-	/// Current number of elements in the underlying Vec.
-	pub fn used_size(&self) -> usize {
-		let mut usz = self.elems.len();
-		for elem in self.elems.deref() {
-			if elem.is_none() {
-				usz -= 1;
-			}
-		}
-		usz
-	}
-
-	/// Resets the backend, emptying the underlying Vec.
-	pub fn clear(&mut self) {
-		self.elems = Vec::new();
-	}
-
-	/// Total length of the underlying vector.
-	pub fn len(&self) -> usize {
-		self.elems.len()
-	}
-}
-
 /// Maintains a list of previously pruned nodes in PMMR, compacting the list as
 /// parents get pruned and allowing checking whether a leaf is pruned. Given
 /// a node's position, computes how much it should get shifted given the

@@ -444,7 +602,7 @@ impl PruneList {
 	/// given leaf. Helpful if, for instance, data for each leaf is being stored
 	/// separately in a continuous flat-file
 	pub fn get_leaf_shift(&self, pos: u64) -> Option<u64> {
-
+
 		// get the position where the node at pos would fit in the pruned list, if
 		// it's already pruned, nothing to skip
 		match self.pruned_pos(pos) {

@@ -468,7 +626,7 @@ impl PruneList {
 	pub fn add(&mut self, pos: u64) {
 		let mut current = pos;
 		loop {
-			let (parent, sibling) = family(current);
+			let (parent, sibling, _) = family(current);
 			match self.pruned_nodes.binary_search(&sibling) {
 				Ok(idx) => {
 					self.pruned_nodes.remove(idx);

@@ -498,7 +656,7 @@ impl PruneList {
 			let next_peak_pos = self.pruned_nodes[idx];
 			let mut cursor = pos;
 			loop {
-				let (parent, _) = family(cursor);
+				let (parent, _, _) = family(cursor);
 				if next_peak_pos == parent {
 					return None;
 				}

@@ -631,9 +789,7 @@ pub fn n_leaves(mut sz: u64) -> u64 {
 /// any node, from its postorder traversal position. Which is the order in which
 /// nodes are added in a MMR.
 ///
-/// [1] https://github.
-/// com/opentimestamps/opentimestamps-server/blob/master/doc/merkle-mountain-range.
-/// md
+/// [1] https://github.com/opentimestamps/opentimestamps-server/blob/master/doc/merkle-mountain-range.md
 pub fn bintree_postorder_height(num: u64) -> u64 {
 	let mut h = num;
 	while !all_ones(h) {

@@ -642,22 +798,48 @@ pub fn bintree_postorder_height(num: u64) -> u64 {
 	most_significant_pos(h) - 1
 }

-/// Calculates the positions of the parent and sibling of the node at the
-/// provided position.
-pub fn family(pos: u64) -> (u64, u64) {
-	let sibling: u64;
-	let parent: u64;
+/// Is this position a leaf in the MMR?
+/// We know the positions of all leaves based on the postorder height of an MMR of any size
+/// (somewhat unintuitively but this is how the PMMR is "append only").
+pub fn is_leaf(pos: u64) -> bool {
+	bintree_postorder_height(pos) == 0
+}
+
+/// Calculates the positions of the parent and sibling of the node at the
+/// provided position. Also returns a boolean representing whether the sibling is on left
+/// branch or right branch (left=0, right=1)
+pub fn family(pos: u64) -> (u64, u64, bool) {
 	let pos_height = bintree_postorder_height(pos);
 	let next_height = bintree_postorder_height(pos + 1);
 	if next_height > pos_height {
-		sibling = bintree_jump_left_sibling(pos);
-		parent = pos + 1;
+		let sibling = bintree_jump_left_sibling(pos);
+		let parent = pos + 1;
+		(parent, sibling, false)
 	} else {
-		sibling = bintree_jump_right_sibling(pos);
-		parent = sibling + 1;
+		let sibling = bintree_jump_right_sibling(pos);
+		let parent = sibling + 1;
+		(parent, sibling, true)
 	}
-	(parent, sibling)
 }

+/// For a given starting position calculate the parent and sibling positions
+/// for the branch/path from that position to the peak of the tree.
+/// We will use the sibling positions to generate the "path" of a Merkle proof.
+pub fn family_branch(pos: u64, last_pos: u64) -> Vec<(u64, u64, bool)> {
+	// loop going up the tree, from node to parent, as long as we stay inside
+	// the tree (as defined by last_pos).
+	let mut branch = vec![];
+	let mut current = pos;
+	while current + 1 <= last_pos {
+		let (parent, sibling, sibling_branch) = family(current);
+		if parent > last_pos {
+			break;
+		}
+		branch.push((parent, sibling, sibling_branch));
+
+		current = parent;
+	}
+	branch
+}
+
 /// Calculates the position of the top-left child of a parent node in the
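A quick worked example of the new return value, for leaf 1 in any MMR (postorder heights run 0 0 1 0 0 1 2 ...): bintree_postorder_height(1) = 0 and bintree_postorder_height(2) = 0, so next_height is not greater than pos_height and we take the else branch: sibling = bintree_jump_right_sibling(1) = 2, parent = sibling + 1 = 3, and the sibling sits on the right branch, giving family(1) = (3, 2, true) — exactly the first case in the various_families test below.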
@ -731,6 +913,82 @@ mod test {
|
|||
use core::{Writer, Reader};
|
||||
use core::hash::{Hash};
|
||||
|
||||
/// Simple MMR backend implementation based on a Vector. Pruning does not
|
||||
/// compact the Vec itself.
|
||||
#[derive(Clone)]
|
||||
pub struct VecBackend<T>
|
||||
where T:PMMRable {
|
||||
/// Backend elements
|
||||
pub elems: Vec<Option<(Hash, Option<T>)>>,
|
||||
/// Positions of removed elements
|
||||
pub remove_list: Vec<u64>,
|
||||
}
|
||||
|
||||
impl <T> Backend <T> for VecBackend<T>
|
||||
where T: PMMRable
|
||||
{
|
||||
fn append(&mut self, _position: u64, data: Vec<(Hash, Option<T>)>) -> Result<(), String> {
|
||||
self.elems.append(&mut map_vec!(data, |d| Some(d.clone())));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get(&self, position: u64, _include_data: bool) -> Option<(Hash, Option<T>)> {
|
||||
if self.remove_list.contains(&position) {
|
||||
None
|
||||
} else {
|
||||
self.elems[(position - 1) as usize].clone()
|
||||
}
|
||||
}
|
||||
|
||||
fn get_from_file(&self, position: u64) -> Option<Hash> {
|
||||
if let Some(ref x) = self.elems[(position - 1) as usize] {
|
||||
Some(x.0)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn remove(&mut self, positions: Vec<u64>, _index: u32) -> Result<(), String> {
|
||||
for n in positions {
|
||||
self.remove_list.push(n)
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn rewind(&mut self, position: u64, _index: u32) -> Result<(), String> {
|
||||
self.elems = self.elems[0..(position as usize) + 1].to_vec();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_data_file_path(&self) -> String {
|
||||
"".to_string()
|
||||
}
|
||||
}
|
||||
|
||||
impl <T> VecBackend <T>
|
||||
where T:PMMRable
|
||||
{
|
||||
/// Instantiates a new VecBackend<T>
|
||||
pub fn new() -> VecBackend<T> {
|
||||
VecBackend {
|
||||
elems: vec![],
|
||||
remove_list: vec![],
|
||||
}
|
||||
}
|
||||
|
||||
/// Current number of elements in the underlying Vec.
|
||||
pub fn used_size(&self) -> usize {
|
||||
let mut usz = self.elems.len();
|
||||
for (idx, _) in self.elems.iter().enumerate() {
|
||||
let idx = idx as u64;
|
||||
if self.remove_list.contains(&idx) {
|
||||
usz -= 1;
|
||||
}
|
||||
}
|
||||
usz
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_leaf_index(){
|
||||
assert_eq!(n_leaves(1),1);
|
||||
|
@ -739,7 +997,7 @@ mod test {
|
|||
assert_eq!(n_leaves(5),4);
|
||||
assert_eq!(n_leaves(8),5);
|
||||
assert_eq!(n_leaves(9),6);
|
||||
|
||||
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -764,7 +1022,7 @@ mod test {
|
|||
|
||||
#[test]
|
||||
#[allow(unused_variables)]
|
||||
fn first_50_mmr_heights() {
|
||||
fn first_100_mmr_heights() {
|
||||
let first_100_str = "0 0 1 0 0 1 2 0 0 1 0 0 1 2 3 0 0 1 0 0 1 2 0 0 1 0 0 1 2 3 4 \
|
||||
0 0 1 0 0 1 2 0 0 1 0 0 1 2 3 0 0 1 0 0 1 2 0 0 1 0 0 1 2 3 4 5 \
|
||||
0 0 1 0 0 1 2 0 0 1 0 0 1 2 3 0 0 1 0 0 1 2 0 0 1 0 0 1 2 3 4 0 0 1 0 0";
|
||||
|
@ -782,19 +1040,130 @@ mod test {
|
|||
}
|
||||
}
|
||||
|
||||
// Trst our n_leaves impl does the right thing for various MMR sizes
|
||||
#[test]
|
||||
fn various_n_leaves() {
|
||||
assert_eq!(n_leaves(1), 1);
|
||||
// 2 is not a valid size for a tree, but n_leaves rounds up to next valid tree size
|
||||
assert_eq!(n_leaves(2), 2);
|
||||
assert_eq!(n_leaves(3), 2);
|
||||
assert_eq!(n_leaves(7), 4);
|
||||
}
|
||||

/// Find parent and sibling positions for various node positions.
#[test]
fn various_families() {
	// 0 0 1 0 0 1 2 0 0 1 0 0 1 2 3
	assert_eq!(family(1), (3, 2, true));
	assert_eq!(family(2), (3, 1, false));
	assert_eq!(family(3), (7, 6, true));
	assert_eq!(family(4), (6, 5, true));
	assert_eq!(family(5), (6, 4, false));
	assert_eq!(family(6), (7, 3, false));
	assert_eq!(family(7), (15, 14, true));
	assert_eq!(family(1_000), (1_001, 997, false));
}

#[test]
fn various_branches() {
	// the two leaf nodes in a 3 node tree (height 1)
	assert_eq!(family_branch(1, 3), [(3, 2, true)]);
	assert_eq!(family_branch(2, 3), [(3, 1, false)]);

	// the root node in a 3 node tree
	assert_eq!(family_branch(3, 3), []);

	// leaf node in a larger tree of 7 nodes (height 2)
	assert_eq!(family_branch(1, 7), [(3, 2, true), (7, 6, true)]);

	// note these only go as far up as the local peak, not necessarily the single root
	assert_eq!(family_branch(1, 4), [(3, 2, true)]);
	// pos 4 in a tree of size 4 is a local peak
	assert_eq!(family_branch(4, 4), []);
	// pos 4 in a tree of size 5 is also still a local peak
	assert_eq!(family_branch(4, 5), []);
	// pos 4 in a tree of size 6 has a parent and a sibling
	assert_eq!(family_branch(4, 6), [(6, 5, true)]);
	// a tree of size 7 is all under a single root
	assert_eq!(family_branch(4, 7), [(6, 5, true), (7, 3, false)]);

	// ok now for a more realistic one, a tree with over a million nodes in it
	// find the "family path" back up the tree from a leaf node at 0
	// Note: the first two entries in the branch are consistent with a small 7 node tree
	// Note: each sibling is on the left branch; this is an example of the largest possible
	// list of peaks before we start combining them into larger peaks.
	assert_eq!(
		family_branch(1, 1_049_000),
		[
			(3, 2, true),
			(7, 6, true),
			(15, 14, true),
			(31, 30, true),
			(63, 62, true),
			(127, 126, true),
			(255, 254, true),
			(511, 510, true),
			(1023, 1022, true),
			(2047, 2046, true),
			(4095, 4094, true),
			(8191, 8190, true),
			(16383, 16382, true),
			(32767, 32766, true),
			(65535, 65534, true),
			(131071, 131070, true),
			(262143, 262142, true),
			(524287, 524286, true),
			(1048575, 1048574, true),
		]
	);
}

#[test]
#[allow(unused_variables)]
fn some_peaks() {
	// 0 0 1 0 0 1 2 0 0 1 0 0 1 2 3
	let empty: Vec<u64> = vec![];
	assert_eq!(peaks(1), vec![1]);
	assert_eq!(peaks(1), [1]);
	assert_eq!(peaks(2), empty);
	assert_eq!(peaks(3), vec![3]);
	assert_eq!(peaks(4), vec![3, 4]);
	assert_eq!(peaks(11), vec![7, 10, 11]);
	assert_eq!(peaks(22), vec![15, 22]);
	assert_eq!(peaks(32), vec![31, 32]);
	assert_eq!(peaks(35), vec![31, 34, 35]);
	assert_eq!(peaks(42), vec![31, 38, 41, 42]);
	assert_eq!(peaks(3), [3]);
	assert_eq!(peaks(4), [3, 4]);
	assert_eq!(peaks(5), empty);
	assert_eq!(peaks(6), empty);
	assert_eq!(peaks(7), [7]);
	assert_eq!(peaks(8), [7, 8]);
	assert_eq!(peaks(9), empty);
	assert_eq!(peaks(10), [7, 10]);
	assert_eq!(peaks(11), [7, 10, 11]);
	assert_eq!(peaks(22), [15, 22]);
	assert_eq!(peaks(32), [31, 32]);
	assert_eq!(peaks(35), [31, 34, 35]);
	assert_eq!(peaks(42), [31, 38, 41, 42]);

	// large realistic example with almost 1.5 million nodes
	// note the distance between peaks decreases toward the right (trees get smaller)
	assert_eq!(
		peaks(1048555),
		[
			524287, 786430, 917501, 983036, 1015803, 1032186, 1040377,
			1044472, 1046519, 1047542, 1048053, 1048308, 1048435, 1048498,
			1048529, 1048544, 1048551, 1048554, 1048555,
		],
	);
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
@@ -828,6 +1197,97 @@ mod test {
	}
}

#[test]
fn empty_merkle_proof() {
	let proof = MerkleProof::empty();
	assert_eq!(proof.verify(), false);
}

#[test]
fn pmmr_merkle_proof() {
	// 0 0 1 0 0 1 2 0 0 1 0 0 1 2 3

	let mut ba = VecBackend::new();
	let mut pmmr = PMMR::new(&mut ba);

	pmmr.push(TestElem([0, 0, 0, 1])).unwrap();
	assert_eq!(pmmr.last_pos, 1);
	let proof = pmmr.merkle_proof(1).unwrap();
	let root = pmmr.root();
	assert_eq!(proof.peaks, [root]);
	assert!(proof.path.is_empty());
	assert!(proof.left_right.is_empty());
	assert!(proof.verify());

	// push two more elements into the PMMR
	pmmr.push(TestElem([0, 0, 0, 2])).unwrap();
	pmmr.push(TestElem([0, 0, 0, 3])).unwrap();
	assert_eq!(pmmr.last_pos, 4);

	let proof1 = pmmr.merkle_proof(1).unwrap();
	assert_eq!(proof1.peaks.len(), 2);
	assert_eq!(proof1.path.len(), 1);
	assert_eq!(proof1.left_right, [true]);
	assert!(proof1.verify());

	let proof2 = pmmr.merkle_proof(2).unwrap();
	assert_eq!(proof2.peaks.len(), 2);
	assert_eq!(proof2.path.len(), 1);
	assert_eq!(proof2.left_right, [false]);
	assert!(proof2.verify());

	// check that we cannot generate a merkle proof for pos 3 (not a leaf node)
	assert_eq!(
		pmmr.merkle_proof(3).err(),
		Some(format!("not a leaf at pos 3")),
	);

	let proof4 = pmmr.merkle_proof(4).unwrap();
	assert_eq!(proof4.peaks.len(), 2);
	assert!(proof4.path.is_empty());
	assert!(proof4.left_right.is_empty());
	assert!(proof4.verify());

	// now add a few more elements to the PMMR to build a larger merkle proof
	for x in 4..1000 {
		pmmr.push(TestElem([0, 0, 0, x])).unwrap();
	}
	let proof = pmmr.merkle_proof(1).unwrap();
	assert_eq!(proof.peaks.len(), 8);
	assert_eq!(proof.path.len(), 9);
	assert_eq!(proof.left_right.len(), 9);
	assert!(proof.verify());
}

#[test]
fn pmmr_merkle_proof_prune_and_rewind() {
	let mut ba = VecBackend::new();
	let mut pmmr = PMMR::new(&mut ba);
	pmmr.push(TestElem([0, 0, 0, 1])).unwrap();
	pmmr.push(TestElem([0, 0, 0, 2])).unwrap();
	let proof = pmmr.merkle_proof(2).unwrap();

	// now prune an element and check we can still generate
	// the correct Merkle proof for the other element (after sibling pruned)
	pmmr.prune(1, 1).unwrap();
	let proof_2 = pmmr.merkle_proof(2).unwrap();
	assert_eq!(proof, proof_2);
}

#[test]
fn merkle_proof_ser_deser() {
	let mut ba = VecBackend::new();
	let mut pmmr = PMMR::new(&mut ba);
	for x in 0..15 {
		pmmr.push(TestElem([0, 0, 0, x])).unwrap();
	}
	let proof = pmmr.merkle_proof(9).unwrap();
	assert!(proof.verify());

	let mut vec = Vec::new();
	ser::serialize(&mut vec, &proof).expect("serialization failed");
	let proof_2: MerkleProof = ser::deserialize(&mut &vec[..]).unwrap();

	assert_eq!(proof, proof_2);
}

#[test]
#[allow(unused_variables)]
fn pmmr_push_root() {

@@ -24,12 +24,16 @@ use std::{error, fmt};
use consensus;
use consensus::VerifySortOrder;
use core::Committed;
use core::global;
use core::BlockHeader;
use core::hash::{Hash, Hashed, ZERO_HASH};
use keychain::{Identifier, Keychain, BlindingFactor};
use core::pmmr::MerkleProof;
use keychain;
use keychain::{Identifier, Keychain, BlindingFactor};
use ser::{self, read_and_verify_sorted, PMMRable, Readable, Reader, Writeable, WriteableSorted, Writer, ser_vec};
use std::io::Cursor;
use util;
use util::LOGGER;

/// The size of the blake2 hash of a switch commitment (256 bits)
pub const SWITCH_COMMIT_HASH_SIZE: usize = 32;

@@ -90,10 +94,14 @@ pub enum Error {
	ConsensusError(consensus::Error),
	/// Error originating from an invalid lock-height
	LockHeight(u64),
	/// Error originating from an invalid switch commitment (coinbase lock_height related)
	/// Error originating from an invalid switch commitment
	SwitchCommitment,
	/// Range proof validation error
	RangeProof,
	/// Error originating from an invalid Merkle proof
	MerkleProof,
	/// Error originating from an input attempting to spend an immature coinbase output
	ImmatureCoinbase,
}

impl error::Error for Error {
@@ -157,7 +165,7 @@ pub struct TxKernel {
hashable_ord!(TxKernel);

/// TODO - no clean way to bridge core::hash::Hash and std::hash::Hash implementations?
impl ::std::hash::Hash for Output {
impl ::std::hash::Hash for TxKernel {
	fn hash<H: ::std::hash::Hasher>(&self, state: &mut H) {
		let mut vec = Vec::new();
		ser::serialize(&mut vec, &self).expect("serialization failed");
@@ -455,6 +463,8 @@ impl Transaction {
		}
		self.verify_sorted()?;

		self.verify_inputs()?;

		for out in &self.outputs {
			out.verify_proof()?;
		}
@@ -470,6 +480,26 @@ impl Transaction {
		self.kernels.verify_sort_order()?;
		Ok(())
	}

	/// We can verify the Merkle proof (for coinbase inputs) here in isolation.
	/// But we cannot check the following as we need data from the index and the PMMR.
	/// So we must be sure to check these at the appropriate point during block validation:
	/// * node is in the correct pos in the PMMR
	/// * block is the correct one (based on utxo_root from block_header via the index)
	fn verify_inputs(&self) -> Result<(), Error> {
		let coinbase_inputs = self.inputs
			.iter()
			.filter(|x| x.features.contains(OutputFeatures::COINBASE_OUTPUT));

		for input in coinbase_inputs {
			let merkle_proof = input.merkle_proof();
			if !merkle_proof.verify() {
				return Err(Error::MerkleProof);
			}
		}

		Ok(())
	}
}

/// A transaction input.
@@ -477,7 +507,7 @@ impl Transaction {
/// Primarily a reference to an output being spent by the transaction.
/// But also information required to verify coinbase maturity through
/// the lock_height hashed in the switch_commit_hash.
#[derive(Debug, Clone, Copy, Hash)]
#[derive(Debug, Clone)]
pub struct Input {
	/// The features of the output being spent.
	/// We will check maturity for coinbase output.
@@ -486,21 +516,38 @@ pub struct Input {
	pub commit: Commitment,
	/// The hash of the block the output originated from.
	/// Currently we only care about this for coinbase outputs.
	/// TODO - include the merkle proof here once we support these.
	pub out_block: Option<Hash>,
	pub block_hash: Option<Hash>,
	/// The Merkle Proof that shows the output being spent by this input
	/// existed and was unspent at the time of this block (proof of inclusion in utxo_root)
	pub merkle_proof: Option<MerkleProof>,
}
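
// Editor's note: an illustrative sketch, not part of this commit, showing how
// a coinbase-spending input might be assembled from the new optional fields
// above. The parameters `commit`, `block_header` and `proof` are hypothetical
// stand-ins for values obtained from the wallet and the chain.
fn example_coinbase_input(
	commit: Commitment,
	block_header: &BlockHeader,
	proof: MerkleProof,
) -> Input {
	Input::new(
		OutputFeatures::COINBASE_OUTPUT,
		commit,
		Some(block_header.hash()), // hash of the block the output originated from
		Some(proof),               // Merkle proof of inclusion in that block's utxo_root
	)
	// a non-coinbase input would simply pass None for both optional fields
}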

hashable_ord!(Input);

/// TODO - no clean way to bridge core::hash::Hash and std::hash::Hash implementations?
impl ::std::hash::Hash for Input {
	fn hash<H: ::std::hash::Hasher>(&self, state: &mut H) {
		let mut vec = Vec::new();
		ser::serialize(&mut vec, &self).expect("serialization failed");
		::std::hash::Hash::hash(&vec, state);
	}
}

/// Implementation of Writeable for a transaction Input, defines how to write
/// an Input as binary.
impl Writeable for Input {
	fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ser::Error> {
		writer.write_u8(self.features.bits())?;
		writer.write_fixed_bytes(&self.commit)?;
		self.commit.write(writer)?;

		if self.features.contains(OutputFeatures::COINBASE_OUTPUT) {
			writer.write_fixed_bytes(&self.out_block.unwrap_or(ZERO_HASH))?;
		if writer.serialization_mode() != ser::SerializationMode::Hash {
			if self.features.contains(OutputFeatures::COINBASE_OUTPUT) {
				let block_hash = &self.block_hash.unwrap_or(ZERO_HASH);
				let merkle_proof = self.merkle_proof();

				writer.write_fixed_bytes(block_hash)?;
				merkle_proof.write(writer)?;
			}
		}

		Ok(())
@@ -517,17 +564,23 @@ impl Readable for Input {

		let commit = Commitment::read(reader)?;

		let out_block = if features.contains(OutputFeatures::COINBASE_OUTPUT) {
			Some(Hash::read(reader)?)
		if features.contains(OutputFeatures::COINBASE_OUTPUT) {
			let block_hash = Some(Hash::read(reader)?);
			let merkle_proof = Some(MerkleProof::read(reader)?);
			Ok(Input::new(
				features,
				commit,
				block_hash,
				merkle_proof,
			))
		} else {
			None
		};

		Ok(Input::new(
			features,
			commit,
			out_block,
		))
			Ok(Input::new(
				features,
				commit,
				None,
				None,
			))
		}
	}
}

@@ -536,24 +589,102 @@ impl Readable for Input {
/// Input must also provide the original output features and the hash of the block
/// the output originated from.
impl Input {
	/// Build a new input from the data required to identify and verify an output beng spent.
	/// Build a new input from the data required to identify and verify an output being spent.
	pub fn new(
		features: OutputFeatures,
		commit: Commitment,
		out_block: Option<Hash>,
		block_hash: Option<Hash>,
		merkle_proof: Option<MerkleProof>,
	) -> Input {
		Input {
			features,
			commit,
			out_block,
			block_hash,
			merkle_proof,
		}
	}

	/// The input commitment which _partially_ identifies the output being spent.
	/// In the presence of a fork we need additional info to uniquely identify the output.
	/// Specifically the block hash (so correctly calculate lock_height for coinbase outputs).
	/// Specifically the block hash (to correctly calculate lock_height for coinbase outputs).
	pub fn commitment(&self) -> Commitment {
		self.commit
		self.commit.clone()
	}

	/// Convenience function to return the (optional) block_hash for this input.
	/// Will return the "zero" hash if we do not have one.
	pub fn block_hash(&self) -> Hash {
		let block_hash = self.block_hash.clone();
		block_hash.unwrap_or(Hash::zero())
	}

	/// Convenience function to return the (optional) merkle_proof for this input.
	/// Will return the "empty" Merkle proof if we do not have one.
	/// We currently only care about the Merkle proof for inputs spending coinbase outputs.
	pub fn merkle_proof(&self) -> MerkleProof {
		let merkle_proof = self.merkle_proof.clone();
		merkle_proof.unwrap_or(MerkleProof::empty())
	}

	/// Verify the maturity of an output being spent by an input.
	/// Only relevant for spending coinbase outputs currently (locked for 1,000 confirmations).
	///
	/// The proof associates the output with the root by its hash (and pos) in the MMR.
	/// The proof shows the output existed and was unspent at the time the utxo_root was built.
	/// The root associates the proof with a specific block header with that utxo_root.
	/// So the proof shows the output was unspent at the time of the block
	/// and is at least as old as that block (may be older).
	///
	/// We can verify maturity of the output being spent by:
	///
	/// * verifying the Merkle Proof produces the correct root for the given hash (from MMR)
	/// * verifying the root matches the utxo_root in the block_header
	/// * verifying the hash matches the node hash in the Merkle Proof
	/// * finally verifying the maturity rules based on the height of the block header
	///
	pub fn verify_maturity(
		&self,
		hash: Hash,
		header: &BlockHeader,
		height: u64,
	) -> Result<(), Error> {
		if self.features.contains(OutputFeatures::COINBASE_OUTPUT) {
			let block_hash = self.block_hash();
			let merkle_proof = self.merkle_proof();

			// Check we are dealing with the correct block header
			if block_hash != header.hash() {
				return Err(Error::MerkleProof);
			}

			// Is our Merkle Proof valid? Does the node hash up consistently to the root?
			if !merkle_proof.verify() {
				return Err(Error::MerkleProof);
			}

			// Is the root the correct root for the given block header?
			if merkle_proof.root != header.utxo_root {
				return Err(Error::MerkleProof);
			}

			// Does the hash from the MMR actually match the one in the Merkle Proof?
			if merkle_proof.node != hash {
				return Err(Error::MerkleProof);
			}

			// Finally, has the output matured sufficiently now that we know the block?
			let lock_height = header.height + global::coinbase_maturity();
			if lock_height > height {
				return Err(Error::ImmatureCoinbase);
			}
			debug!(
				LOGGER,
				"input: verify_maturity: success, coinbase maturity via Merkle proof: {} vs. {}",
				lock_height,
				height,
			);
		}
		Ok(())
	}
}
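
// Editor's note: an illustrative sketch, not part of this commit, tying the
// four checks documented above together at validation time. `node_hash` (the
// hash of the output at its pos in the PMMR), `header` and `height` are
// hypothetical stand-ins for values looked up during block validation.
fn example_check_coinbase_spend(
	input: &Input,
	node_hash: Hash,
	header: &BlockHeader,
	height: u64,
) -> Result<(), Error> {
	// verifies the proof hashes to header.utxo_root, that the proof's node
	// matches node_hash, and that header.height + coinbase_maturity() <= height
	input.verify_maturity(node_hash, header, height)
}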


@@ -703,7 +834,7 @@ pub struct Output {
hashable_ord!(Output);

/// TODO - no clean way to bridge core::hash::Hash and std::hash::Hash implementations?
impl ::std::hash::Hash for TxKernel {
impl ::std::hash::Hash for Output {
	fn hash<H: ::std::hash::Hasher>(&self, state: &mut H) {
		let mut vec = Vec::new();
		ser::serialize(&mut vec, &self).expect("serialization failed");
@@ -716,7 +847,7 @@ impl ::std::hash::Hash for TxKernel {
impl Writeable for Output {
	fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ser::Error> {
		writer.write_u8(self.features.bits())?;
		writer.write_fixed_bytes(&self.commit)?;
		self.commit.write(writer)?;
		// Hash of an output doesn't cover the switch commit, it should
		// be wound into the range proof separately
		if writer.serialization_mode() != ser::SerializationMode::Hash {
@@ -792,7 +923,7 @@ impl Output {

}

/// An output_identifier can be build from either an input _or_ and output and
/// An output_identifier can be built from either an input _or_ an output and
/// contains everything we need to uniquely identify an output being spent.
/// Needed because it is not sufficient to pass a commitment around.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
@@ -873,25 +1004,25 @@ pub struct OutputStoreable {
}

impl OutputStoreable {
	/// Build a StoreableOutput from an existing output.
	pub fn from_output(output: &Output) -> OutputStoreable {
		OutputStoreable {
			features: output.features,
			commit: output.commit,
			switch_commit_hash: output.switch_commit_hash,
		}
	}

	/// Return a regular output
	pub fn to_output(self, rproof: RangeProof) -> Output {
		Output {
			features: self.features,
			commit: self.commit,
			switch_commit_hash: self.switch_commit_hash,
			proof: rproof,
		}
	}
}

impl PMMRable for OutputStoreable {
	fn len() -> usize {
@@ -966,178 +1097,176 @@ impl ProofMessageElements {

#[cfg(test)]
mod test {
	use super::*;
	use core::id::{ShortId, ShortIdentifiable};
	use keychain::Keychain;
	use util::secp;

	#[test]
	fn test_kernel_ser_deser() {
		let keychain = Keychain::from_random_seed().unwrap();
		let key_id = keychain.derive_key_id(1).unwrap();
		let commit = keychain.commit(5, &key_id).unwrap();

		// just some bytes for testing ser/deser
		let sig = secp::Signature::from_raw_data(&[0; 64]).unwrap();

		let kernel = TxKernel {
			features: KernelFeatures::DEFAULT_KERNEL,
			lock_height: 0,
			excess: commit,
			excess_sig: sig.clone(),
			fee: 10,
		};

		let mut vec = vec![];
		ser::serialize(&mut vec, &kernel).expect("serialized failed");
		let kernel2: TxKernel = ser::deserialize(&mut &vec[..]).unwrap();
		assert_eq!(kernel2.features, KernelFeatures::DEFAULT_KERNEL);
		assert_eq!(kernel2.lock_height, 0);
		assert_eq!(kernel2.excess, commit);
		assert_eq!(kernel2.excess_sig, sig.clone());
		assert_eq!(kernel2.fee, 10);

		// now check a kernel with lock_height serializes/deserializes correctly
		let kernel = TxKernel {
			features: KernelFeatures::DEFAULT_KERNEL,
			lock_height: 100,
			excess: commit,
			excess_sig: sig.clone(),
			fee: 10,
		};

		let mut vec = vec![];
		ser::serialize(&mut vec, &kernel).expect("serialized failed");
		let kernel2: TxKernel = ser::deserialize(&mut &vec[..]).unwrap();
		assert_eq!(kernel2.features, KernelFeatures::DEFAULT_KERNEL);
		assert_eq!(kernel2.lock_height, 100);
		assert_eq!(kernel2.excess, commit);
		assert_eq!(kernel2.excess_sig, sig.clone());
		assert_eq!(kernel2.fee, 10);
	}

	#[test]
	fn test_output_ser_deser() {
		let keychain = Keychain::from_random_seed().unwrap();
		let key_id = keychain.derive_key_id(1).unwrap();
		let commit = keychain.commit(5, &key_id).unwrap();
		let switch_commit = keychain.switch_commit(&key_id).unwrap();
		let switch_commit_hash = SwitchCommitHash::from_switch_commit(
			switch_commit,
			&keychain,
			&key_id,
		);
		let msg = secp::pedersen::ProofMessage::empty();
		let proof = keychain.range_proof(5, &key_id, commit, Some(switch_commit_hash.as_ref().to_vec()), msg).unwrap();

		let out = Output {
			features: OutputFeatures::DEFAULT_OUTPUT,
			commit: commit,
			switch_commit_hash: switch_commit_hash,
			proof: proof,
		};

		let mut vec = vec![];
		ser::serialize(&mut vec, &out).expect("serialized failed");
		let dout: Output = ser::deserialize(&mut &vec[..]).unwrap();

		assert_eq!(dout.features, OutputFeatures::DEFAULT_OUTPUT);
		assert_eq!(dout.commit, out.commit);
		assert_eq!(dout.proof, out.proof);
	}

	#[test]
	fn test_output_value_recovery() {
		let keychain = Keychain::from_random_seed().unwrap();
		let key_id = keychain.derive_key_id(1).unwrap();
		let value = 1003;

		let commit = keychain.commit(value, &key_id).unwrap();
		let switch_commit = keychain.switch_commit(&key_id).unwrap();
		let switch_commit_hash = SwitchCommitHash::from_switch_commit(
			switch_commit,
			&keychain,
			&key_id,
		);
		let msg = (ProofMessageElements {
			value: value,
		}).to_proof_message();

		let proof = keychain.range_proof(value, &key_id, commit, Some(switch_commit_hash.as_ref().to_vec()), msg).unwrap();

		let output = Output {
			features: OutputFeatures::DEFAULT_OUTPUT,
			commit: commit,
			switch_commit_hash: switch_commit_hash,
			proof: proof,
		};

		// check we can successfully recover the value with the original blinding factor
		let result = output.recover_value(&keychain, &key_id);
		// TODO: Remove this check once value recovery is supported within bullet proofs
		if let Some(v) = result {
			assert_eq!(v, 1003);
		} else {
			return;
		}

		// Bulletproofs message unwind will just be gibberish given the wrong blinding factor
	}

	#[test]
	fn commit_consistency() {
		let keychain = Keychain::from_seed(&[0; 32]).unwrap();
		let key_id = keychain.derive_key_id(1).unwrap();

		let commit = keychain.commit(1003, &key_id).unwrap();
		let switch_commit = keychain.switch_commit(&key_id).unwrap();
		println!("Switch commit: {:?}", switch_commit);
		println!("commit: {:?}", commit);
		let key_id = keychain.derive_key_id(1).unwrap();

		let switch_commit_2 = keychain.switch_commit(&key_id).unwrap();
		let commit_2 = keychain.commit(1003, &key_id).unwrap();
		println!("Switch commit 2: {:?}", switch_commit_2);
		println!("commit2 : {:?}", commit_2);

		assert!(commit == commit_2);
		assert!(switch_commit == switch_commit_2);
	}

	#[test]
	fn input_short_id() {
		let keychain = Keychain::from_seed(&[0; 32]).unwrap();
		let key_id = keychain.derive_key_id(1).unwrap();
		let commit = keychain.commit(5, &key_id).unwrap();

		let input = Input {
			features: OutputFeatures::DEFAULT_OUTPUT,
			commit: commit,
			out_block: None,
		};

		let block_hash = Hash::from_hex(
			"3a42e66e46dd7633b57d1f921780a1ac715e6b93c19ee52ab714178eb3a9f673",
		).unwrap();

		let nonce = 0;

		let short_id = input.short_id(&block_hash, nonce);
		assert_eq!(short_id, ShortId::from_hex("28fea5a693af").unwrap());

		// now generate the short_id for a *very* similar output (single feature flag different)
		// and check it generates a different short_id
		let input = Input {
			features: OutputFeatures::COINBASE_OUTPUT,
			commit: commit,
			out_block: None,
		};

		let block_hash = Hash::from_hex(
			"3a42e66e46dd7633b57d1f921780a1ac715e6b93c19ee52ab714178eb3a9f673",
		).unwrap();

		let short_id = input.short_id(&block_hash, nonce);
		assert_eq!(short_id, ShortId::from_hex("c8af83b54e46").unwrap());
	}

	#[test]
	fn input_short_id() {
		let keychain = Keychain::from_seed(&[0; 32]).unwrap();
		let key_id = keychain.derive_key_id(1).unwrap();
		let commit = keychain.commit(5, &key_id).unwrap();

		let input = Input {
			features: OutputFeatures::DEFAULT_OUTPUT,
			commit: commit,
			block_hash: None,
			merkle_proof: None,
		};

		let block_hash = Hash::from_hex(
			"3a42e66e46dd7633b57d1f921780a1ac715e6b93c19ee52ab714178eb3a9f673",
		).unwrap();

		let nonce = 0;

		let short_id = input.short_id(&block_hash, nonce);
		assert_eq!(short_id, ShortId::from_hex("28fea5a693af").unwrap());

		// now generate the short_id for a *very* similar output (single feature flag different)
		// and check it generates a different short_id
		let input = Input {
			features: OutputFeatures::COINBASE_OUTPUT,
			commit: commit,
			block_hash: None,
			merkle_proof: None,
		};

		let short_id = input.short_id(&block_hash, nonce);
		assert_eq!(short_id, ShortId::from_hex("2df325971ab0").unwrap());
	}
}

@@ -573,7 +573,7 @@ impl PoolToChainAdapter {
}

impl pool::BlockChain for PoolToChainAdapter {
	fn is_unspent(&self, output_ref: &OutputIdentifier) -> Result<(), pool::PoolError> {
	fn is_unspent(&self, output_ref: &OutputIdentifier) -> Result<Hash, pool::PoolError> {
		wo(&self.chain)
			.is_unspent(output_ref)
			.map_err(|e| match e {

@@ -106,9 +106,9 @@ impl DummyChainImpl {
}

impl BlockChain for DummyChainImpl {
	fn is_unspent(&self, output_ref: &OutputIdentifier) -> Result<(), PoolError> {
	fn is_unspent(&self, output_ref: &OutputIdentifier) -> Result<hash::Hash, PoolError> {
		match self.utxo.read().unwrap().get_output(&output_ref.commit) {
			Some(_) => Ok(()),
			Some(_) => Ok(hash::Hash::zero()),
			None => Err(PoolError::GenericPoolError),
		}
	}
@@ -117,7 +117,7 @@ impl BlockChain for DummyChainImpl {
		if !input.features.contains(OutputFeatures::COINBASE_OUTPUT) {
			return Ok(());
		}
		let block_hash = input.out_block.expect("requires a block hash");
		let block_hash = input.block_hash.expect("requires a block hash");
		let headers = self.block_headers.read().unwrap();
		if let Some(h) = headers
			.iter()
@@ -127,7 +127,7 @@ impl BlockChain for DummyChainImpl {
				return Ok(());
			}
		}
		Err(PoolError::ImmatureCoinbase)
		Err(PoolError::InvalidTx(transaction::Error::ImmatureCoinbase))
	}

	fn head_header(&self) -> Result<block::BlockHeader, PoolError> {

@@ -320,11 +320,13 @@ mod tests {
			OutputFeatures::DEFAULT_OUTPUT,
			keychain.commit(50, &key_id2).unwrap(),
			None,
			None,
		),
		core::transaction::Input::new(
			OutputFeatures::DEFAULT_OUTPUT,
			keychain.commit(25, &key_id3).unwrap(),
			None,
			None,
		),
	];
	let msg = secp::pedersen::ProofMessage::empty();

@@ -186,7 +186,7 @@ where
	}

	// Making sure the transaction is valid before anything else.
	tx.validate().map_err(|_e| PoolError::Invalid)?;
	tx.validate().map_err(|e| PoolError::InvalidTx(e))?;

	// The first check involves ensuring that an identical transaction is
	// not already in the pool's transaction set.
@@ -646,9 +646,10 @@ mod tests {
	use blake2;
	use core::global::ChainTypes;
	use core::core::SwitchCommitHash;
	use core::core::hash::ZERO_HASH;
	use core::core::hash::{Hash, Hashed};
	use core::core::pmmr::MerkleProof;
	use core::core::target::Difficulty;
	use types::PoolError::InvalidTx;

	macro_rules! expect_output_parent {
		($pool:expr, $expected:pat, $( $output:expr ),+ ) => {
@@ -873,7 +874,7 @@ mod tests {
		);
		let result = write_pool.add_to_memory_pool(test_source(), txn);
		match result {
			Err(PoolError::ImmatureCoinbase) => {},
			Err(InvalidTx(transaction::Error::ImmatureCoinbase)) => {},
			_ => panic!("expected ImmatureCoinbase error here"),
		};

@@ -1024,22 +1025,22 @@ mod tests {
		// invalidated (orphaned).
		let conflict_child = test_transaction(vec![12], vec![2]);
		// 7. A transaction that descends from transaction 2 that should be
		// valid due to its inputs being satisfied by the block.
		let conflict_valid_child = test_transaction(vec![6], vec![4]);
		// 8. A transaction that descends from transaction 3 that should be
		// invalidated due to an output conflict.
		let valid_child_conflict = test_transaction(vec![13], vec![9]);
		// 9. A transaction that descends from transaction 3 that should remain
		// valid after reconciliation.
		let valid_child_valid = test_transaction(vec![15], vec![11]);
		// 10. A transaction that descends from both transaction 6 and
		// transaction 9
		let mixed_child = test_transaction(vec![2, 11], vec![7]);

		// Add transactions.
		// Note: There are some ordering constraints that must be followed here
		// until orphans is 100% implemented. Once the orphans process has
		// stabilized, we can mix these up to exercise that path a bit.
		let mut txs_to_add = vec![
			block_transaction,
			conflict_transaction,
@@ -1056,7 +1057,7 @@ mod tests {
		let expected_pool_size = txs_to_add.len();

		// First we add the above transactions to the pool; all should be
		// accepted.
		{
			let mut write_pool = pool.write().unwrap();
			assert_eq!(write_pool.total_size(), 0);
@@ -1068,8 +1069,8 @@ mod tests {
			assert_eq!(write_pool.total_size(), expected_pool_size);
		}
		// Now we prepare the block that will cause the above condition.
		// First, the transactions we want in the block:
		// - Copy of 1
		let block_tx_1 = test_transaction(vec![10], vec![8]);
		// - Conflict w/ 2, satisfies 7
		let block_tx_2 = test_transaction(vec![20], vec![6]);
@@ -1103,7 +1104,7 @@ mod tests {
		assert_eq!(evicted_transactions.unwrap().len(), 6);

		// TODO: Txids are not yet deterministic. When they are, we should
		// check the specific transactions that were evicted.
	}

@@ -1204,8 +1205,8 @@ mod tests {
		txs = read_pool.prepare_mineable_transactions(3);
		assert_eq!(txs.len(), 3);
		// TODO: This is ugly, either make block::new take owned
		// txs instead of mut refs, or change
		// prepare_mineable_transactions to return mut refs
		let block_txs: Vec<transaction::Transaction> = txs.drain(..).map(|x| *x).collect();
		let tx_refs = block_txs.iter().collect();

@@ -1276,7 +1277,7 @@ mod tests {

		for input_value in input_values {
			let key_id = keychain.derive_key_id(input_value as u32).unwrap();
			tx_elements.push(build::input(input_value, ZERO_HASH, key_id));
			tx_elements.push(build::input(input_value, key_id));
		}

		for output_value in output_values {
@@ -1304,9 +1305,22 @@ mod tests {

		let mut tx_elements = Vec::new();

		// for input_value in input_values {
		let merkle_proof = MerkleProof {
			node: Hash::zero(),
			root: Hash::zero(),
			peaks: vec![Hash::zero()],
			..MerkleProof::default()
		};

		let key_id = keychain.derive_key_id(input_value as u32).unwrap();
		tx_elements.push(build::coinbase_input(input_value, input_block_hash, key_id));
		tx_elements.push(
			build::coinbase_input(
				input_value,
				input_block_hash,
				merkle_proof,
				key_id,
			),
		);

		for output_value in output_values {
			let key_id = keychain.derive_key_id(output_value as u32).unwrap();
@@ -1334,7 +1348,7 @@ mod tests {

		for input_value in input_values {
			let key_id = keychain.derive_key_id(input_value as u32).unwrap();
			tx_elements.push(build::input(input_value, ZERO_HASH, key_id));
			tx_elements.push(build::input(input_value, key_id));
		}

		for output_value in output_values {

@@ -103,8 +103,8 @@ impl fmt::Debug for Parent {
/// Enum of errors
#[derive(Debug)]
pub enum PoolError {
	/// An invalid pool entry
	Invalid,
	/// An invalid pool entry caused by underlying tx validation error
	InvalidTx(transaction::Error),
	/// An entry already in the pool
	AlreadyInPool,
	/// A duplicate output
@@ -123,9 +123,6 @@ pub enum PoolError {
		/// The spent output
		spent_output: Commitment,
	},
	/// Attempt to spend an output before it matures
	/// lock_height must not exceed current block height
	ImmatureCoinbase,
	/// Attempt to add a transaction to the pool with lock_height
	/// greater than height of current block
	ImmatureTransaction {
@@ -155,7 +152,7 @@ pub trait BlockChain {
	/// orphans, etc.
	/// We do not maintain outputs themselves. The only information we have is the
	/// hash from the output MMR.
	fn is_unspent(&self, output_ref: &OutputIdentifier) -> Result<(), PoolError>;
	fn is_unspent(&self, output_ref: &OutputIdentifier) -> Result<hash::Hash, PoolError>;

	/// Check if an output being spent by the input has sufficiently matured.
	/// This is only applicable for coinbase outputs (1,000 blocks).

@@ -69,13 +69,7 @@ where
		Ok(())
	}

	/// Get a Hash by insertion position
	fn get(&self, position: u64, include_data:bool) -> Option<(Hash, Option<T>)> {
		// Check if this position has been pruned in the remove log or the
		// pruned list
		if self.rm_log.includes(position) {
			return None;
		}
	fn get_from_file(&self, position: u64) -> Option<Hash> {
		let shift = self.pruned_nodes.get_shift(position);
		if let None = shift {
			return None;
@@ -89,8 +83,8 @@ where
		let hash_record_len = 32;
		let file_offset = ((pos - shift.unwrap()) as usize) * hash_record_len;
		let data = self.hash_file.read(file_offset, hash_record_len);
		let hash_val = match ser::deserialize(&mut &data[..]) {
			Ok(h) => h,
		match ser::deserialize(&mut &data[..]) {
			Ok(h) => Some(h),
			Err(e) => {
				error!(
					LOGGER,
@@ -99,10 +93,26 @@ where
				);
				return None;
			}
		};
		}
	}

	/// Get a Hash by insertion position
	fn get(&self, position: u64, include_data: bool) -> Option<(Hash, Option<T>)> {
		// Check if this position has been pruned in the remove log or the
		// pruned list
		if self.rm_log.includes(position) {
			return None;
		}

		let hash_val = self.get_from_file(position);

		// TODO - clean this up
		if !include_data {
			return Some(((hash_val), None));
			if let Some(hash) = hash_val {
				return Some((hash, None));
			} else {
				return None;
			}
		}

		// Optionally read flatfile storage to get data element
@@ -123,7 +133,12 @@ where
			}
		};

		Some((hash_val, data))
		// TODO - clean this up
		if let Some(hash) = hash_val {
			return Some((hash, data));
		} else {
			return None;
		}
	}

	fn rewind(&mut self, position: u64, index: u32) -> Result<(), String> {
@@ -338,5 +353,3 @@ where
		Ok(())
	}
}

@@ -1,4 +1,4 @@
// Copyright 2017 The Grin Developers
// Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -20,7 +20,6 @@ use std::collections::HashMap;
use failure::{ResultExt};

use api;
use core::core::hash::Hash;
use types::*;
use keychain::{Identifier, Keychain};
use util::secp::pedersen;
@@ -65,7 +64,7 @@ fn refresh_missing_block_hashes(config: &WalletConfig, keychain: &Keychain) -> R
		.values()
		.filter(|x| {
			x.root_key_id == keychain.root_key_id() &&
			x.block.hash() == Hash::zero() &&
			x.block.is_none() &&
			x.status == OutputStatus::Unspent
		})
	{

@@ -113,11 +112,16 @@ fn refresh_missing_block_hashes(config: &WalletConfig, keychain: &Keychain) -> R
	debug!(LOGGER, "{:?}", url);

	let mut api_blocks: HashMap<pedersen::Commitment, api::BlockHeaderInfo> = HashMap::new();
	let mut api_merkle_proofs: HashMap<pedersen::Commitment, MerkleProofWrapper> = HashMap::new();
	match api::client::get::<Vec<api::BlockOutputs>>(url.as_str()) {
		Ok(blocks) => {
			for block in blocks {
				for out in block.outputs {
					api_blocks.insert(out.commit, block.header.clone());
					if let Some(merkle_proof) = out.merkle_proof {
						let wrapper = MerkleProofWrapper(merkle_proof);
						api_merkle_proofs.insert(out.commit, wrapper);
					}
				}
			}
		}
@@ -138,8 +142,11 @@ fn refresh_missing_block_hashes(config: &WalletConfig, keychain: &Keychain) -> R
		if let Entry::Occupied(mut output) = wallet_data.outputs.entry(id.to_hex()) {
			if let Some(b) = api_blocks.get(&commit) {
				let output = output.get_mut();
				output.block = BlockIdentifier::from_str(&b.hash).unwrap();
				output.block = Some(BlockIdentifier::from_hex(&b.hash).unwrap());
				output.height = b.height;
				if let Some(merkle_proof) = api_merkle_proofs.get(&commit) {
					output.merkle_proof = Some(merkle_proof.clone());
				}
			}
		}
	}

@@ -98,7 +98,8 @@ fn handle_sender_initiation(
		height: 0,
		lock_height: 0,
		is_coinbase: false,
		block: BlockIdentifier::zero(),
		block: None,
		merkle_proof: None,
	});

	key_id
@@ -322,7 +323,8 @@ pub fn receive_coinbase(
		height: height,
		lock_height: lock_height,
		is_coinbase: true,
		block: BlockIdentifier::zero(),
		block: None,
		merkle_proof: None,
	});

	(key_id, derivation)
@@ -404,7 +406,8 @@ fn build_final_transaction(
		height: 0,
		lock_height: 0,
		is_coinbase: false,
		block: BlockIdentifier::zero(),
		block: None,
		merkle_proof: None,
	});

	(key_id, derivation)

@@ -1,4 +1,4 @@
// Copyright 2017 The Grin Developers
// Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -19,7 +19,7 @@ use api;
use core::global;
use core::core::{Output, SwitchCommitHash};
use core::core::transaction::OutputFeatures;
use types::{BlockIdentifier, WalletConfig, WalletData, OutputData, OutputStatus, Error, ErrorKind};
use types::{WalletConfig, WalletData, OutputData, OutputStatus, Error, ErrorKind};
use byteorder::{BigEndian, ByteOrder};

@@ -315,7 +315,8 @@ pub fn restore(
			height: output.4,
			lock_height: output.5,
			is_coinbase: output.6,
			block: BlockIdentifier::zero(),
			block: None,
			merkle_proof: None,
		});
	};
}

@@ -331,9 +331,18 @@ fn inputs_and_change(
	for coin in coins {
		let key_id = keychain.derive_key_id(coin.n_child).context(ErrorKind::Keychain)?;
		if coin.is_coinbase {
			parts.push(build::coinbase_input(coin.value, coin.block.hash(), key_id));
			let block = coin.block.clone();
			let merkle_proof = coin.merkle_proof.clone();
			let merkle_proof = merkle_proof.unwrap().merkle_proof();

			parts.push(build::coinbase_input(
				coin.value,
				block.unwrap().hash(),
				merkle_proof,
				key_id,
			));
		} else {
			parts.push(build::input(coin.value, coin.block.hash(), key_id));
			parts.push(build::input(coin.value, key_id));
		}
	}

@@ -352,7 +361,8 @@ fn inputs_and_change(
		height: 0,
		lock_height: 0,
		is_coinbase: false,
		block: BlockIdentifier::zero(),
		block: None,
		merkle_proof: None,
	});

	change_key
@@ -366,7 +376,6 @@ fn inputs_and_change(
#[cfg(test)]
mod test {
	use core::core::build;
	use core::core::hash::ZERO_HASH;
	use keychain::Keychain;


@@ -378,7 +387,7 @@ mod test {
		let key_id1 = keychain.derive_key_id(1).unwrap();

		let tx1 = build::transaction(vec![build::output(105, key_id1.clone())], &keychain).unwrap();
		let tx2 = build::transaction(vec![build::input(105, ZERO_HASH, key_id1.clone())], &keychain).unwrap();
		let tx2 = build::transaction(vec![build::input(105, key_id1.clone())], &keychain).unwrap();

		assert_eq!(tx1.outputs[0].features, tx2.inputs[0].features);
		assert_eq!(tx1.outputs[0].commitment(), tx2.inputs[0].commitment());

@@ -1,4 +1,4 @@
// Copyright 2017 The Grin Developers
// Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -36,6 +36,7 @@ use failure::{Backtrace, Context, Fail, ResultExt};
use core::consensus;
use core::core::Transaction;
use core::core::hash::Hash;
use core::core::pmmr::MerkleProof;
use core::ser;
use keychain;
use keychain::BlindingFactor;
@@ -98,7 +99,7 @@ pub enum ErrorKind {
	#[fail(display = "JSON format error")]
	Format,

	#[fail(display = "I/O error")]
	IO,
@@ -124,7 +125,6 @@ pub enum ErrorKind {
	/// Wallet seed already exists
	#[fail(display = "Wallet seed exists error")]
	WalletSeedExists,

	#[fail(display = "Generic error: {}", _0)]
	GenericError(&'static str),

@@ -226,6 +226,52 @@ impl fmt::Display for OutputStatus {
	}
}

#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord)]
pub struct MerkleProofWrapper(pub MerkleProof);

impl MerkleProofWrapper {
	pub fn merkle_proof(&self) -> MerkleProof {
		self.0.clone()
	}
}

impl serde::ser::Serialize for MerkleProofWrapper {
	fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
	where
		S: serde::ser::Serializer,
	{
		serializer.serialize_str(&self.0.to_hex())
	}
}

impl<'de> serde::de::Deserialize<'de> for MerkleProofWrapper {
	fn deserialize<D>(deserializer: D) -> Result<MerkleProofWrapper, D::Error>
	where
		D: serde::de::Deserializer<'de>,
	{
		deserializer.deserialize_str(MerkleProofWrapperVisitor)
	}
}

struct MerkleProofWrapperVisitor;

impl<'de> serde::de::Visitor<'de> for MerkleProofWrapperVisitor {
	type Value = MerkleProofWrapper;

	fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
		formatter.write_str("a merkle proof")
	}

	fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
	where
		E: serde::de::Error,
	{
		let merkle_proof = MerkleProof::from_hex(s).unwrap();
		Ok(MerkleProofWrapper(merkle_proof))
	}
}
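
// Editor's note: an illustrative sketch, not part of this commit. The wrapper
// above serializes a Merkle proof as a hex string, so it round-trips through
// the JSON wallet file; `proof` is a hypothetical stand-in for a proof
// returned by the chain API, and serde_json is assumed available as it is
// already used elsewhere in this module.
fn example_wrapper_round_trip(proof: MerkleProof) {
	let wrapper = MerkleProofWrapper(proof);
	// serializes to a JSON string of hex, e.g. "0a1b2c..."
	let json = serde_json::to_string(&wrapper).unwrap();
	let back: MerkleProofWrapper = serde_json::from_str(&json).unwrap();
	assert_eq!(wrapper, back);
}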

#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord)]
pub struct BlockIdentifier(Hash);

@@ -234,14 +280,10 @@ impl BlockIdentifier {
		self.0
	}

	pub fn from_str(hex: &str) -> Result<BlockIdentifier, Error> {
	pub fn from_hex(hex: &str) -> Result<BlockIdentifier, Error> {
		let hash = Hash::from_hex(hex).context(ErrorKind::GenericError("Invalid hex"))?;
		Ok(BlockIdentifier(hash))
	}

	pub fn zero() -> BlockIdentifier {
		BlockIdentifier(Hash::zero())
	}
}

impl serde::ser::Serialize for BlockIdentifier {

@@ -302,7 +344,8 @@ pub struct OutputData {
	/// Is this a coinbase output? Is it subject to coinbase locktime?
	pub is_coinbase: bool,
	/// Hash of the block this output originated from.
	pub block: BlockIdentifier,
	pub block: Option<BlockIdentifier>,
	pub merkle_proof: Option<MerkleProofWrapper>,
}

impl OutputData {
@@ -529,8 +572,8 @@ impl WalletData {
	fn read_outputs(data_file_path: &str) -> Result<Vec<OutputData>, Error> {
		let data_file = File::open(data_file_path)
			.context(ErrorKind::WalletData(&"Could not open wallet file"))?;
		serde_json::from_reader(data_file)
			.map_err(|e| e.context(ErrorKind::WalletData(&"Error reading wallet file")).into())
	}

	/// Populate wallet_data with output_data from disk.