[consensus breaking] New Improved Merkle Proofs (#1119)
* new improved Merkle proofs
* fix pool and chain tests
* fixup core tests after the merge
parent 70ba1c838c
commit 0ff6763ee6
23 changed files with 632 additions and 541 deletions
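For orientation, the change replaces the proof type defined in core::core::pmmr with a much smaller one in a new core::core::merkle_proof module. A rough before/after of the two struct shapes, paraphrased from the hunks below (field names as they appear in the diff):

// Old core::core::pmmr::MerkleProof (removed below): carries the root, the
// node hash, every peak hash, and a (hash, pos) sibling path.
pub struct MerkleProof {
	pub root: Hash,
	pub node: Hash,
	pub mmr_size: u64,
	pub peaks: Vec<Hash>,
	pub path: Vec<(Hash, u64)>,
}

// New core::core::merkle_proof::MerkleProof (added below): only the MMR size
// and a flat list of sibling hashes; the verifier supplies the expected root,
// the element, and its position instead.
pub struct MerkleProof {
	pub mmr_size: u64,
	pub path: Vec<Hash>,
}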
@@ -16,7 +16,7 @@ use std::sync::Arc;

use chain;
use core::core::hash::Hashed;
use core::core::pmmr::MerkleProof;
use core::core::merkle_proof::MerkleProof;
use core::{core, ser};
use p2p;
use serde;
@@ -21,7 +21,7 @@ use std::sync::{Arc, Mutex, RwLock};
use std::time::{Duration, Instant};

use core::core::hash::{Hash, Hashed};
use core::core::pmmr::MerkleProof;
use core::core::merkle_proof::MerkleProof;
use core::core::target::Difficulty;
use core::core::Committed;
use core::core::{Block, BlockHeader, Output, OutputIdentifier, Transaction, TxKernel};
@@ -28,7 +28,9 @@ use util::secp::pedersen::{Commitment, RangeProof};

use core::core::committed::Committed;
use core::core::hash::{Hash, Hashed};
use core::core::pmmr::{self, MerkleProof, PMMR};
use core::core::merkle_proof::MerkleProof;
use core::core::pmmr;
use core::core::pmmr::PMMR;
use core::core::{Block, BlockHeader, Input, Output, OutputFeatures, OutputIdentifier, Transaction,
	TxKernel};
use core::global;
@@ -539,17 +541,36 @@ impl<'a> Extension<'a> {
		inputs: &Vec<Input>,
		height: u64,
	) -> Result<(), Error> {
		for x in inputs {
			if x.features.contains(OutputFeatures::COINBASE_OUTPUT) {
				let header = self.commit_index.get_block_header(&x.block_hash())?;
				let pos = self.get_output_pos(&x.commitment())?;
				let out_hash = self.output_pmmr.get_hash(pos).ok_or(Error::OutputNotFound)?;
				x.verify_maturity(out_hash, &header, height)?;
		for input in inputs {
			if input.features.contains(OutputFeatures::COINBASE_OUTPUT) {
				self.verify_maturity_via_merkle_proof(input, height)?;
			}
		}
		Ok(())
	}

	fn verify_maturity_via_merkle_proof(&self, input: &Input, height: u64) -> Result<(), Error> {
		let header = self.commit_index.get_block_header(&input.block_hash())?;

		// Check that the height indicates it has matured sufficiently
		// we will check the Merkle proof below to ensure we are being
		// honest about the height
		if header.height + global::coinbase_maturity() >= height {
			return Err(Error::ImmatureCoinbase);
		}

		// We need the MMR pos to verify the Merkle proof
		let pos = self.get_output_pos(&input.commitment())?;

		let out_id = OutputIdentifier::from_input(input);
		let res = input
			.merkle_proof()
			.verify(header.output_root, &out_id, pos)
			.map_err(|_| Error::MerkleProof)?;

		Ok(res)
	}

	/// Apply a new set of blocks on top of the existing sum trees. Blocks are
	/// applied in order of the provided Vec. If pruning is enabled, inputs also
	/// prune MMR data.
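To make the height rule above concrete, a small sketch with made-up numbers (the 1,000-block coinbase maturity is mentioned elsewhere in this diff; the heights here are illustrative, not taken from the code):

// A coinbase output mined at height 500 only matures once the spending
// block's height exceeds 500 + 1_000; the check above rejects anything earlier.
let coinbase_height: u64 = 500;
let coinbase_maturity: u64 = 1_000;
assert!(coinbase_height + coinbase_maturity >= 1_499); // height 1,499: still immature
assert!(!(coinbase_height + coinbase_maturity >= 1_501)); // height 1,501: spendable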
@@ -85,6 +85,10 @@ pub enum Error {
	AlreadySpent(Commitment),
	/// An output with that commitment already exists (should be unique)
	DuplicateCommitment(Commitment),
	/// Attempt to spend a coinbase output before it sufficiently matures.
	ImmatureCoinbase,
	/// Error validating a Merkle proof (coinbase output)
	MerkleProof,
	/// output not found
	OutputNotFound,
	/// output spent
@@ -123,7 +123,7 @@ fn test_coinbase_maturity() {
	// Confirm the tx attempting to spend the coinbase output
	// is not valid at the current block height given the current chain state.
	match chain.verify_coinbase_maturity(&coinbase_txn) {
		Err(Error::Transaction(transaction::Error::ImmatureCoinbase)) => {}
		Err(Error::ImmatureCoinbase) => {}
		_ => panic!("Expected transaction error with immature coinbase."),
	}
@@ -24,7 +24,7 @@ use chain::store::ChainKVStore;
use chain::txhashset::{self, TxHashSet};
use chain::types::Tip;
use chain::ChainStore;
use core::core::pmmr::MerkleProof;
use core::core::merkle_proof::MerkleProof;
use core::core::target::Difficulty;
use core::core::{Block, BlockHeader};
use keychain::{ExtKeychain, Keychain};
@@ -14,9 +14,9 @@

//! Public types for config modules

use std::path::PathBuf;
use std::io;
use std::fmt;
use std::io;
use std::path::PathBuf;

use servers::{ServerConfig, StratumServerConfig};
use util::LoggingConfig;
@@ -52,15 +52,6 @@ pub enum Error {
	Keychain(keychain::Error),
	/// Underlying consensus error (sort order currently)
	Consensus(consensus::Error),
	/// Coinbase has not yet matured and cannot be spent (1,000 blocks)
	ImmatureCoinbase {
		/// The height of the block containing the input spending the coinbase
		/// output
		height: u64,
		/// The lock_height needed to be reached for the coinbase output to
		/// mature
		lock_height: u64,
	},
	/// Underlying Merkle proof error
	MerkleProof,
	/// Error when verifying kernel sums via committed trait.
@@ -682,7 +673,6 @@ impl Block {
		self.verify_sorted()?;
		self.verify_cut_through()?;
		self.verify_coinbase()?;
		self.verify_inputs()?;
		self.verify_kernel_lock_heights()?;

		let sums = self.verify_kernel_sums(
@@ -725,27 +715,6 @@ impl Block {
		Ok(())
	}

	/// We can verify the Merkle proof (for coinbase inputs) here in isolation.
	/// But we cannot check the following as we need data from the index and
	/// the PMMR. So we must be sure to check these at the appropriate point
	/// during block validation. * node is in the correct pos in the PMMR
	/// * block is the correct one (based on output_root from block_header
	/// via the index)
	fn verify_inputs(&self) -> Result<(), Error> {
		let coinbase_inputs = self.inputs
			.iter()
			.filter(|x| x.features.contains(OutputFeatures::COINBASE_OUTPUT));

		for input in coinbase_inputs {
			let merkle_proof = input.merkle_proof();
			if !merkle_proof.verify() {
				return Err(Error::MerkleProof);
			}
		}

		Ok(())
	}

	fn verify_kernel_lock_heights(&self) -> Result<(), Error> {
		for k in &self.kernels {
			// check we have no kernels with lock_heights greater than current height
core/src/core/merkle_proof.rs (new file, 156 lines)
@@ -0,0 +1,156 @@
// Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//! Merkle Proofs

use core::hash::Hash;
use core::pmmr;
use ser;
use ser::{PMMRIndexHashable, Readable, Reader, Writeable, Writer};
use util;

/// Merkle proof errors.
#[derive(Clone, Debug, PartialEq)]
pub enum MerkleProofError {
	/// Merkle proof root hash does not match when attempting to verify.
	RootMismatch,
}

/// A Merkle proof that proves a particular element exists in the MMR.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone, PartialOrd, Ord)]
pub struct MerkleProof {
	/// The size of the MMR at the time the proof was created.
	pub mmr_size: u64,
	/// The sibling path from the leaf up to the final sibling hashing to the
	/// root.
	pub path: Vec<Hash>,
}

impl Writeable for MerkleProof {
	fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ser::Error> {
		writer.write_u64(self.mmr_size)?;
		writer.write_u64(self.path.len() as u64)?;
		self.path.write(writer)?;
		Ok(())
	}
}

impl Readable for MerkleProof {
	fn read(reader: &mut Reader) -> Result<MerkleProof, ser::Error> {
		let mmr_size = reader.read_u64()?;
		let path_len = reader.read_u64()?;
		let mut path = Vec::with_capacity(path_len as usize);
		for _ in 0..path_len {
			let hash = Hash::read(reader)?;
			path.push(hash);
		}

		Ok(MerkleProof { mmr_size, path })
	}
}

impl Default for MerkleProof {
	fn default() -> MerkleProof {
		MerkleProof::empty()
	}
}

impl MerkleProof {
	/// The "empty" Merkle proof.
	pub fn empty() -> MerkleProof {
		MerkleProof {
			mmr_size: 0,
			path: Vec::default(),
		}
	}

	/// Serialize the Merkle proof as a hex string (for api json endpoints)
	pub fn to_hex(&self) -> String {
		let mut vec = Vec::new();
		ser::serialize(&mut vec, &self).expect("serialization failed");
		util::to_hex(vec)
	}

	/// Convert hex string representation back to a Merkle proof instance
	pub fn from_hex(hex: &str) -> Result<MerkleProof, String> {
		let bytes = util::from_hex(hex.to_string()).unwrap();
		let res = ser::deserialize(&mut &bytes[..])
			.map_err(|_| format!("failed to deserialize a Merkle Proof"))?;
		Ok(res)
	}

	/// Verifies the Merkle proof against the provided
	/// root hash, element and position in the MMR.
	pub fn verify(
		&self,
		root: Hash,
		element: &PMMRIndexHashable,
		node_pos: u64,
	) -> Result<(), MerkleProofError> {
		let mut proof = self.clone();
		// calculate the peaks once as these are based on overall MMR size
		// (and will not change)
		let peaks_pos = pmmr::peaks(self.mmr_size);
		proof.verify_consume(root, element, node_pos, peaks_pos)
	}

	/// Consumes the Merkle proof while verifying it.
	/// The proof can no longer be used by the caller after doing this.
	/// Caller must clone() the proof first.
	fn verify_consume(
		&mut self,
		root: Hash,
		element: &PMMRIndexHashable,
		node_pos: u64,
		peaks_pos: Vec<u64>,
	) -> Result<(), MerkleProofError> {
		let node_hash = if node_pos > self.mmr_size {
			element.hash_with_index(self.mmr_size)
		} else {
			element.hash_with_index(node_pos - 1)
		};

		// handle special case of only a single entry in the MMR
		// (no siblings to hash together)
		if self.path.len() == 0 {
			if root == node_hash {
				return Ok(());
			} else {
				return Err(MerkleProofError::RootMismatch);
			}
		}

		let sibling = self.path.remove(0);
		let (parent_pos, sibling_pos) = pmmr::family(node_pos);

		if let Ok(x) = peaks_pos.binary_search(&node_pos) {
			let parent = if x == peaks_pos.len() - 1 {
				(sibling, node_hash)
			} else {
				(node_hash, sibling)
			};
			self.verify(root, &parent, parent_pos)
		} else if parent_pos > self.mmr_size {
			let parent = (sibling, node_hash);
			self.verify(root, &parent, parent_pos)
		} else {
			let parent = if pmmr::is_left_sibling(sibling_pos) {
				(sibling, node_hash)
			} else {
				(node_hash, sibling)
			};
			self.verify(root, &parent, parent_pos)
		}
	}
}
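A minimal usage sketch for the new proof type, assuming the TestElem and VecBackend test helpers added later in this diff (core/tests/vec_backend/mod.rs); positions are the usual 1-based MMR positions:

let mut backend = VecBackend::new();
let mut pmmr = PMMR::new(&mut backend);
pmmr.push(TestElem([0, 0, 0, 1])).unwrap();
pmmr.push(TestElem([0, 0, 0, 2])).unwrap();

// Build a proof for the leaf at pos 1, round-trip it through hex (as the API
// layer does), then verify it against the current root, element and position.
let proof = pmmr.merkle_proof(1).unwrap();
let proof = MerkleProof::from_hex(&proof.to_hex()).unwrap();
assert!(proof.verify(pmmr.root(), &TestElem([0, 0, 0, 1]), 1).is_ok());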
@@ -18,6 +18,7 @@ pub mod block;
pub mod committed;
pub mod hash;
pub mod id;
pub mod merkle_proof;
pub mod pmmr;
pub mod prune_list;
pub mod target;
@@ -35,15 +35,15 @@
//! The underlying Hashes are stored in a Backend implementation that can
//! either be a simple Vec or a database.

use std::marker;

use croaring::Bitmap;

use core::hash::Hash;
use core::merkle_proof::MerkleProof;
use core::BlockHeader;
use ser::{self, PMMRIndexHashable, PMMRable, Readable, Reader, Writeable, Writer};

use std::clone::Clone;
use std::marker;
use util::{self, LOGGER};
use ser::{PMMRIndexHashable, PMMRable};
use util::LOGGER;

/// Storage backend for the MMR, just needs to be indexed by order of insertion.
/// The PMMR itself does not need the Backend to be accurate on the existence
@@ -106,168 +106,6 @@ where
	fn dump_stats(&self);
}

/// Maximum peaks for a Merkle proof
pub const MAX_PEAKS: u64 = 100;

/// Maximum path for a Merkle proof
pub const MAX_PATH: u64 = 100;

/// A Merkle proof.
/// Proves inclusion of an output (node) in the output MMR.
/// We can use this to prove an output was unspent at the time of a given block
/// as the root will match the output_root of the block header.
/// The path and left_right can be used to reconstruct the peak hash for a
/// given tree in the MMR.
/// The root is the result of hashing all the peaks together.
#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd, Serialize, Deserialize)]
pub struct MerkleProof {
	/// The root hash of the full Merkle tree (in an MMR the hash of all peaks)
	pub root: Hash,
	/// The hash of the element in the tree we care about
	pub node: Hash,
	/// The size of the full Merkle tree
	pub mmr_size: u64,
	/// The full list of peak hashes in the MMR
	pub peaks: Vec<Hash>,
	/// The sibling (hash, pos) along the path of the tree
	/// as we traverse from node to peak
	pub path: Vec<(Hash, u64)>,
}

impl Writeable for MerkleProof {
	fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ser::Error> {
		ser_multiwrite!(
			writer,
			[write_fixed_bytes, &self.root],
			[write_fixed_bytes, &self.node],
			[write_u64, self.mmr_size],
			[write_u64, self.peaks.len() as u64],
			[write_u64, self.path.len() as u64]
		);

		self.peaks.write(writer)?;
		self.path.write(writer)?;

		Ok(())
	}
}

impl Readable for MerkleProof {
	fn read(reader: &mut Reader) -> Result<MerkleProof, ser::Error> {
		let root = Hash::read(reader)?;
		let node = Hash::read(reader)?;

		let (mmr_size, peaks_len, path_len) = ser_multiread!(reader, read_u64, read_u64, read_u64);

		if peaks_len > MAX_PEAKS || path_len > MAX_PATH {
			return Err(ser::Error::CorruptedData);
		}

		let mut peaks = Vec::with_capacity(peaks_len as usize);
		for _ in 0..peaks_len {
			peaks.push(Hash::read(reader)?);
		}

		let mut path = Vec::with_capacity(path_len as usize);
		for _ in 0..path_len {
			let hash = Hash::read(reader)?;
			let pos = reader.read_u64()?;
			path.push((hash, pos));
		}

		Ok(MerkleProof {
			root,
			node,
			mmr_size,
			peaks,
			path,
		})
	}
}

impl Default for MerkleProof {
	fn default() -> MerkleProof {
		MerkleProof::empty()
	}
}

impl MerkleProof {
	/// The "empty" Merkle proof.
	/// Basically some reasonable defaults. Will not verify successfully.
	pub fn empty() -> MerkleProof {
		MerkleProof {
			root: Hash::default(),
			node: Hash::default(),
			mmr_size: 0,
			peaks: vec![],
			path: vec![],
		}
	}

	/// Serialize the Merkle proof as a hex string (for api json endpoints)
	pub fn to_hex(&self) -> String {
		let mut vec = Vec::new();
		ser::serialize(&mut vec, &self).expect("serialization failed");
		util::to_hex(vec)
	}

	/// Convert hex string representation back to a Merkle proof instance
	pub fn from_hex(hex: &str) -> Result<MerkleProof, String> {
		let bytes = util::from_hex(hex.to_string()).unwrap();
		let res = ser::deserialize(&mut &bytes[..])
			.map_err(|_| "failed to deserialize a Merkle Proof".to_string())?;
		Ok(res)
	}

	/// Verify the Merkle proof.
	/// We do this by verifying the following -
	/// * inclusion of the node beneath a peak (via the Merkle path/branch of
	/// siblings) * inclusion of the peak in the "bag of peaks" beneath the
	/// root
	pub fn verify(&self) -> bool {
		// if we have no further elements in the path
		// then this proof verifies successfully if our node is
		// one of the peaks
		// and the peaks themselves hash to give the root
		if self.path.len() == 0 {
			if !self.peaks.contains(&self.node) {
				return false;
			}

			let mut bagged = None;
			for peak in self.peaks.iter().rev() {
				bagged = match bagged {
					None => Some(*peak),
					Some(rhs) => Some((*peak, rhs).hash_with_index(self.mmr_size)),
				}
			}
			return bagged == Some(self.root);
		}

		let mut path = self.path.clone();
		let (sibling, sibling_pos) = path.remove(0);
		let (parent_pos, _) = family(sibling_pos);

		// hash our node and sibling together (noting left/right position of the
		// sibling)
		let parent = if is_left_sibling(sibling_pos) {
			(sibling, self.node).hash_with_index(parent_pos - 1)
		} else {
			(self.node, sibling).hash_with_index(parent_pos - 1)
		};

		let proof = MerkleProof {
			root: self.root,
			node: parent,
			mmr_size: self.mmr_size,
			peaks: self.peaks.clone(),
			path,
		};

		proof.verify()
	}
}

/// Prunable Merkle Mountain Range implementation. All positions within the tree
/// start at 1 as they're postorder tree traversal positions rather than array
/// indices.
@@ -324,6 +162,41 @@ where
			.collect()
	}

	fn peak_path(&self, peak_pos: u64) -> Vec<Hash> {
		let rhs = self.bag_the_rhs(peak_pos);
		let mut res = peaks(self.last_pos)
			.into_iter()
			.filter(|x| x < &peak_pos)
			.filter_map(|x| self.backend.get_from_file(x))
			.collect::<Vec<_>>();
		res.reverse();
		if let Some(rhs) = rhs {
			res.insert(0, rhs);
		}
		res
	}

	/// Takes a single peak position and hashes together
	/// all the peaks to the right of this peak (if any).
	/// If this returns a hash then it is our peak's sibling.
	/// If none then the sibling of our peak is the peak to the left.
	pub fn bag_the_rhs(&self, peak_pos: u64) -> Option<Hash> {
		let rhs = peaks(self.last_pos)
			.into_iter()
			.filter(|x| x > &peak_pos)
			.filter_map(|x| self.backend.get_from_file(x))
			.collect::<Vec<_>>();

		let mut res = None;
		for peak in rhs.iter().rev() {
			res = match res {
				None => Some(*peak),
				Some(rhash) => Some((*peak, rhash).hash_with_index(self.unpruned_size())),
			}
		}
		res
	}

	/// Computes the root of the MMR. Finds all the peaks in the current
	/// tree and "bags" them to get a single peak.
	pub fn root(&self) -> Hash {
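The bag_the_rhs fold above reads right to left; a standalone sketch of the same fold over a plain slice of peak hashes (assuming the crate's Hash type and the PMMRIndexHashable impl for (Hash, Hash) used throughout pmmr.rs):

// Illustrative only: with peaks [p0, p1, p2, p3] and peak_pos at p0, this
// folds the right-hand peaks into hash(p1, hash(p2, p3)), which becomes the
// single "bagged" sibling appended to the proof path.
fn bag(rhs_peaks: &[Hash], mmr_size: u64) -> Option<Hash> {
	let mut res = None;
	for peak in rhs_peaks.iter().rev() {
		res = match res {
			None => Some(*peak),
			Some(rhash) => Some((*peak, rhash).hash_with_index(mmr_size)),
		}
	}
	res
}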
@@ -337,45 +210,46 @@ where
		res.expect("no root, invalid tree")
	}

	/// Build a Merkle proof for the element at the given position in the MMR
	/// Build a Merkle proof for the element at the given position.
	pub fn merkle_proof(&self, pos: u64) -> Result<MerkleProof, String> {
		debug!(
			LOGGER,
			"merkle_proof (via rewind) - {}, last_pos {}", pos, self.last_pos
		);
		debug!(LOGGER, "merkle_proof {}, last_pos {}", pos, self.last_pos);

		// check this pos is actually a leaf in the MMR
		if !is_leaf(pos) {
			return Err(format!("not a leaf at pos {}", pos));
		}

		let root = self.root();

		let node = self.get_hash(pos)
		// check we actually have a hash in the MMR at this pos
		self.get_hash(pos)
			.ok_or(format!("no element at pos {}", pos))?;

		let mmr_size = self.unpruned_size();

		// Edge case: an MMR with a single entry in it
		// this entry is a leaf, a peak and the root itself
		// and there are no siblings to hash with
		if mmr_size == 1 {
			return Ok(MerkleProof {
				mmr_size,
				path: vec![],
			});
		}

		let family_branch = family_branch(pos, self.last_pos);

		let path = family_branch
		let mut path = family_branch
			.iter()
			.map(|x| (self.get_from_file(x.1).unwrap_or_default(), x.1))
			.filter_map(|x| self.get_from_file(x.1))
			.collect::<Vec<_>>();

		let peaks = peaks(self.last_pos)
			.iter()
			.filter_map(|&x| self.get_from_file(x))
			.collect::<Vec<_>>();

		let proof = MerkleProof {
			root,
			node,
			mmr_size,
			peaks,
			path,
		let peak_pos = match family_branch.last() {
			Some(&(x, _)) => x,
			None => pos,
		};

		Ok(proof)
		path.append(&mut self.peak_path(peak_pos));

		Ok(MerkleProof { mmr_size, path })
	}

	/// Push a new element into the MMR. Computes new related peaks at
@@ -26,8 +26,8 @@ use util::{kernel_sig_msg, static_secp_instance};

use consensus::{self, VerifySortOrder};
use core::hash::{Hash, Hashed, ZERO_HASH};
use core::pmmr::MerkleProof;
use core::{committed, global, BlockHeader, Committed};
use core::merkle_proof::MerkleProof;
use core::{committed, Committed};
use keychain::{self, BlindingFactor};
use ser::{self, read_and_verify_sorted, ser_vec, PMMRable, Readable, Reader, Writeable,
	WriteableSorted, Writer};
@@ -65,9 +65,6 @@ pub enum Error {
	RangeProof,
	/// Error originating from an invalid Merkle proof
	MerkleProof,
	/// Error originating from an input attempting to spend an immature
	/// coinbase output
	ImmatureCoinbase,
	/// Returns if the value hidden within a RangeProof message isn't
	/// repeated 3 times, indicating it's incorrect
	InvalidProofMessage,
@@ -743,64 +740,6 @@ impl Input {
		let merkle_proof = self.merkle_proof.clone();
		merkle_proof.unwrap_or_else(MerkleProof::empty)
	}

	/// Verify the maturity of an output being spent by an input.
	/// Only relevant for spending coinbase outputs currently (locked for 1,000
	/// confirmations).
	///
	/// The proof associates the output with the root by its hash (and pos) in
	/// the MMR. The proof shows the output existed and was unspent at the
	/// time the output_root was built. The root associates the proof with a
	/// specific block header with that output_root. So the proof shows the
	/// output was unspent at the time of the block and is at least as old as
	/// that block (may be older).
	///
	/// We can verify maturity of the output being spent by -
	///
	/// * verifying the Merkle Proof produces the correct root for the given
	/// hash (from MMR) * verifying the root matches the output_root in the
	/// block_header * verifying the hash matches the node hash in the Merkle
	/// Proof * finally verify maturity rules based on height of the block
	/// header
	///
	pub fn verify_maturity(
		&self,
		hash: Hash,
		header: &BlockHeader,
		height: u64,
	) -> Result<(), Error> {
		if self.features.contains(OutputFeatures::COINBASE_OUTPUT) {
			let block_hash = self.block_hash();
			let merkle_proof = self.merkle_proof();

			// Check we are dealing with the correct block header
			if block_hash != header.hash() {
				return Err(Error::MerkleProof);
			}

			// Is our Merkle Proof valid? Does node hash up consistently to the root?
			if !merkle_proof.verify() {
				return Err(Error::MerkleProof);
			}

			// Is the root the correct root for the given block header?
			if merkle_proof.root != header.output_root {
				return Err(Error::MerkleProof);
			}

			// Does the hash from the MMR actually match the one in the Merkle Proof?
			if merkle_proof.node != hash {
				return Err(Error::MerkleProof);
			}

			// Finally has the output matured sufficiently now we know the block?
			let lock_height = header.height + global::coinbase_maturity();
			if lock_height > height {
				return Err(Error::ImmatureCoinbase);
			}
		}
		Ok(())
	}
}

bitflags! {
@@ -19,9 +19,9 @@ extern crate grin_keychain as keychain;
extern crate grin_util as util;
extern crate grin_wallet as wallet;

use grin_core::core::Transaction;
use grin_core::core::block::{Block, BlockHeader};
use grin_core::core::target::Difficulty;
use grin_core::core::Transaction;
use keychain::{Identifier, Keychain};
use wallet::libtx::build::{self, input, output, with_fee};
use wallet::libtx::reward;
core/tests/merkle_proof.rs (new file, 193 lines)
@@ -0,0 +1,193 @@
// Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#[macro_use]
extern crate grin_core as core;
extern crate croaring;

mod vec_backend;

use core::core::merkle_proof::MerkleProof;
use core::core::pmmr::PMMR;
use core::ser;
use core::ser::PMMRIndexHashable;
use vec_backend::{TestElem, VecBackend};

#[test]
fn empty_merkle_proof() {
	let proof = MerkleProof::empty();
	assert_eq!(proof.path, vec![]);
	assert_eq!(proof.mmr_size, 0);
}

#[test]
fn merkle_proof_ser_deser() {
	let mut ba = VecBackend::new();
	let mut pmmr = PMMR::new(&mut ba);
	for x in 0..15 {
		pmmr.push(TestElem([0, 0, 0, x])).unwrap();
	}
	let proof = pmmr.merkle_proof(9).unwrap();

	let mut vec = Vec::new();
	ser::serialize(&mut vec, &proof).expect("serialization failed");
	let proof_2: MerkleProof = ser::deserialize(&mut &vec[..]).unwrap();

	assert_eq!(proof, proof_2);
}

#[test]
fn pmmr_merkle_proof_prune_and_rewind() {
	let mut ba = VecBackend::new();
	let mut pmmr = PMMR::new(&mut ba);
	pmmr.push(TestElem([0, 0, 0, 1])).unwrap();
	pmmr.push(TestElem([0, 0, 0, 2])).unwrap();
	let proof = pmmr.merkle_proof(2).unwrap();

	// now prune an element and check we can still generate
	// the correct Merkle proof for the other element (after sibling pruned)
	pmmr.prune(1).unwrap();
	let proof_2 = pmmr.merkle_proof(2).unwrap();
	assert_eq!(proof, proof_2);
}

#[test]
fn pmmr_merkle_proof() {
	let elems = [
		TestElem([0, 0, 0, 1]),
		TestElem([0, 0, 0, 2]),
		TestElem([0, 0, 0, 3]),
		TestElem([0, 0, 0, 4]),
		TestElem([0, 0, 0, 5]),
		TestElem([0, 0, 0, 6]),
		TestElem([0, 0, 0, 7]),
		TestElem([0, 0, 0, 8]),
		TestElem([1, 0, 0, 0]),
	];

	let mut ba = VecBackend::new();
	let mut pmmr = PMMR::new(&mut ba);

	pmmr.push(elems[0]).unwrap();
	let pos_0 = elems[0].hash_with_index(0);
	assert_eq!(pmmr.get_hash(1).unwrap(), pos_0);

	let proof = pmmr.merkle_proof(1).unwrap();
	assert_eq!(proof.path, vec![]);
	assert!(proof.verify(pmmr.root(), &elems[0], 1).is_ok());

	pmmr.push(elems[1]).unwrap();
	let pos_1 = elems[1].hash_with_index(1);
	assert_eq!(pmmr.get_hash(2).unwrap(), pos_1);
	let pos_2 = (pos_0, pos_1).hash_with_index(2);
	assert_eq!(pmmr.get_hash(3).unwrap(), pos_2);

	assert_eq!(pmmr.root(), pos_2);
	assert_eq!(pmmr.peaks(), [pos_2]);

	// single peak, path with single sibling
	let proof = pmmr.merkle_proof(1).unwrap();
	assert_eq!(proof.path, vec![pos_1]);
	assert!(proof.verify(pmmr.root(), &elems[0], 1).is_ok());

	let proof = pmmr.merkle_proof(2).unwrap();
	assert_eq!(proof.path, vec![pos_0]);
	assert!(proof.verify(pmmr.root(), &elems[1], 2).is_ok());

	// three leaves, two peaks (one also the right-most leaf)
	pmmr.push(elems[2]).unwrap();
	let pos_3 = elems[2].hash_with_index(3);
	assert_eq!(pmmr.get_hash(4).unwrap(), pos_3);

	assert_eq!(pmmr.root(), (pos_2, pos_3).hash_with_index(4));
	assert_eq!(pmmr.peaks(), [pos_2, pos_3]);

	let proof = pmmr.merkle_proof(1).unwrap();
	assert_eq!(proof.path, vec![pos_1, pos_3]);
	assert!(proof.verify(pmmr.root(), &elems[0], 1).is_ok());

	let proof = pmmr.merkle_proof(2).unwrap();
	assert_eq!(proof.path, vec![pos_0, pos_3]);
	assert!(proof.verify(pmmr.root(), &elems[1], 2).is_ok());

	let proof = pmmr.merkle_proof(4).unwrap();
	assert_eq!(proof.path, vec![pos_2]);
	assert!(proof.verify(pmmr.root(), &elems[2], 4).is_ok());

	// 7 leaves, 3 peaks, 11 pos in total
	pmmr.push(elems[3]).unwrap();
	let pos_4 = elems[3].hash_with_index(4);
	assert_eq!(pmmr.get_hash(5).unwrap(), pos_4);
	let pos_5 = (pos_3, pos_4).hash_with_index(5);
	assert_eq!(pmmr.get_hash(6).unwrap(), pos_5);
	let pos_6 = (pos_2, pos_5).hash_with_index(6);
	assert_eq!(pmmr.get_hash(7).unwrap(), pos_6);

	pmmr.push(elems[4]).unwrap();
	let pos_7 = elems[4].hash_with_index(7);
	assert_eq!(pmmr.get_hash(8).unwrap(), pos_7);

	pmmr.push(elems[5]).unwrap();
	let pos_8 = elems[5].hash_with_index(8);
	assert_eq!(pmmr.get_hash(9).unwrap(), pos_8);

	let pos_9 = (pos_7, pos_8).hash_with_index(9);
	assert_eq!(pmmr.get_hash(10).unwrap(), pos_9);

	pmmr.push(elems[6]).unwrap();
	let pos_10 = elems[6].hash_with_index(10);
	assert_eq!(pmmr.get_hash(11).unwrap(), pos_10);

	assert_eq!(pmmr.unpruned_size(), 11);

	let proof = pmmr.merkle_proof(1).unwrap();
	assert_eq!(
		proof.path,
		vec![pos_1, pos_5, (pos_9, pos_10).hash_with_index(11)]
	);
	assert!(proof.verify(pmmr.root(), &elems[0], 1).is_ok());

	let proof = pmmr.merkle_proof(2).unwrap();
	assert_eq!(
		proof.path,
		vec![pos_0, pos_5, (pos_9, pos_10).hash_with_index(11)]
	);
	assert!(proof.verify(pmmr.root(), &elems[1], 2).is_ok());

	let proof = pmmr.merkle_proof(4).unwrap();
	assert_eq!(
		proof.path,
		vec![pos_4, pos_2, (pos_9, pos_10).hash_with_index(11)]
	);
	assert!(proof.verify(pmmr.root(), &elems[2], 4).is_ok());

	let proof = pmmr.merkle_proof(5).unwrap();
	assert_eq!(
		proof.path,
		vec![pos_3, pos_2, (pos_9, pos_10).hash_with_index(11)]
	);
	assert!(proof.verify(pmmr.root(), &elems[3], 5).is_ok());

	let proof = pmmr.merkle_proof(8).unwrap();
	assert_eq!(proof.path, vec![pos_8, pos_10, pos_6]);
	assert!(proof.verify(pmmr.root(), &elems[4], 8).is_ok());

	let proof = pmmr.merkle_proof(9).unwrap();
	assert_eq!(proof.path, vec![pos_7, pos_10, pos_6]);
	assert!(proof.verify(pmmr.root(), &elems[5], 9).is_ok());

	let proof = pmmr.merkle_proof(11).unwrap();
	assert_eq!(proof.path, vec![pos_9, pos_6]);
	assert!(proof.verify(pmmr.root(), &elems[6], 11).is_ok());
}
@@ -17,125 +17,13 @@
extern crate grin_core as core;
extern crate croaring;

use croaring::Bitmap;
mod vec_backend;

use core::core::hash::Hash;
use core::core::pmmr::{self, Backend, MerkleProof, PMMR};
use core::core::pmmr::{self, PMMR};
use core::core::prune_list::PruneList;
use core::core::BlockHeader;
use core::ser::{self, Error, PMMRIndexHashable, PMMRable, Readable, Reader, Writeable, Writer};

/// Simple MMR backend implementation based on a Vector. Pruning does not
/// compact the Vec itself.
#[derive(Clone, Debug)]
pub struct VecBackend<T>
where
	T: PMMRable,
{
	/// Backend elements
	pub elems: Vec<Option<(Hash, Option<T>)>>,
	/// Positions of removed elements
	pub remove_list: Vec<u64>,
}

impl<T> Backend<T> for VecBackend<T>
where
	T: PMMRable,
{
	fn append(&mut self, _position: u64, data: Vec<(Hash, Option<T>)>) -> Result<(), String> {
		self.elems.append(&mut map_vec!(data, |d| Some(d.clone())));
		Ok(())
	}

	fn get_hash(&self, position: u64) -> Option<Hash> {
		if self.remove_list.contains(&position) {
			None
		} else {
			if let Some(ref elem) = self.elems[(position - 1) as usize] {
				Some(elem.0)
			} else {
				None
			}
		}
	}

	fn get_data(&self, position: u64) -> Option<T> {
		if self.remove_list.contains(&position) {
			None
		} else {
			if let Some(ref elem) = self.elems[(position - 1) as usize] {
				elem.1.clone()
			} else {
				None
			}
		}
	}

	fn get_from_file(&self, position: u64) -> Option<Hash> {
		if let Some(ref x) = self.elems[(position - 1) as usize] {
			Some(x.0)
		} else {
			None
		}
	}

	fn get_data_from_file(&self, position: u64) -> Option<T> {
		if let Some(ref x) = self.elems[(position - 1) as usize] {
			x.1.clone()
		} else {
			None
		}
	}

	fn remove(&mut self, position: u64) -> Result<(), String> {
		self.remove_list.push(position);
		Ok(())
	}

	fn rewind(
		&mut self,
		position: u64,
		rewind_add_pos: &Bitmap,
		rewind_rm_pos: &Bitmap,
	) -> Result<(), String> {
		panic!("not yet implemented for vec backend...");
	}

	fn get_data_file_path(&self) -> String {
		"".to_string()
	}

	fn snapshot(&self, header: &BlockHeader) -> Result<(), String> {
		Ok(())
	}

	fn dump_stats(&self) {}
}

impl<T> VecBackend<T>
where
	T: PMMRable,
{
	/// Instantiates a new VecBackend<T>
	pub fn new() -> VecBackend<T> {
		VecBackend {
			elems: vec![],
			remove_list: vec![],
		}
	}

	/// Current number of elements in the underlying Vec.
	pub fn used_size(&self) -> usize {
		let mut usz = self.elems.len();
		for (idx, _) in self.elems.iter().enumerate() {
			let idx = idx as u64;
			if self.remove_list.contains(&idx) {
				usz -= 1;
			}
		}
		usz
	}
}
use core::ser::PMMRIndexHashable;
use vec_backend::{TestElem, VecBackend};

#[test]
fn some_all_ones() {
@@ -310,124 +198,6 @@ fn some_peaks() {
	);
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct TestElem([u32; 4]);

impl PMMRable for TestElem {
	fn len() -> usize {
		16
	}
}

impl Writeable for TestElem {
	fn write<W: Writer>(&self, writer: &mut W) -> Result<(), Error> {
		writer.write_u32(self.0[0])?;
		writer.write_u32(self.0[1])?;
		writer.write_u32(self.0[2])?;
		writer.write_u32(self.0[3])
	}
}

impl Readable for TestElem {
	fn read(reader: &mut Reader) -> Result<TestElem, Error> {
		Ok(TestElem([
			reader.read_u32()?,
			reader.read_u32()?,
			reader.read_u32()?,
			reader.read_u32()?,
		]))
	}
}

#[test]
fn empty_merkle_proof() {
	let proof = MerkleProof::empty();
	assert_eq!(proof.verify(), false);
}

#[test]
fn pmmr_merkle_proof() {
	// 0 0 1 0 0 1 2 0 0 1 0 0 1 2 3

	let mut ba = VecBackend::new();
	let mut pmmr = PMMR::new(&mut ba);

	pmmr.push(TestElem([0, 0, 0, 1])).unwrap();
	assert_eq!(pmmr.last_pos, 1);
	let proof = pmmr.merkle_proof(1).unwrap();
	let root = pmmr.root();
	assert_eq!(proof.peaks, [root]);
	assert!(proof.path.is_empty());
	assert!(proof.verify());

	// push two more elements into the PMMR
	pmmr.push(TestElem([0, 0, 0, 2])).unwrap();
	pmmr.push(TestElem([0, 0, 0, 3])).unwrap();
	assert_eq!(pmmr.last_pos, 4);

	let proof1 = pmmr.merkle_proof(1).unwrap();
	assert_eq!(proof1.peaks.len(), 2);
	assert_eq!(proof1.path.len(), 1);
	assert!(proof1.verify());

	let proof2 = pmmr.merkle_proof(2).unwrap();
	assert_eq!(proof2.peaks.len(), 2);
	assert_eq!(proof2.path.len(), 1);
	assert!(proof2.verify());

	// check that we cannot generate a merkle proof for pos 3 (not a leaf node)
	assert_eq!(
		pmmr.merkle_proof(3).err(),
		Some(format!("not a leaf at pos 3"))
	);

	let proof4 = pmmr.merkle_proof(4).unwrap();
	assert_eq!(proof4.peaks.len(), 2);
	assert!(proof4.path.is_empty());
	assert!(proof4.verify());

	// now add a few more elements to the PMMR to build a larger merkle proof
	for x in 4..1000 {
		pmmr.push(TestElem([0, 0, 0, x])).unwrap();
	}
	let proof = pmmr.merkle_proof(1).unwrap();
	assert_eq!(proof.peaks.len(), 8);
	assert_eq!(proof.path.len(), 9);
	assert!(proof.verify());
}

#[test]
fn pmmr_merkle_proof_prune_and_rewind() {
	let mut ba = VecBackend::new();
	let mut pmmr = PMMR::new(&mut ba);
	pmmr.push(TestElem([0, 0, 0, 1])).unwrap();
	pmmr.push(TestElem([0, 0, 0, 2])).unwrap();
	let proof = pmmr.merkle_proof(2).unwrap();

	// now prune an element and check we can still generate
	// the correct Merkle proof for the other element (after sibling pruned)
	pmmr.prune(1).unwrap();
	let proof_2 = pmmr.merkle_proof(2).unwrap();
	assert_eq!(proof, proof_2);
}

#[test]
fn merkle_proof_ser_deser() {
	let mut ba = VecBackend::new();
	let mut pmmr = PMMR::new(&mut ba);
	for x in 0..15 {
		pmmr.push(TestElem([0, 0, 0, x])).unwrap();
	}
	let proof = pmmr.merkle_proof(9).unwrap();
	assert!(proof.verify());

	let mut vec = Vec::new();
	ser::serialize(&mut vec, &proof).expect("serialization failed");
	let proof_2: MerkleProof = ser::deserialize(&mut &vec[..]).unwrap();

	assert_eq!(proof, proof_2);
}

#[test]
#[allow(unused_variables)]
fn pmmr_push_root() {
core/tests/vec_backend/mod.rs (new file, 165 lines)
@@ -0,0 +1,165 @@
// Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

extern crate croaring;

use croaring::Bitmap;

use core::core::hash::Hash;
use core::core::pmmr::Backend;
use core::core::BlockHeader;
use core::ser;
use core::ser::{PMMRable, Readable, Reader, Writeable, Writer};

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct TestElem(pub [u32; 4]);

impl PMMRable for TestElem {
	fn len() -> usize {
		16
	}
}

impl Writeable for TestElem {
	fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ser::Error> {
		try!(writer.write_u32(self.0[0]));
		try!(writer.write_u32(self.0[1]));
		try!(writer.write_u32(self.0[2]));
		writer.write_u32(self.0[3])
	}
}

impl Readable for TestElem {
	fn read(reader: &mut Reader) -> Result<TestElem, ser::Error> {
		Ok(TestElem([
			reader.read_u32()?,
			reader.read_u32()?,
			reader.read_u32()?,
			reader.read_u32()?,
		]))
	}
}

/// Simple MMR backend implementation based on a Vector. Pruning does not
/// compact the Vec itself.
#[derive(Clone, Debug)]
pub struct VecBackend<T>
where
	T: PMMRable,
{
	/// Backend elements
	pub elems: Vec<Option<(Hash, Option<T>)>>,
	/// Positions of removed elements
	pub remove_list: Vec<u64>,
}

impl<T> Backend<T> for VecBackend<T>
where
	T: PMMRable,
{
	fn append(&mut self, _position: u64, data: Vec<(Hash, Option<T>)>) -> Result<(), String> {
		self.elems.append(&mut map_vec!(data, |d| Some(d.clone())));
		Ok(())
	}

	fn get_hash(&self, position: u64) -> Option<Hash> {
		if self.remove_list.contains(&position) {
			None
		} else {
			if let Some(ref elem) = self.elems[(position - 1) as usize] {
				Some(elem.0)
			} else {
				None
			}
		}
	}

	fn get_data(&self, position: u64) -> Option<T> {
		if self.remove_list.contains(&position) {
			None
		} else {
			if let Some(ref elem) = self.elems[(position - 1) as usize] {
				elem.1.clone()
			} else {
				None
			}
		}
	}

	fn get_from_file(&self, position: u64) -> Option<Hash> {
		if let Some(ref x) = self.elems[(position - 1) as usize] {
			Some(x.0)
		} else {
			None
		}
	}

	fn get_data_from_file(&self, position: u64) -> Option<T> {
		if let Some(ref x) = self.elems[(position - 1) as usize] {
			x.1.clone()
		} else {
			None
		}
	}

	fn remove(&mut self, position: u64) -> Result<(), String> {
		self.remove_list.push(position);
		Ok(())
	}

	fn rewind(
		&mut self,
		position: u64,
		_rewind_add_pos: &Bitmap,
		_rewind_rm_pos: &Bitmap,
	) -> Result<(), String> {
		self.elems = self.elems[0..(position as usize) + 1].to_vec();
		Ok(())
	}

	fn snapshot(&self, _header: &BlockHeader) -> Result<(), String> {
		Ok(())
	}

	fn get_data_file_path(&self) -> String {
		"".to_string()
	}

	fn dump_stats(&self) {}
}

impl<T> VecBackend<T>
where
	T: PMMRable,
{
	/// Instantiates a new VecBackend<T>
	pub fn new() -> VecBackend<T> {
		VecBackend {
			elems: vec![],
			remove_list: vec![],
		}
	}

	// /// Current number of elements in the underlying Vec.
	// pub fn used_size(&self) -> usize {
	// 	let mut usz = self.elems.len();
	// 	for (idx, _) in self.elems.iter().enumerate() {
	// 		let idx = idx as u64;
	// 		if self.remove_list.contains(&idx) {
	// 			usz -= 1;
	// 		}
	// 	}
	// 	usz
	// }
}
@@ -15,8 +15,8 @@
use blake2::blake2b::blake2b;
use byteorder::{BigEndian, ByteOrder};
use types::{Error, Identifier};
use util::secp::Secp256k1;
use util::secp::key::SecretKey;
use util::secp::Secp256k1;

#[derive(Debug, Clone)]
pub struct ChildKey {
@@ -119,8 +119,8 @@ mod test {

	use super::{ExtendedKey, Identifier};
	use util;
	use util::secp::Secp256k1;
	use util::secp::key::SecretKey;
	use util::secp::Secp256k1;

	fn from_hex(hex_str: &str) -> Vec<u8> {
		util::from_hex(hex_str.to_string()).unwrap()
@@ -297,8 +297,8 @@ mod test {
	use rand::thread_rng;

	use types::BlindingFactor;
	use util::secp::Secp256k1;
	use util::secp::key::{SecretKey, ZERO_KEY};
	use util::secp::Secp256k1;

	#[test]
	fn split_blinding_factor() {
@@ -34,7 +34,7 @@ use chain::store::ChainKVStore;
use chain::txhashset;
use chain::txhashset::TxHashSet;
use core::core::hash::Hashed;
use core::core::pmmr::MerkleProof;
use core::core::merkle_proof::MerkleProof;
use pool::*;

use keychain::Keychain;
@@ -307,9 +307,7 @@ impl StratumServer {

				// Call the handler function for requested method
				let response = match request.method.as_str() {
					"login" => {
						self.handle_login(request.params, &mut workers_l[num])
					}
					"login" => self.handle_login(request.params, &mut workers_l[num]),
					"submit" => {
						let res = self.handle_submit(
							request.params,
@@ -188,7 +188,7 @@ impl LeafSet {
		self.bitmap.cardinality() as usize
	}

	// Is the leaf_set empty.
	/// Is the leaf_set empty.
	pub fn is_empty(&self) -> bool {
		self.len() == 0
	}
@@ -28,7 +28,7 @@
use util::{kernel_sig_msg, secp};

use core::core::hash::Hash;
use core::core::pmmr::MerkleProof;
use core::core::merkle_proof::MerkleProof;
use core::core::{Input, Output, OutputFeatures, ProofMessageElements, Transaction, TxKernel};
use keychain::{self, BlindSum, BlindingFactor, Identifier, Keychain};
use libtx::{aggsig, proof};
@@ -23,7 +23,8 @@ use serde;
use failure::ResultExt;

use core::core::hash::Hash;
use core::core::pmmr::MerkleProof;
use core::core::merkle_proof::MerkleProof;

use keychain::{Identifier, Keychain};

use libtx::slate::Slate;