Mirror of https://github.com/mimblewimble/grin.git
Split pmmr.get() into get_hash() and get_data() (#855)
commit 7a8d6147f4 (parent 05d2b1df1e)
5 changed files with 148 additions and 120 deletions
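This commit splits the single `Backend::get(position, include_data)` accessor, which returned `Option<(Hash, Option<T>)>`, into two independent accessors: `get_hash(position) -> Option<Hash>` and `get_data(position) -> Option<T>`. Hash-only callers no longer unwrap a nested `Option`, and data-only callers (kernel and rangeproof validation, output position indexing) skip the hash lookup entirely. The sketch below is a minimal, self-contained illustration of the new shape rather than grin's actual code: the `u64` hash, the trimmed-down `Backend` trait, and the toy `VecBackend` (modelled on the in-memory test backend in this diff) are all stand-ins.

// Stand-ins for illustration only: grin's real Hash is a blake2b digest wrapper
// and the real Backend trait carries more methods (append, rewind, remove, ...).
type Hash = u64;

/// Simplified view of the split API: hash lookup and data lookup are now
/// two independent calls instead of one `get(pos, include_data)`.
trait Backend<T> {
    fn get_hash(&self, position: u64) -> Option<Hash>;
    fn get_data(&self, position: u64) -> Option<T>;
}

/// Toy in-memory backend, shaped like the test backend in this diff:
/// each slot holds an optional (hash, element) pair, and a remove list
/// marks pruned positions.
struct VecBackend<T> {
    elems: Vec<Option<(Hash, T)>>,
    remove_list: Vec<u64>,
}

impl<T: Clone> Backend<T> for VecBackend<T> {
    fn get_hash(&self, position: u64) -> Option<Hash> {
        if self.remove_list.contains(&position) {
            None
        } else {
            self.elems[(position - 1) as usize]
                .as_ref()
                .map(|elem| elem.0)
        }
    }

    fn get_data(&self, position: u64) -> Option<T> {
        if self.remove_list.contains(&position) {
            None
        } else {
            self.elems[(position - 1) as usize]
                .as_ref()
                .map(|elem| elem.1.clone())
        }
    }
}

fn main() {
    let backend = VecBackend {
        elems: vec![Some((11, "a")), Some((22, "b")), None],
        remove_list: vec![2],
    };

    // Callers that only need the hash no longer unwrap a nested Option.
    assert_eq!(backend.get_hash(1), Some(11));
    assert_eq!(backend.get_data(1), Some("a"));

    // A removed position reads as absent from both accessors.
    assert_eq!(backend.get_hash(2), None);
    assert_eq!(backend.get_data(2), None);
}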
@@ -627,19 +627,19 @@ impl Chain {
 	}

 	/// returns the last n nodes inserted into the output sum tree
-	pub fn get_last_n_output(&self, distance: u64) -> Vec<(Hash, Option<OutputIdentifier>)> {
+	pub fn get_last_n_output(&self, distance: u64) -> Vec<(Hash, OutputIdentifier)> {
 		let mut txhashset = self.txhashset.write().unwrap();
 		txhashset.last_n_output(distance)
 	}

 	/// as above, for rangeproofs
-	pub fn get_last_n_rangeproof(&self, distance: u64) -> Vec<(Hash, Option<RangeProof>)> {
+	pub fn get_last_n_rangeproof(&self, distance: u64) -> Vec<(Hash, RangeProof)> {
 		let mut txhashset = self.txhashset.write().unwrap();
 		txhashset.last_n_rangeproof(distance)
 	}

 	/// as above, for kernels
-	pub fn get_last_n_kernel(&self, distance: u64) -> Vec<(Hash, Option<TxKernel>)> {
+	pub fn get_last_n_kernel(&self, distance: u64) -> Vec<(Hash, TxKernel)> {
 		let mut txhashset = self.txhashset.write().unwrap();
 		txhashset.last_n_kernel(distance)
 	}

@@ -145,7 +145,7 @@ impl TxHashSet {
 			Ok(pos) => {
 				let output_pmmr: PMMR<OutputIdentifier, _> =
 					PMMR::at(&mut self.output_pmmr_h.backend, self.output_pmmr_h.last_pos);
-				if let Some((hash, _)) = output_pmmr.get(pos, false) {
+				if let Some(hash) = output_pmmr.get_hash(pos) {
 					if hash == output_id.hash_with_index(pos) {
 						Ok(hash)
 					} else {

@@ -163,21 +163,21 @@ impl TxHashSet {
 	/// returns the last N nodes inserted into the tree (i.e. the 'bottom'
 	/// nodes at level 0
 	/// TODO: These need to return the actual data from the flat-files instead of hashes now
-	pub fn last_n_output(&mut self, distance: u64) -> Vec<(Hash, Option<OutputIdentifier>)> {
+	pub fn last_n_output(&mut self, distance: u64) -> Vec<(Hash, OutputIdentifier)> {
 		let output_pmmr: PMMR<OutputIdentifier, _> =
 			PMMR::at(&mut self.output_pmmr_h.backend, self.output_pmmr_h.last_pos);
 		output_pmmr.get_last_n_insertions(distance)
 	}

 	/// as above, for range proofs
-	pub fn last_n_rangeproof(&mut self, distance: u64) -> Vec<(Hash, Option<RangeProof>)> {
+	pub fn last_n_rangeproof(&mut self, distance: u64) -> Vec<(Hash, RangeProof)> {
 		let rproof_pmmr: PMMR<RangeProof, _> =
 			PMMR::at(&mut self.rproof_pmmr_h.backend, self.rproof_pmmr_h.last_pos);
 		rproof_pmmr.get_last_n_insertions(distance)
 	}

 	/// as above, for kernels
-	pub fn last_n_kernel(&mut self, distance: u64) -> Vec<(Hash, Option<TxKernel>)> {
+	pub fn last_n_kernel(&mut self, distance: u64) -> Vec<(Hash, TxKernel)> {
 		let kernel_pmmr: PMMR<TxKernel, _> =
 			PMMR::at(&mut self.kernel_pmmr_h.backend, self.kernel_pmmr_h.last_pos);
 		kernel_pmmr.get_last_n_insertions(distance)

@@ -387,11 +387,13 @@ impl<'a> Extension<'a> {
 		let pos_res = self.get_output_pos(&commit);
 		if let Ok(pos) = pos_res {
 			let output_id_hash = OutputIdentifier::from_input(input).hash_with_index(pos);
-			if let Some((read_hash, read_elem)) = self.output_pmmr.get(pos, true) {
+			if let Some(read_hash) = self.output_pmmr.get_hash(pos) {
 				// check hash from pmmr matches hash from input (or corresponding output)
 				// if not then the input is not being honest about
 				// what it is attempting to spend...

+				let read_elem = self.output_pmmr.get_data(pos);
+
 				if output_id_hash != read_hash
 					|| output_id_hash
 						!= read_elem

@@ -435,7 +437,7 @@ impl<'a> Extension<'a> {
 		// (non-historical node will have a much smaller one)
 		// note that this doesn't show the commitment *never* existed, just
 		// that this is not an existing unspent commitment right now
-		if let Some((hash, _)) = self.output_pmmr.get(pos, false) {
+		if let Some(hash) = self.output_pmmr.get_hash(pos) {
 			// processing a new fork so we may get a position on the old
 			// fork that exists but matches a different node
 			// filtering that case out

@@ -635,9 +637,8 @@ impl<'a> Extension<'a> {
 		for n in 1..self.output_pmmr.unpruned_size() + 1 {
 			// non-pruned leaves only
 			if pmmr::bintree_postorder_height(n) == 0 {
-				if let Some((_, out)) = self.output_pmmr.get(n, true) {
-					self.commit_index
-						.save_output_pos(&out.expect("not a leaf node").commit, n)?;
+				if let Some(out) = self.output_pmmr.get_data(n) {
+					self.commit_index.save_output_pos(&out.commit, n)?;
 				}
 			}
 		}

@@ -708,7 +709,7 @@ impl<'a> Extension<'a> {

 		for n in 1..self.kernel_pmmr.unpruned_size() + 1 {
 			if pmmr::is_leaf(n) {
-				if let Some((_, Some(kernel))) = self.kernel_pmmr.get(n, true) {
+				if let Some(kernel) = self.kernel_pmmr.get_data(n) {
 					kernel.verify()?;
 					commitments.push(kernel.excess.clone());
 				}

@@ -736,8 +737,8 @@ impl<'a> Extension<'a> {
 		let mut proof_count = 0;
 		for n in 1..self.output_pmmr.unpruned_size() + 1 {
 			if pmmr::is_leaf(n) {
-				if let Some((_, Some(out))) = self.output_pmmr.get(n, true) {
-					if let Some((_, Some(rp))) = self.rproof_pmmr.get(n, true) {
+				if let Some(out) = self.output_pmmr.get_data(n) {
+					if let Some(rp) = self.rproof_pmmr.get_data(n) {
 						out.to_output(rp).verify_proof()?;
 					} else {
 						// TODO - rangeproof not found

@@ -764,7 +765,7 @@ impl<'a> Extension<'a> {
 		let mut commitments = vec![];
 		for n in 1..self.output_pmmr.unpruned_size() + 1 {
 			if pmmr::is_leaf(n) {
-				if let Some((_, Some(out))) = self.output_pmmr.get(n, true) {
+				if let Some(out) = self.output_pmmr.get_data(n) {
 					commitments.push(out.commit.clone());
 				}
 			}

@@ -63,16 +63,18 @@ where
 	/// occurred (see remove).
 	fn rewind(&mut self, position: u64, index: u32) -> Result<(), String>;

-	/// Get a Hash by insertion position. If include_data is true, will
-	/// also return the associated data element
-	fn get(&self, position: u64, include_data: bool) -> Option<(Hash, Option<T>)>;
+	/// Get a Hash by insertion position.
+	fn get_hash(&self, position: u64) -> Option<Hash>;
+
+	/// Get underlying data by insertion position.
+	fn get_data(&self, position: u64) -> Option<T>;

-	/// Get a Hash by original insertion position (ignoring the remove
-	/// list).
+	/// Get a Hash by original insertion position
+	/// (ignoring the remove log).
 	fn get_from_file(&self, position: u64) -> Option<Hash>;

-	/// Get a Data Element by original insertion position (ignoring the remove
-	/// list).
+	/// Get a Data Element by original insertion position
+	/// (ignoring the remove log).
 	fn get_data_from_file(&self, position: u64) -> Option<T>;

 	/// Remove HashSums by insertion position. An index is also provided so the

@@ -327,9 +329,8 @@ where

 		let root = self.root();

-		let node = self.get(pos, false)
-			.ok_or(format!("no element at pos {}", pos))?
-			.0;
+		let node = self.get_hash(pos)
+			.ok_or(format!("no element at pos {}", pos))?;

 		let family_branch = family_branch(pos, self.last_pos);


@@ -413,7 +414,7 @@ where
 	/// to keep an index of elements to positions in the tree. Prunes parent
 	/// nodes as well when they become childless.
 	pub fn prune(&mut self, position: u64, index: u32) -> Result<bool, String> {
-		if let None = self.backend.get(position, false) {
+		if let None = self.backend.get_hash(position) {
 			return Ok(false);
 		}
 		let prunable_height = bintree_postorder_height(position);

@@ -439,7 +440,7 @@ where

 			// if we have a pruned sibling, we can continue up the tree
 			// otherwise we're done
-			if let None = self.backend.get(sibling, false) {
+			if let None = self.backend.get_hash(sibling) {
 				current = parent;
 			} else {
 				break;

@@ -450,34 +451,47 @@ where
 		Ok(true)
 	}

-	/// Helper function to get a node at a given position from
-	/// the backend.
-	pub fn get(&self, position: u64, include_data: bool) -> Option<(Hash, Option<T>)> {
-		if position > self.last_pos {
+	/// Get a hash at provided position in the MMR.
+	pub fn get_hash(&self, pos: u64) -> Option<Hash> {
+		if pos > self.last_pos {
 			None
 		} else {
-			self.backend.get(position, include_data)
+			self.backend.get_hash(pos)
 		}
 	}

-	fn get_from_file(&self, position: u64) -> Option<Hash> {
-		if position > self.last_pos {
+	/// Get the data element at provided in the MMR.
+	pub fn get_data(&self, pos: u64) -> Option<T> {
+		if pos > self.last_pos {
 			None
 		} else {
-			self.backend.get_from_file(position)
+			self.backend.get_data(pos)
+		}
+	}
+
+	/// Get the hash from the underlying MMR file
+	/// (ignores the remove log).
+	fn get_from_file(&self, pos: u64) -> Option<Hash> {
+		if pos > self.last_pos {
+			None
+		} else {
+			self.backend.get_from_file(pos)
 		}
 	}

 	/// Helper function to get the last N nodes inserted, i.e. the last
 	/// n nodes along the bottom of the tree
-	pub fn get_last_n_insertions(&self, n: u64) -> Vec<(Hash, Option<T>)> {
-		let mut return_vec = Vec::new();
+	pub fn get_last_n_insertions(&self, n: u64) -> Vec<(Hash, T)> {
+		let mut return_vec = vec![];
 		let mut last_leaf = self.last_pos;
 		let size = self.unpruned_size();
 		// Special case that causes issues in bintree functions,
 		// just return
 		if size == 1 {
-			return_vec.push(self.backend.get(last_leaf, true).unwrap());
+			return_vec.push((
+				self.backend.get_hash(last_leaf).unwrap(),
+				self.backend.get_data(last_leaf).unwrap(),
+			));
 			return return_vec;
 		}
 		// if size is even, we're already at the bottom, otherwise

@@ -492,7 +506,10 @@ where
 			if bintree_postorder_height(last_leaf) > 0 {
 				last_leaf = bintree_rightmost(last_leaf);
 			}
-			return_vec.push(self.backend.get(last_leaf, true).unwrap());
+			return_vec.push((
+				self.backend.get_hash(last_leaf).unwrap(),
+				self.backend.get_data(last_leaf).unwrap(),
+			));

 			last_leaf = bintree_jump_left_sibling(last_leaf);
 		}

@@ -504,7 +521,7 @@ where
 		// iterate on all parent nodes
 		for n in 1..(self.last_pos + 1) {
 			if bintree_postorder_height(n) > 0 {
-				if let Some(hs) = self.get(n, false) {
+				if let Some(hash) = self.get_hash(n) {
 					// take the left and right children, if they exist
 					let left_pos = bintree_move_down_left(n).ok_or(format!("left_pos not found"))?;
 					let right_pos = bintree_jump_right_sibling(left_pos);

@@ -514,7 +531,7 @@ where
 					if let Some(right_child_hs) = self.get_from_file(right_pos) {
 						// hash the two child nodes together with parent_pos and compare
 						let (parent_pos, _) = family(left_pos);
-						if (left_child_hs, right_child_hs).hash_with_index(parent_pos) != hs.0 {
+						if (left_child_hs, right_child_hs).hash_with_index(parent_pos) != hash {
 							return Err(format!(
 								"Invalid MMR, hash of parent at {} does \
 								 not match children.",

@@ -556,9 +573,9 @@ where
 				break;
 			}
 			idx.push_str(&format!("{:>8} ", m + 1));
-			let ohs = self.get(m + 1, false);
+			let ohs = self.get_hash(m + 1);
 			match ohs {
-				Some(hs) => hashes.push_str(&format!("{} ", hs.0)),
+				Some(hs) => hashes.push_str(&format!("{} ", hs)),
 				None => hashes.push_str(&format!("{:>8} ", "??")),
 			}
 		}

@@ -1015,11 +1032,27 @@ mod test {
 			Ok(())
 		}

-		fn get(&self, position: u64, _include_data: bool) -> Option<(Hash, Option<T>)> {
+		fn get_hash(&self, position: u64) -> Option<Hash> {
 			if self.remove_list.contains(&position) {
 				None
 			} else {
-				self.elems[(position - 1) as usize].clone()
+				if let Some(ref elem) = self.elems[(position - 1) as usize] {
+					Some(elem.0)
+				} else {
+					None
+				}
+			}
+		}
+
+		fn get_data(&self, position: u64) -> Option<T> {
+			if self.remove_list.contains(&position) {
+				None
+			} else {
+				if let Some(ref elem) = self.elems[(position - 1) as usize] {
+					elem.1.clone()
+				} else {
+					None
+				}
 			}
 		}

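With the accessor split in place, `get_last_n_insertions` now returns `Vec<(Hash, T)>` instead of `Vec<(Hash, Option<T>)>`, which is why the chain- and txhashset-level helpers earlier in this diff drop the inner `Option` from their return types as well. A hypothetical consumer of the new shape (the `u64` hash and the `summarize` helper are illustrative stand-ins, not grin code):

// Consumers of the new return shape read the element directly;
// there is no inner Option left to unwrap.
fn summarize<T: std::fmt::Debug>(last_n: Vec<(u64, T)>) -> Vec<String> {
    last_n
        .into_iter()
        .map(|(hash, data)| format!("{:x} -> {:?}", hash, data))
        .collect()
}

fn main() {
    let out = summarize(vec![(0xabc, "kernel-1"), (0xdef, "kernel-2")]);
    assert_eq!(out[0], "abc -> \"kernel-1\"");
}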
@@ -161,26 +161,27 @@ where
 		}
 	}

-	/// Get a Hash by insertion position
-	fn get(&self, position: u64, include_data: bool) -> Option<(Hash, Option<T>)> {
+	/// Get the hash at pos.
+	/// Return None if it has been removed.
+	fn get_hash(&self, pos: u64) -> Option<(Hash)> {
 		// Check if this position has been pruned in the remove log...
-		if self.rm_log.includes(position) {
-			return None;
-		}
+		if self.rm_log.includes(pos) {
+			None
+		} else {
+			self.get_from_file(pos)
+		}
+	}

-		let hash_val = self.get_from_file(position);
-		if !include_data {
-			return hash_val.map(|hash| (hash, None));
-		}
-
-		// if this is not a leaf then we have no data
-		if !pmmr::is_leaf(position) {
-			return hash_val.map(|hash| (hash, None));
-		}
-
-		let data = self.get_data_from_file(position);
-
-		hash_val.map(|x| (x, data))
+	/// Get the data at pos.
+	/// Return None if it has been removed or if pos is not a leaf node.
+	fn get_data(&self, pos: u64) -> Option<(T)> {
+		if self.rm_log.includes(pos) {
+			None
+		} else if !pmmr::is_leaf(pos) {
+			None
+		} else {
+			self.get_data_from_file(pos)
+		}
 	}

 	fn rewind(&mut self, position: u64, index: u32) -> Result<(), String> {

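The store-level backend above encodes a rule the tests below rely on: `get_hash` and `get_data` consult the remove log (and `get_data` additionally returns `None` for non-leaf positions), while `get_from_file` and `get_data_from_file` keep reading the underlying files regardless. A minimal, self-contained sketch of that distinction, assuming a plain `HashSet<u64>` and a `Vec<u64>` as stand-ins for grin's remove log and hash file:

use std::collections::HashSet;

// Hypothetical stand-ins: `hashes` plays the role of the append-only hash file,
// `rm_log` the role of grin's remove log.
struct PrunableBackend {
    hashes: Vec<u64>,
    rm_log: HashSet<u64>,
}

impl PrunableBackend {
    /// Logical view: removed positions read as absent.
    fn get_hash(&self, pos: u64) -> Option<u64> {
        if self.rm_log.contains(&pos) {
            None
        } else {
            self.get_from_file(pos)
        }
    }

    /// Raw file view: ignores the remove log entirely.
    fn get_from_file(&self, pos: u64) -> Option<u64> {
        self.hashes.get((pos - 1) as usize).copied()
    }
}

fn main() {
    let mut backend = PrunableBackend {
        hashes: vec![101, 202, 303],
        rm_log: HashSet::new(),
    };
    backend.rm_log.insert(2);

    // The logical accessor hides the removed position...
    assert_eq!(backend.get_hash(2), None);
    // ...but the raw file accessor can still see it, which is exactly
    // what the pmmr_compact_* tests below assert.
    assert_eq!(backend.get_from_file(2), Some(202));
}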
@@ -39,7 +39,7 @@ fn pmmr_append() {

 	// check the resulting backend store and the computation of the root
 	let node_hash = elems[0].hash_with_index(1);
-	assert_eq!(backend.get(1, false).expect("").0, node_hash);
+	assert_eq!(backend.get_hash(1).unwrap(), node_hash);

 	// 0010012001001230


@@ -88,9 +88,9 @@ fn pmmr_compact_leaf_sibling() {
 	let (pos_1_hash, pos_2_hash, pos_3_hash) = {
 		let mut pmmr = PMMR::at(&mut backend, mmr_size);
 		(
-			pmmr.get(1, false).unwrap().0,
-			pmmr.get(2, false).unwrap().0,
-			pmmr.get(3, false).unwrap().0,
+			pmmr.get_hash(1).unwrap(),
+			pmmr.get_hash(2).unwrap(),
+			pmmr.get_hash(3).unwrap(),
 		)
 	};


@@ -109,11 +109,11 @@ fn pmmr_compact_leaf_sibling() {
 		let pmmr = PMMR::at(&mut backend, mmr_size);

 		// check that pos 1 is "removed"
-		assert_eq!(pmmr.get(1, false), None);
+		assert_eq!(pmmr.get_hash(1), None);

 		// check that pos 2 and 3 are unchanged
-		assert_eq!(pmmr.get(2, false).unwrap().0, pos_2_hash);
-		assert_eq!(pmmr.get(3, false).unwrap().0, pos_3_hash);
+		assert_eq!(pmmr.get_hash(2).unwrap(), pos_2_hash);
+		assert_eq!(pmmr.get_hash(3).unwrap(), pos_3_hash);
 	}

 	// check we can still retrieve the "removed" element at pos 1

@@ -128,11 +128,11 @@ fn pmmr_compact_leaf_sibling() {
 		let pmmr = PMMR::at(&mut backend, mmr_size);

 		// check that pos 1 is "removed"
-		assert_eq!(pmmr.get(1, false), None);
+		assert_eq!(pmmr.get_hash(1), None);

 		// check that pos 2 and 3 are unchanged
-		assert_eq!(pmmr.get(2, false).unwrap().0, pos_2_hash);
-		assert_eq!(pmmr.get(3, false).unwrap().0, pos_3_hash);
+		assert_eq!(pmmr.get_hash(2).unwrap(), pos_2_hash);
+		assert_eq!(pmmr.get_hash(3).unwrap(), pos_3_hash);
 	}

 	// Check we can still retrieve the "removed" hash at pos 1 from the hash file.

@@ -171,9 +171,9 @@ fn pmmr_prune_compact() {
 		let pmmr: PMMR<TestElem, _> = PMMR::at(&mut backend, mmr_size);
 		assert_eq!(root, pmmr.root());
 		// check we can still retrieve same element from leaf index 2
-		assert_eq!(pmmr.get(2, true).unwrap().1.unwrap(), TestElem(2));
+		assert_eq!(pmmr.get_data(2).unwrap(), TestElem(2));
 		// and the same for leaf index 7
-		assert_eq!(pmmr.get(11, true).unwrap().1.unwrap(), TestElem(7));
+		assert_eq!(pmmr.get_data(11).unwrap(), TestElem(7));
 	}

 	// compact

@@ -183,8 +183,8 @@ fn pmmr_prune_compact() {
 	{
 		let pmmr: PMMR<TestElem, _> = PMMR::at(&mut backend, mmr_size);
 		assert_eq!(root, pmmr.root());
-		assert_eq!(pmmr.get(2, true).unwrap().1.unwrap(), TestElem(2));
-		assert_eq!(pmmr.get(11, true).unwrap().1.unwrap(), TestElem(7));
+		assert_eq!(pmmr.get_data(2).unwrap(), TestElem(2));
+		assert_eq!(pmmr.get_data(11).unwrap(), TestElem(7));
 	}

 	teardown(data_dir);

@@ -200,9 +200,9 @@ fn pmmr_reload() {
 	let mmr_size = load(0, &elems[..], &mut backend);

 	// retrieve entries from the hash file for comparison later
-	let (pos_3_hash, _) = backend.get(3, false).unwrap();
-	let (pos_4_hash, _) = backend.get(4, false).unwrap();
-	let (pos_5_hash, _) = backend.get(5, false).unwrap();
+	let pos_3_hash = backend.get_hash(3).unwrap();
+	let pos_4_hash = backend.get_hash(4).unwrap();
+	let pos_5_hash = backend.get_hash(5).unwrap();

 	// save the root
 	let root = {

@@ -257,16 +257,16 @@ fn pmmr_reload() {
 	}

 	// pos 1 and pos 2 are both removed (via parent pos 3 in prune list)
-	assert_eq!(backend.get(1, false), None);
-	assert_eq!(backend.get(2, false), None);
+	assert_eq!(backend.get_hash(1), None);
+	assert_eq!(backend.get_hash(2), None);

 	// pos 3 is removed (via prune list)
-	assert_eq!(backend.get(3, false), None);
+	assert_eq!(backend.get_hash(3), None);

 	// pos 4 is removed (via prune list)
-	assert_eq!(backend.get(4, false), None);
+	assert_eq!(backend.get_hash(4), None);
 	// pos 5 is removed (via rm_log)
-	assert_eq!(backend.get(5, false), None);
+	assert_eq!(backend.get_hash(5), None);

 	// now check contents of the hash file
 	// pos 1 and pos 2 are no longer in the hash file

@@ -383,13 +383,10 @@ fn pmmr_compact_entire_peak() {
 	let mmr_size = load(0, &elems[0..5], &mut backend);
 	backend.sync().unwrap();

-	let pos_7 = backend.get(7, true).unwrap();
-	let pos_7_hash = backend.get_from_file(7).unwrap();
-	assert_eq!(pos_7.0, pos_7_hash);
+	let pos_7_hash = backend.get_hash(7).unwrap();

-	let pos_8 = backend.get(8, true).unwrap();
-	let pos_8_hash = backend.get_from_file(8).unwrap();
-	assert_eq!(pos_8.0, pos_8_hash);
+	let pos_8 = backend.get_data(8).unwrap();
+	let pos_8_hash = backend.get_hash(8).unwrap();

 	// prune all leaves under the peak at pos 7
 	{

@@ -407,11 +404,12 @@ fn pmmr_compact_entire_peak() {

 	// now check we have pruned up to and including the peak at pos 7
 	// hash still available in underlying hash file
-	assert_eq!(backend.get(7, false), None);
+	assert_eq!(backend.get_hash(7), None);
 	assert_eq!(backend.get_from_file(7), Some(pos_7_hash));

 	// now check we still have subsequent hash and data where we expect
-	assert_eq!(backend.get(8, true), Some(pos_8));
+	assert_eq!(backend.get_hash(8), Some(pos_8_hash));
+	assert_eq!(backend.get_data(8), Some(pos_8));
 	assert_eq!(backend.get_from_file(8), Some(pos_8_hash));

 	teardown(data_dir);

@@ -429,25 +427,17 @@ fn pmmr_compact_horizon() {
 	assert_eq!(backend.data_size().unwrap(), 19);
 	assert_eq!(backend.hash_size().unwrap(), 35);

-	let pos_3 = backend.get(3, false).unwrap();
-	let pos_3_hash = backend.get_from_file(3).unwrap();
-	assert_eq!(pos_3.0, pos_3_hash);
+	let pos_3_hash = backend.get_hash(3).unwrap();

-	let pos_6 = backend.get(6, false).unwrap();
-	let pos_6_hash = backend.get_from_file(6).unwrap();
-	assert_eq!(pos_6.0, pos_6_hash);
+	let pos_6_hash = backend.get_hash(6).unwrap();

-	let pos_7 = backend.get(7, false).unwrap();
-	let pos_7_hash = backend.get_from_file(7).unwrap();
-	assert_eq!(pos_7.0, pos_7_hash);
+	let pos_7_hash = backend.get_hash(7).unwrap();

-	let pos_8 = backend.get(8, true).unwrap();
-	let pos_8_hash = backend.get_from_file(8).unwrap();
-	assert_eq!(pos_8.0, pos_8_hash);
+	let pos_8 = backend.get_data(8).unwrap();
+	let pos_8_hash = backend.get_hash(8).unwrap();

-	let pos_11 = backend.get(11, true).unwrap();
-	let pos_11_hash = backend.get_from_file(11).unwrap();
-	assert_eq!(pos_11.0, pos_11_hash);
+	let pos_11 = backend.get_data(11).unwrap();
+	let pos_11_hash = backend.get_hash(11).unwrap();

 	{
 		// pruning some choice nodes with an increasing block height

@@ -462,19 +452,21 @@ fn pmmr_compact_horizon() {

 	// check we can read hashes and data correctly after pruning
 	{
-		assert_eq!(backend.get(3, false), None);
+		assert_eq!(backend.get_hash(3), None);
 		assert_eq!(backend.get_from_file(3), Some(pos_3_hash));

-		assert_eq!(backend.get(6, false), None);
+		assert_eq!(backend.get_hash(6), None);
 		assert_eq!(backend.get_from_file(6), Some(pos_6_hash));

-		assert_eq!(backend.get(7, true), None);
+		assert_eq!(backend.get_hash(7), None);
 		assert_eq!(backend.get_from_file(7), Some(pos_7_hash));

-		assert_eq!(backend.get(8, true), Some(pos_8));
+		assert_eq!(backend.get_hash(8), Some(pos_8_hash));
+		assert_eq!(backend.get_data(8), Some(pos_8));
 		assert_eq!(backend.get_from_file(8), Some(pos_8_hash));

-		assert_eq!(backend.get(11, true), Some(pos_11));
+		assert_eq!(backend.get_hash(11), Some(pos_11_hash));
+		assert_eq!(backend.get_data(11), Some(pos_11));
 		assert_eq!(backend.get_from_file(11), Some(pos_11_hash));
 	}


@@ -484,16 +476,16 @@ fn pmmr_compact_horizon() {

 		// check we can read a hash by pos correctly after compaction
 		{
-			assert_eq!(backend.get(3, false), None);
+			assert_eq!(backend.get_hash(3), None);
 			assert_eq!(backend.get_from_file(3), Some(pos_3_hash));

-			assert_eq!(backend.get(6, false), None);
+			assert_eq!(backend.get_hash(6), None);
 			assert_eq!(backend.get_from_file(6), Some(pos_6_hash));

-			assert_eq!(backend.get(7, true), None);
+			assert_eq!(backend.get_hash(7), None);
 			assert_eq!(backend.get_from_file(7), Some(pos_7_hash));

-			assert_eq!(backend.get(8, true), Some(pos_8));
+			assert_eq!(backend.get_hash(8), Some(pos_8_hash));
 			assert_eq!(backend.get_from_file(8), Some(pos_8_hash));
 		}
 	}

@@ -508,10 +500,10 @@ fn pmmr_compact_horizon() {
 		assert_eq!(backend.hash_size().unwrap(), 33);

 		// check we can read a hash by pos correctly from recreated backend
-		assert_eq!(backend.get(7, true), None);
+		assert_eq!(backend.get_hash(7), None);
 		assert_eq!(backend.get_from_file(7), Some(pos_7_hash));

-		assert_eq!(backend.get(8, true), Some(pos_8));
+		assert_eq!(backend.get_hash(8), Some(pos_8_hash));
 		assert_eq!(backend.get_from_file(8), Some(pos_8_hash));
 	}


@@ -542,10 +534,11 @@ fn pmmr_compact_horizon() {
 		assert_eq!(backend.hash_size().unwrap(), 29);

 		// check we can read a hash by pos correctly from recreated backend
-		assert_eq!(backend.get(7, true), None);
+		assert_eq!(backend.get_hash(7), None);
 		assert_eq!(backend.get_from_file(7), Some(pos_7_hash));

-		assert_eq!(backend.get(11, true), Some(pos_11));
+		assert_eq!(backend.get_hash(11), Some(pos_11_hash));
+		assert_eq!(backend.get_data(11), Some(pos_11));
 		assert_eq!(backend.get_from_file(11), Some(pos_11_hash));
 	}
