Fix kernel history verification (#1218)

Only rewind kernels, to avoid requiring proper bitmap extraction. This also avoids maintaining bitmap data for kernels, by introducing a "prunable" flag on the PMMR backend.
Ignotus Peverell 2018-07-05 03:31:08 +01:00 committed by GitHub
parent ece2fcc56a
commit 26d41fefd0
5 changed files with 66 additions and 126 deletions
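
Before the per-file diffs, the gist of the new "prunable" flag as a minimal, self-contained sketch (simplified types, not the actual grin code; LeafSet here stands in for the real leaf-position bitmap): a backend constructed with prunable = false skips all leaf_set bookkeeping, which is what lets the kernel MMR stop carrying bitmap data, while the output and range proof MMRs remain prunable.

use std::collections::HashSet;

// Stand-in for the real LeafSet bitmap of unpruned leaf positions.
struct LeafSet(HashSet<u64>);

struct Backend {
    prunable: bool,
    leaf_set: LeafSet,
    data: Vec<u32>,
}

impl Backend {
    fn append(&mut self, elem: u32) {
        self.data.push(elem);
        if self.prunable {
            // Only prunable MMRs track leaf positions on append.
            self.leaf_set.0.insert(self.data.len() as u64);
        }
    }

    fn get_data(&self, pos: u64) -> Option<u32> {
        // A non-prunable MMR never loses a leaf, so the leaf_set check
        // only applies to prunable backends. Positions are 1-based.
        if self.prunable && !self.leaf_set.0.contains(&pos) {
            return None;
        }
        self.data.get((pos as usize).checked_sub(1)?).copied()
    }
}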

==== changed file 1 of 5 ====

@@ -432,7 +432,7 @@ impl Chain {
         // ensure the view is consistent.
         txhashset::extending_readonly(&mut txhashset, |extension| {
             // TODO - is this rewind guaranteed to be redundant now?
-            extension.rewind(&header, &header, true, true, true)?;
+            extension.rewind(&header, &header)?;
             extension.validate(&header, skip_rproofs, &NoStatus)?;
             Ok(())
         })
@@ -502,7 +502,7 @@ impl Chain {
         {
             let mut txhashset = self.txhashset.write().unwrap();
             txhashset::extending_readonly(&mut txhashset, |extension| {
-                extension.rewind(&header, &head_header, true, true, true)?;
+                extension.rewind(&header, &head_header)?;
                 extension.snapshot(&header)?;
                 Ok(())
             })?;
@@ -530,7 +530,7 @@ impl Chain {
     where
         T: TxHashsetWriteStatus,
     {
-        self.txhashset_lock.lock().unwrap();
+        let _ = self.txhashset_lock.lock().unwrap();
         status.on_setup();
         let head = self.head().unwrap();
         let header_head = self.get_header_head().unwrap();
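
A side note on the hunk above (an observation about the Rust pattern, not part of the commit): let _ = … does not keep the value alive. The wildcard pattern drops the MutexGuard at the end of the statement, just like the unbound call it replaces, so the lock is still released immediately. Holding it for the whole function would need a named binding, e.g.:

use std::sync::Mutex;

fn demo(lock: &Mutex<()>) {
    let _ = lock.lock().unwrap(); // guard dropped right here; lock released
    let _guard = lock.lock().unwrap(); // succeeds because the first guard is gone,
                                       // and is held until the end of scope
}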
@@ -550,7 +550,7 @@ impl Chain {
         txhashset::extending(&mut txhashset, &mut batch, |extension| {
             // TODO do we need to rewind here? We have no blocks to rewind
             // (and we need them for the pos to unremove)
-            extension.rewind(&header, &header, true, true, true)?;
+            extension.rewind(&header, &header)?;
             extension.validate(&header, false, status)?;
             extension.rebuild_index()?;
             Ok(())
@@ -816,7 +816,7 @@ fn setup_head(
             let header = store.get_block_header(&head.last_block_h)?;

             let res = txhashset::extending(txhashset, &mut batch, |extension| {
-                extension.rewind(&header, &head_header, true, true, true)?;
+                extension.rewind(&header, &head_header)?;
                 extension.validate_roots(&header)?;
                 debug!(
                     LOGGER,

==== changed file 2 of 5 ====

@@ -500,7 +500,7 @@ pub fn rewind_and_apply_fork(
     );

     // rewind the sum trees up to the forking block
-    ext.rewind(&forked_header, &head_header, true, true, true)?;
+    ext.rewind(&forked_header, &head_header)?;

     trace!(
         LOGGER,

==== changed file 3 of 5 ====

@@ -65,11 +65,12 @@ where
     fn new(
         root_dir: String,
         file_name: &str,
+        prunable: bool,
         header: Option<&BlockHeader>,
     ) -> Result<PMMRHandle<T>, Error> {
         let path = Path::new(&root_dir).join(TXHASHSET_SUBDIR).join(file_name);
         fs::create_dir_all(path.clone())?;
-        let be = PMMRBackend::new(path.to_str().unwrap().to_string(), header)?;
+        let be = PMMRBackend::new(path.to_str().unwrap().to_string(), prunable, header)?;
         let sz = be.unpruned_size()?;
         Ok(PMMRHandle {
             backend: be,
@@ -120,9 +121,9 @@ impl TxHashSet {
         fs::create_dir_all(kernel_file_path.clone())?;

         Ok(TxHashSet {
-            output_pmmr_h: PMMRHandle::new(root_dir.clone(), OUTPUT_SUBDIR, header)?,
-            rproof_pmmr_h: PMMRHandle::new(root_dir.clone(), RANGE_PROOF_SUBDIR, header)?,
-            kernel_pmmr_h: PMMRHandle::new(root_dir.clone(), KERNEL_SUBDIR, None)?,
+            output_pmmr_h: PMMRHandle::new(root_dir.clone(), OUTPUT_SUBDIR, true, header)?,
+            rproof_pmmr_h: PMMRHandle::new(root_dir.clone(), RANGE_PROOF_SUBDIR, true, header)?,
+            kernel_pmmr_h: PMMRHandle::new(root_dir.clone(), KERNEL_SUBDIR, false, None)?,
             commit_index,
         })
     }
@@ -461,9 +462,6 @@ impl<'a> Extension<'a> {
             kernel_pos,
             &rewind_add_pos,
             rewind_rm_pos,
-            true,
-            true,
-            true,
         )?;
         Ok(())
     }
@@ -725,7 +723,7 @@ impl<'a> Extension<'a> {
         // rewind to the specified block for a consistent view
         let head_header = self.commit_index.head_header()?;
-        self.rewind(block_header, &head_header, true, true, true)?;
+        self.rewind(block_header, &head_header)?;

         // then calculate the Merkle Proof based on the known pos
         let pos = self.batch.get_output_pos(&output.commit)?;
@@ -757,9 +755,6 @@ impl<'a> Extension<'a> {
         &mut self,
         block_header: &BlockHeader,
         head_header: &BlockHeader,
-        rewind_utxo: bool,
-        rewind_kernel: bool,
-        rewind_rangeproof: bool,
     ) -> Result<(), Error> {
         trace!(
             LOGGER,
@@ -787,12 +782,7 @@ impl<'a> Extension<'a> {
             block_header.kernel_mmr_size,
             &rewind_add_pos,
             &rewind_rm_pos.1,
-            rewind_utxo,
-            rewind_kernel,
-            rewind_rangeproof,
-        )?;
-        Ok(())
+        )
     }

     /// Rewinds the MMRs to the provided positions, given the output and
@@ -803,9 +793,6 @@ impl<'a> Extension<'a> {
         kernel_pos: u64,
         rewind_add_pos: &Bitmap,
         rewind_rm_pos: &Bitmap,
-        rewind_utxo: bool,
-        rewind_kernel: bool,
-        rewind_rproof: bool,
     ) -> Result<(), Error> {
         trace!(
             LOGGER,
@@ -819,22 +806,15 @@ impl<'a> Extension<'a> {
         // been sync'd to disk.
         self.new_output_commits.retain(|_, &mut v| v <= output_pos);

-        if rewind_utxo {
-            self.output_pmmr
-                .rewind(output_pos, rewind_add_pos, rewind_rm_pos)
-                .map_err(&ErrorKind::TxHashSetErr)?;
-        }
-        if rewind_rproof {
-            self.rproof_pmmr
-                .rewind(output_pos, rewind_add_pos, rewind_rm_pos)
-                .map_err(&ErrorKind::TxHashSetErr)?;
-        }
-        if rewind_kernel {
-            self.kernel_pmmr
-                .rewind(kernel_pos, rewind_add_pos, rewind_rm_pos)
-                .map_err(&ErrorKind::TxHashSetErr)?;
-        }
+        self.output_pmmr
+            .rewind(output_pos, rewind_add_pos, rewind_rm_pos)
+            .map_err(&ErrorKind::TxHashSetErr)?;
+        self.rproof_pmmr
+            .rewind(output_pos, rewind_add_pos, rewind_rm_pos)
+            .map_err(&ErrorKind::TxHashSetErr)?;
+        self.kernel_pmmr
+            .rewind(kernel_pos, rewind_add_pos, rewind_rm_pos)
+            .map_err(&ErrorKind::TxHashSetErr)?;
         Ok(())
     }
@@ -1068,14 +1048,17 @@ impl<'a> Extension<'a> {
         // fast sync where a reorg past the horizon could allow a whole rewrite of
         // the kernel set.
         let mut current = header.clone();
+        let empty_bitmap = Bitmap::create();
         loop {
             current = self.commit_index.get_block_header(&current.previous)?;
             if current.height == 0 {
                 break;
             }
-            let head_header = self.commit_index.head_header()?;
-            // rewinding further and further back
-            self.rewind(&current, &head_header, false, true, false)?;
+            // rewinding kernels only further and further back
+            self.kernel_pmmr
+                .rewind(current.kernel_mmr_size, &empty_bitmap, &empty_bitmap)
+                .map_err(&ErrorKind::TxHashSetErr)?;
             if self.kernel_pmmr.root() != current.kernel_root {
                 return Err(ErrorKind::InvalidTxHashSet(format!(
                     "Kernel root at {} does not match",
@@ -1085,6 +1068,7 @@ impl<'a> Extension<'a> {
             }
         }
         Ok(())
     }
 }
+
 /// Packages the txhashset data files into a zip and returns a Read to the
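
The verification loop above works because the kernel MMR is now non-prunable: rewinding it never has to "unremove" spent leaves, so empty bitmaps suffice and the rewind degenerates into truncating to the kernel_mmr_size recorded in each header. A self-contained sketch of the idea (toy types; the real code compares MMR root hashes, not sums):

struct Header {
    height: u64,
    kernel_mmr_size: u64,
    kernel_root: u64,
}

struct KernelMmr {
    hashes: Vec<u64>,
}

impl KernelMmr {
    // With no pruning, a rewind is a pure truncation: no leaf_set,
    // no bitmap of removed positions.
    fn rewind(&mut self, size: u64) {
        self.hashes.truncate(size as usize);
    }

    // Toy root; the real MMR bags its peak hashes together.
    fn root(&self) -> u64 {
        self.hashes.iter().sum()
    }
}

// Walk the header chain backwards, checking the kernel root at each height.
fn verify_kernel_history(mmr: &mut KernelMmr, headers: &[Header]) -> Result<(), String> {
    for h in headers.iter().rev() {
        mmr.rewind(h.kernel_mmr_size);
        if mmr.root() != h.kernel_root {
            return Err(format!("kernel root mismatch at height {}", h.height));
        }
    }
    Ok(())
}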

==== changed file 4 of 5 ====

@@ -57,6 +57,7 @@ where
     T: PMMRable,
 {
     data_dir: String,
+    prunable: bool,
     hash_file: AppendOnlyFile,
     data_file: AppendOnlyFile,
     leaf_set: LeafSet,
@@ -76,8 +77,10 @@ where
         if let Some(elem) = d.1 {
             self.data_file.append(&mut ser::ser_vec(&elem).unwrap());
-            // Add the new position to our leaf_set.
-            self.leaf_set.add(position);
+            if self.prunable {
+                // Add the new position to our leaf_set.
+                self.leaf_set.add(position);
+            }
         }
     }
     Ok(())
@@ -137,7 +140,7 @@ where
     /// Return None if pos is a leaf and it has been removed (or pruned or
     /// compacted).
     fn get_hash(&self, pos: u64) -> Option<(Hash)> {
-        if pmmr::is_leaf(pos) && !self.leaf_set.includes(pos) {
+        if self.prunable && pmmr::is_leaf(pos) && !self.leaf_set.includes(pos) {
             return None;
         }
         self.get_from_file(pos)
@@ -149,7 +152,7 @@ where
         if !pmmr::is_leaf(pos) {
             return None;
         }
-        if !self.leaf_set.includes(pos) {
+        if self.prunable && !self.leaf_set.includes(pos) {
             return None;
         }
         self.get_data_from_file(pos)
@@ -163,7 +166,9 @@ where
         rewind_rm_pos: &Bitmap,
     ) -> Result<(), String> {
         // First rewind the leaf_set with the necessary added and removed positions.
-        self.leaf_set.rewind(rewind_add_pos, rewind_rm_pos);
+        if self.prunable {
+            self.leaf_set.rewind(rewind_add_pos, rewind_rm_pos);
+        }

         // Rewind the hash file accounting for pruned/compacted pos
         let shift = self.prune_list.get_shift(position);
@@ -183,6 +188,7 @@ where
     /// Remove by insertion position.
     fn remove(&mut self, pos: u64) -> Result<(), String> {
+        assert!(self.prunable, "Remove on non-prunable MMR");
         self.leaf_set.remove(pos);
         Ok(())
     }
@@ -218,60 +224,30 @@ where
 {
     /// Instantiates a new PMMR backend.
     /// Use the provided dir to store its files.
-    pub fn new(data_dir: String, header: Option<&BlockHeader>) -> io::Result<PMMRBackend<T>> {
+    pub fn new(
+        data_dir: String,
+        prunable: bool,
+        header: Option<&BlockHeader>
+    ) -> io::Result<PMMRBackend<T>> {
         let hash_file = AppendOnlyFile::open(format!("{}/{}", data_dir, PMMR_HASH_FILE))?;
         let data_file = AppendOnlyFile::open(format!("{}/{}", data_dir, PMMR_DATA_FILE))?;

         let leaf_set_path = format!("{}/{}", data_dir, PMMR_LEAF_FILE);

-        // If we received a rewound "snapshot" leaf_set file
-        // move it into place so we use it.
+        // If we received a rewound "snapshot" leaf_set file move it into
+        // place so we use it.
         if let Some(header) = header {
             let leaf_snapshot_path = format!("{}/{}.{}", data_dir, PMMR_LEAF_FILE, header.hash());
             LeafSet::copy_snapshot(leaf_set_path.clone(), leaf_snapshot_path.clone())?;
         }

-        // If we need to migrate legacy prune_list do it here before we start.
-        // Do *not* migrate if we already have a non-empty prune_list.
-        let mut prune_list = PruneList::open(format!("{}/{}", data_dir, PMMR_PRUN_FILE))?;
-        let legacy_prune_list_path = format!("{}/{}", data_dir, LEGACY_PRUNED_FILE);
-        if prune_list.is_empty() && Path::new(&legacy_prune_list_path).exists() {
-            debug!(LOGGER, "pmmr: migrating prune_list -> bitmap prune_list");
-            let legacy_prune_pos = read_ordered_vec(legacy_prune_list_path, 8)?;
-            for x in legacy_prune_pos {
-                prune_list.add(x);
-            }
-            prune_list.flush()?;
-        }
-
-        // If we need to migrate legacy rm_log to a new leaf_set do it here before we
-        // start. Do *not* migrate if we already have a non-empty leaf_set.
-        let mut leaf_set = LeafSet::open(leaf_set_path.clone())?;
-        let legacy_rm_log_path = format!("{}/{}", data_dir, LEGACY_RM_LOG_FILE);
-        if leaf_set.is_empty() && Path::new(&legacy_rm_log_path).exists() {
-            debug!(LOGGER, "pmmr: migrating rm_log -> leaf_set");
-            let mut rm_log = RemoveLog::open(legacy_rm_log_path)?;
-            if let Some(header) = header {
-                // Rewind the rm_log back to the height of the header we care about.
-                debug!(
-                    LOGGER,
-                    "pmmr: first rewinding rm_log to height {}", header.height
-                );
-                rm_log.rewind(header.height as u32)?;
-            }
-            let last_pos = {
-                let total_shift = prune_list.get_total_shift();
-                let record_len = 32;
-                let sz = hash_file.size()?;
-                sz / record_len + total_shift
-            };
-            migrate_rm_log(&mut leaf_set, &rm_log, &prune_list, last_pos)?;
-        }
+        let prune_list = PruneList::open(format!("{}/{}", data_dir, PMMR_PRUN_FILE))?;
+        let leaf_set = LeafSet::open(leaf_set_path.clone())?;

         Ok(PMMRBackend {
             data_dir,
+            prunable,
             hash_file,
             data_file,
             leaf_set,
@@ -369,6 +345,8 @@ where
     where
         P: Fn(&[u8]),
     {
+        assert!(self.prunable, "Trying to compact a non-prunable PMMR");
+
         // Paths for tmp hash and data files.
         let tmp_prune_file_hash = format!("{}/{}.hashprune", self.data_dir, PMMR_HASH_FILE);
         let tmp_prune_file_data = format!("{}/{}.dataprune", self.data_dir, PMMR_DATA_FILE);
@@ -500,25 +478,3 @@ fn removed_excl_roots(removed: Bitmap) -> Bitmap {
         .collect()
 }
-
-fn migrate_rm_log(
-    leaf_set: &mut LeafSet,
-    rm_log: &RemoveLog,
-    prune_list: &PruneList,
-    last_pos: u64,
-) -> io::Result<()> {
-    info!(
-        LOGGER,
-        "Migrating rm_log -> leaf_set. Might take a little while... {} pos", last_pos
-    );
-
-    // check every leaf
-    // if not pruned and not removed, add it to the leaf_set
-    for x in 1..=last_pos {
-        if pmmr::is_leaf(x) && !prune_list.is_pruned(x) && !rm_log.includes(x) {
-            leaf_set.add(x);
-        }
-    }
-
-    leaf_set.flush()?;
-    Ok(())
-}

==== changed file 5 of 5 ====

@@ -29,7 +29,7 @@ use store::types::prune_noop;
 #[test]
 fn pmmr_append() {
     let (data_dir, elems) = setup("append");
-    let mut backend = store::pmmr::PMMRBackend::new(data_dir.to_string(), None).unwrap();
+    let mut backend = store::pmmr::PMMRBackend::new(data_dir.to_string(), true, None).unwrap();

     // adding first set of 4 elements and sync
     let mut mmr_size = load(0, &elems[0..4], &mut backend);
@@ -79,7 +79,7 @@ fn pmmr_compact_leaf_sibling() {
     let (data_dir, elems) = setup("compact_leaf_sibling");

     // setup the mmr store with all elements
-    let mut backend = store::pmmr::PMMRBackend::new(data_dir.to_string(), None).unwrap();
+    let mut backend = store::pmmr::PMMRBackend::new(data_dir.to_string(), true, None).unwrap();
     let mmr_size = load(0, &elems[..], &mut backend);
     backend.sync().unwrap();
@@ -151,7 +151,7 @@ fn pmmr_prune_compact() {
     let (data_dir, elems) = setup("prune_compact");

     // setup the mmr store with all elements
-    let mut backend = store::pmmr::PMMRBackend::new(data_dir.to_string(), None).unwrap();
+    let mut backend = store::pmmr::PMMRBackend::new(data_dir.to_string(), true, None).unwrap();
     let mmr_size = load(0, &elems[..], &mut backend);
     backend.sync().unwrap();
@@ -201,7 +201,7 @@ fn pmmr_reload() {
     let (data_dir, elems) = setup("reload");

     // set everything up with an initial backend
-    let mut backend = store::pmmr::PMMRBackend::new(data_dir.to_string(), None).unwrap();
+    let mut backend = store::pmmr::PMMRBackend::new(data_dir.to_string(), true, None).unwrap();

     let mmr_size = load(0, &elems[..], &mut backend);
@@ -259,7 +259,7 @@ fn pmmr_reload() {
     // create a new backend referencing the data files
     // and check everything still works as expected
     {
-        let mut backend = store::pmmr::PMMRBackend::new(data_dir.to_string(), None).unwrap();
+        let mut backend = store::pmmr::PMMRBackend::new(data_dir.to_string(), true, None).unwrap();
         assert_eq!(backend.unpruned_size().unwrap(), mmr_size);
         {
             let pmmr: PMMR<TestElem, _> = PMMR::at(&mut backend, mmr_size);
@@ -297,7 +297,7 @@ fn pmmr_reload() {
 #[test]
 fn pmmr_rewind() {
     let (data_dir, elems) = setup("rewind");
-    let mut backend = store::pmmr::PMMRBackend::new(data_dir.clone(), None).unwrap();
+    let mut backend = store::pmmr::PMMRBackend::new(data_dir.clone(), true, None).unwrap();

     // adding elements and keeping the corresponding root
     let mut mmr_size = load(0, &elems[0..4], &mut backend);
@@ -426,7 +426,7 @@ fn pmmr_rewind() {
 #[test]
 fn pmmr_compact_single_leaves() {
     let (data_dir, elems) = setup("compact_single_leaves");
-    let mut backend = store::pmmr::PMMRBackend::new(data_dir.clone(), None).unwrap();
+    let mut backend = store::pmmr::PMMRBackend::new(data_dir.clone(), true, None).unwrap();
     let mmr_size = load(0, &elems[0..5], &mut backend);
     backend.sync().unwrap();
@@ -462,7 +462,7 @@ fn pmmr_compact_single_leaves() {
 #[test]
 fn pmmr_compact_entire_peak() {
     let (data_dir, elems) = setup("compact_entire_peak");
-    let mut backend = store::pmmr::PMMRBackend::new(data_dir.clone(), None).unwrap();
+    let mut backend = store::pmmr::PMMRBackend::new(data_dir.clone(), true, None).unwrap();
     let mmr_size = load(0, &elems[0..5], &mut backend);
     backend.sync().unwrap();
@@ -503,7 +503,7 @@ fn pmmr_compact_entire_peak() {
 #[test]
 fn pmmr_compact_horizon() {
     let (data_dir, elems) = setup("compact_horizon");
-    let mut backend = store::pmmr::PMMRBackend::new(data_dir.clone(), None).unwrap();
+    let mut backend = store::pmmr::PMMRBackend::new(data_dir.clone(), true, None).unwrap();
     let mmr_size = load(0, &elems[..], &mut backend);
     backend.sync().unwrap();
@@ -586,7 +586,7 @@ fn pmmr_compact_horizon() {
     {
         // recreate backend
         let backend =
-            store::pmmr::PMMRBackend::<TestElem>::new(data_dir.to_string(), None).unwrap();
+            store::pmmr::PMMRBackend::<TestElem>::new(data_dir.to_string(), true, None).unwrap();

         assert_eq!(backend.data_size().unwrap(), 19);
         assert_eq!(backend.hash_size().unwrap(), 35);
@@ -601,7 +601,7 @@ fn pmmr_compact_horizon() {
     {
         let mut backend =
-            store::pmmr::PMMRBackend::<TestElem>::new(data_dir.to_string(), None).unwrap();
+            store::pmmr::PMMRBackend::<TestElem>::new(data_dir.to_string(), true, None).unwrap();
         {
             let mut pmmr: PMMR<TestElem, _> = PMMR::at(&mut backend, mmr_size);
@@ -620,7 +620,7 @@ fn pmmr_compact_horizon() {
     {
         // recreate backend
         let backend =
-            store::pmmr::PMMRBackend::<TestElem>::new(data_dir.to_string(), None).unwrap();
+            store::pmmr::PMMRBackend::<TestElem>::new(data_dir.to_string(), true, None).unwrap();

         // 0010012001001230
@@ -646,7 +646,7 @@ fn compact_twice() {
     let (data_dir, elems) = setup("compact_twice");

     // setup the mmr store with all elements
-    let mut backend = store::pmmr::PMMRBackend::new(data_dir.to_string(), None).unwrap();
+    let mut backend = store::pmmr::PMMRBackend::new(data_dir.to_string(), true, None).unwrap();
     let mmr_size = load(0, &elems[..], &mut backend);
     backend.sync().unwrap();
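
All of the tests above exercise prunable backends (the true argument). A companion test for the prunable = false path, sketched here as a hypothetical addition reusing this file's setup and load helpers, would mirror how the kernel MMR is constructed in the txhashset: append-only, with no leaf_set bookkeeping to get wrong.

#[test]
fn pmmr_non_prunable_append() {
    let (data_dir, elems) = setup("non_prunable");
    // prunable = false, as used for the kernel MMR handle.
    let mut backend = store::pmmr::PMMRBackend::new(data_dir.to_string(), false, None).unwrap();
    let mmr_size = load(0, &elems[..], &mut backend);
    backend.sync().unwrap();
    assert_eq!(backend.unpruned_size().unwrap(), mmr_size);
}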