Mirror of https://github.com/mimblewimble/grin.git (synced 2025-01-21 03:21:08 +03:00)
support for v2 txhashset (but only using v1 in mainnet and floonet) (#3051)
parent 1c072f535c
commit 973a03c826

7 changed files with 211 additions and 134 deletions
@@ -20,7 +20,6 @@ use crate::core::core::merkle_proof::MerkleProof;
 use crate::core::core::verifier_cache::VerifierCache;
 use crate::core::core::{
     Block, BlockHeader, BlockSums, Committed, Output, OutputIdentifier, Transaction, TxKernel,
-    TxKernelEntry,
 };
 use crate::core::global;
 use crate::core::pow;

@@ -176,9 +175,24 @@ impl Chain {
         // open the txhashset, creating a new one if necessary
         let mut txhashset = txhashset::TxHashSet::open(db_root.clone(), store.clone(), None)?;

-        let mut header_pmmr =
-            PMMRHandle::new(&db_root, "header", "header_head", false, true, None)?;
-        let mut sync_pmmr = PMMRHandle::new(&db_root, "header", "sync_head", false, true, None)?;
+        let mut header_pmmr = PMMRHandle::new(
+            &db_root,
+            "header",
+            "header_head",
+            false,
+            true,
+            ProtocolVersion(1),
+            None,
+        )?;
+        let mut sync_pmmr = PMMRHandle::new(
+            &db_root,
+            "header",
+            "sync_head",
+            false,
+            true,
+            ProtocolVersion(1),
+            None,
+        )?;

         setup_head(
             &genesis,

@@ -661,7 +675,7 @@ impl Chain {
     pub fn kernel_data_write(&self, reader: &mut dyn Read) -> Result<(), Error> {
         let mut count = 0;
         let mut stream = StreamingReader::new(reader, ProtocolVersion::local());
-        while let Ok(_kernel) = TxKernelEntry::read(&mut stream) {
+        while let Ok(_kernel) = TxKernel::read(&mut stream) {
             count += 1;
         }

@@ -1146,7 +1160,7 @@ impl Chain {
     }

     /// as above, for kernels
-    pub fn get_last_n_kernel(&self, distance: u64) -> Vec<(Hash, TxKernelEntry)> {
+    pub fn get_last_n_kernel(&self, distance: u64) -> Vec<(Hash, TxKernel)> {
         self.txhashset.read().last_n_kernel(distance)
     }
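As the hunks above show, PMMRHandle::new (and, further down, PMMRBackend::new, which previously pinned ProtocolVersion(1) internally) now takes an explicit ProtocolVersion, and every call site in this commit keeps passing ProtocolVersion(1), matching the commit title: v2 support exists but is not yet used on mainnet or floonet. A minimal, self-contained sketch of that idea follows; the types here are simplified stand-ins for illustration, not grin's actual API.

    // Simplified stand-ins; grin's ProtocolVersion lives in core::ser and the
    // real handle wraps a file-backed PMMR backend.
    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    struct ProtocolVersion(u32);

    struct PmmrHandle {
        name: String,
        version: ProtocolVersion,
    }

    impl PmmrHandle {
        // The serialization version is now a constructor argument instead of
        // being hard-coded inside the backend.
        fn new(name: &str, version: ProtocolVersion) -> PmmrHandle {
            PmmrHandle {
                name: name.to_string(),
                version,
            }
        }
    }

    fn main() {
        // Call sites in this commit keep pinning v1 for existing MMR files.
        let header_pmmr = PmmrHandle::new("header_head", ProtocolVersion(1));
        let sync_pmmr = PmmrHandle::new("sync_head", ProtocolVersion(1));
        println!("{} -> {:?}", header_pmmr.name, header_pmmr.version);
        println!("{} -> {:?}", sync_pmmr.name, sync_pmmr.version);
    }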
@@ -19,10 +19,8 @@ use crate::core::core::committed::Committed;
 use crate::core::core::hash::{Hash, Hashed};
 use crate::core::core::merkle_proof::MerkleProof;
 use crate::core::core::pmmr::{self, Backend, ReadonlyPMMR, RewindablePMMR, PMMR};
-use crate::core::core::{
-    Block, BlockHeader, Input, Output, OutputIdentifier, TxKernel, TxKernelEntry,
-};
-use crate::core::ser::{PMMRIndexHashable, PMMRable};
+use crate::core::core::{Block, BlockHeader, Input, Output, OutputIdentifier, TxKernel};
+use crate::core::ser::{PMMRIndexHashable, PMMRable, ProtocolVersion};
 use crate::error::{Error, ErrorKind};
 use crate::store::{Batch, ChainStore};
 use crate::txhashset::{RewindableKernelView, UTXOView};

@@ -62,6 +60,7 @@ impl<T: PMMRable> PMMRHandle<T> {
         file_name: &str,
         prunable: bool,
         fixed_size: bool,
+        version: ProtocolVersion,
         header: Option<&BlockHeader>,
     ) -> Result<PMMRHandle<T>, Error> {
         let path = Path::new(root_dir).join(sub_dir).join(file_name);

@@ -69,7 +68,8 @@ impl<T: PMMRable> PMMRHandle<T> {
         let path_str = path.to_str().ok_or(Error::from(ErrorKind::Other(
             "invalid file path".to_owned(),
         )))?;
-        let backend = PMMRBackend::new(path_str.to_string(), prunable, fixed_size, header)?;
+        let backend =
+            PMMRBackend::new(path_str.to_string(), prunable, fixed_size, version, header)?;
         let last_pos = backend.unpruned_size();
         Ok(PMMRHandle { backend, last_pos })
     }

@@ -113,33 +113,78 @@ impl TxHashSet {
         commit_index: Arc<ChainStore>,
         header: Option<&BlockHeader>,
     ) -> Result<TxHashSet, Error> {
-        Ok(TxHashSet {
-            output_pmmr_h: PMMRHandle::new(
-                &root_dir,
-                TXHASHSET_SUBDIR,
-                OUTPUT_SUBDIR,
-                true,
-                true,
-                header,
-            )?,
-            rproof_pmmr_h: PMMRHandle::new(
-                &root_dir,
-                TXHASHSET_SUBDIR,
-                RANGE_PROOF_SUBDIR,
-                true,
-                true,
-                header,
-            )?,
-            kernel_pmmr_h: PMMRHandle::new(
-                &root_dir,
-                TXHASHSET_SUBDIR,
-                KERNEL_SUBDIR,
-                false, // not prunable
-                false, // variable size kernel data file
-                None,
-            )?,
-            commit_index,
-        })
+        let output_pmmr_h = PMMRHandle::new(
+            &root_dir,
+            TXHASHSET_SUBDIR,
+            OUTPUT_SUBDIR,
+            true,
+            true,
+            ProtocolVersion(1),
+            header,
+        )?;
+
+        let rproof_pmmr_h = PMMRHandle::new(
+            &root_dir,
+            TXHASHSET_SUBDIR,
+            RANGE_PROOF_SUBDIR,
+            true,
+            true,
+            ProtocolVersion(1),
+            header,
+        )?;
+
+        let mut maybe_kernel_handle: Option<PMMRHandle<TxKernel>> = None;
+        let versions = vec![ProtocolVersion(2), ProtocolVersion(1)];
+        for version in versions {
+            let handle = PMMRHandle::new(
+                &root_dir,
+                TXHASHSET_SUBDIR,
+                KERNEL_SUBDIR,
+                false, // not prunable
+                false, // variable size kernel data file
+                version,
+                None,
+            )?;
+            if handle.last_pos == 0 {
+                debug!(
+                    "attempting to open (empty) kernel PMMR using {:?} - SUCCESS",
+                    version
+                );
+                maybe_kernel_handle = Some(handle);
+                break;
+            }
+            let kernel: Option<TxKernel> = ReadonlyPMMR::at(&handle.backend, 1).get_data(1);
+            if let Some(kernel) = kernel {
+                if kernel.verify().is_ok() {
+                    debug!(
+                        "attempting to open kernel PMMR using {:?} - SUCCESS",
+                        version
+                    );
+                    maybe_kernel_handle = Some(handle);
+                    break;
+                } else {
+                    debug!(
+                        "attempting to open kernel PMMR using {:?} - FAIL (verify failed)",
+                        version
+                    );
+                }
+            } else {
+                debug!(
+                    "attempting to open kernel PMMR using {:?} - FAIL (read failed)",
+                    version
+                );
+            }
+        }
+        if let Some(kernel_pmmr_h) = maybe_kernel_handle {
+            Ok(TxHashSet {
+                output_pmmr_h,
+                rproof_pmmr_h,
+                kernel_pmmr_h,
+                commit_index,
+            })
+        } else {
+            Err(ErrorKind::TxHashSetErr(format!("failed to open kernel PMMR")).into())
+        }
     }

     /// Close all backend file handles

@@ -192,7 +237,7 @@ impl TxHashSet {
     }

     /// as above, for kernels
-    pub fn last_n_kernel(&self, distance: u64) -> Vec<(Hash, TxKernelEntry)> {
+    pub fn last_n_kernel(&self, distance: u64) -> Vec<(Hash, TxKernel)> {
         ReadonlyPMMR::at(&self.kernel_pmmr_h.backend, self.kernel_pmmr_h.last_pos)
             .get_last_n_insertions(distance)
     }

@@ -247,9 +292,9 @@ impl TxHashSet {
         let mut index = max_index + 1;
         while index > min_index {
             index -= 1;
-            if let Some(t) = pmmr.get_data(index) {
-                if &t.kernel.excess == excess {
-                    return Some((t.kernel, index));
+            if let Some(kernel) = pmmr.get_data(index) {
+                if &kernel.excess == excess {
+                    return Some((kernel, index));
                 }
             }
         }

@@ -258,8 +303,6 @@ impl TxHashSet {

     /// Get MMR roots.
     pub fn roots(&self) -> TxHashSetRoots {
-        // let header_pmmr =
-        //     ReadonlyPMMR::at(&self.header_pmmr_h.backend, self.header_pmmr_h.last_pos);
         let output_pmmr =
             ReadonlyPMMR::at(&self.output_pmmr_h.backend, self.output_pmmr_h.last_pos);
         let rproof_pmmr =

@@ -268,7 +311,6 @@ impl TxHashSet {
             ReadonlyPMMR::at(&self.kernel_pmmr_h.backend, self.kernel_pmmr_h.last_pos);

         TxHashSetRoots {
-            // header_root: header_pmmr.root(),
             output_root: output_pmmr.root(),
             rproof_root: rproof_pmmr.root(),
             kernel_root: kernel_pmmr.root(),

@@ -1192,8 +1234,7 @@ impl<'a> Extension<'a> {
                 .kernel_pmmr
                 .get_data(n)
                 .ok_or::<Error>(ErrorKind::TxKernelNotFound.into())?;
-
-            tx_kernels.push(kernel.kernel);
+            tx_kernels.push(kernel);
         }

         if tx_kernels.len() >= KERNEL_BATCH_SIZE || n >= self.kernel_pmmr.unpruned_size() {
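The TxHashSet::open hunk above is the heart of the v2 support: the kernel data file carries no version tag, so the node discovers the format by trying to open the kernel PMMR under each candidate version (v2 first, then v1). An empty file (last_pos == 0) is accepted under the first version tried; otherwise the first kernel is read and verified, and a failure falls through to the next version. Below is a self-contained sketch of that probe-and-fallback pattern, with a toy read/verify function standing in for the real PMMR backend; it is an illustration under those assumptions, not grin's actual implementation.

    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    struct ProtocolVersion(u32);

    // Toy stand-in: pretend the on-disk file only parses correctly under v1.
    // The real code reads the first kernel out of the PMMR data file and
    // calls TxKernel::verify() on it.
    fn read_and_verify_first_kernel(version: ProtocolVersion) -> Result<(), String> {
        if version == ProtocolVersion(1) {
            Ok(())
        } else {
            Err("kernel failed to deserialize/verify".to_string())
        }
    }

    fn detect_kernel_file_version() -> Option<ProtocolVersion> {
        // Try the newest supported version first and fall back, the same loop
        // shape as the diff above.
        let versions = vec![ProtocolVersion(2), ProtocolVersion(1)];
        for version in versions {
            match read_and_verify_first_kernel(version) {
                Ok(()) => {
                    println!("attempting to open kernel PMMR using {:?} - SUCCESS", version);
                    return Some(version);
                }
                Err(e) => {
                    println!("attempting to open kernel PMMR using {:?} - FAIL ({})", version, e);
                }
            }
        }
        None
    }

    fn main() {
        match detect_kernel_file_version() {
            Some(v) => println!("kernel MMR opened with {:?}", v),
            None => eprintln!("failed to open kernel PMMR"),
        }
    }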
@@ -344,15 +344,22 @@ impl Readable for TxKernel {
     }
 }

-/// We store TxKernelEntry in the kernel MMR.
+/// We store kernels in the kernel MMR.
+/// Note: These are "variable size" to support different kernel featuere variants.
 impl PMMRable for TxKernel {
-    type E = TxKernelEntry;
+    type E = Self;

-    fn as_elmt(&self) -> TxKernelEntry {
-        TxKernelEntry::from_kernel(self)
+    fn as_elmt(&self) -> Self::E {
+        self.clone()
     }
 }

+/// Kernels are "variable size" but we need to implement FixedLength for legacy reasons.
+/// At some point we will refactor the MMR backend so this is no longer required.
+impl FixedLength for TxKernel {
+    const LEN: usize = 0;
+}
+
 impl KernelFeatures {
     /// Is this a coinbase kernel?
     pub fn is_coinbase(&self) -> bool {

@@ -494,61 +501,6 @@ impl TxKernel {
     }
 }

-/// Wrapper around a tx kernel used when maintaining them in the MMR.
-/// These will be useful once we implement relative lockheights via relative kernels
-/// as a kernel may have an optional rel_kernel but we will not want to store these
-/// directly in the kernel MMR.
-#[derive(Serialize, Deserialize, Debug, Clone)]
-pub struct TxKernelEntry {
-    /// The underlying tx kernel.
-    pub kernel: TxKernel,
-}
-
-impl Writeable for TxKernelEntry {
-    fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ser::Error> {
-        self.kernel.write(writer)?;
-        Ok(())
-    }
-}
-
-impl Readable for TxKernelEntry {
-    fn read(reader: &mut dyn Reader) -> Result<TxKernelEntry, ser::Error> {
-        let kernel = TxKernel::read(reader)?;
-        Ok(TxKernelEntry { kernel })
-    }
-}
-
-impl TxKernelEntry {
-    /// The excess on the underlying tx kernel.
-    pub fn excess(&self) -> Commitment {
-        self.kernel.excess
-    }
-
-    /// Verify the underlying tx kernel.
-    pub fn verify(&self) -> Result<(), Error> {
-        self.kernel.verify()
-    }
-
-    /// Build a new tx kernel entry from a kernel.
-    pub fn from_kernel(kernel: &TxKernel) -> TxKernelEntry {
-        TxKernelEntry {
-            kernel: kernel.clone(),
-        }
-    }
-}
-
-impl From<TxKernel> for TxKernelEntry {
-    fn from(kernel: TxKernel) -> Self {
-        TxKernelEntry { kernel }
-    }
-}
-
-impl FixedLength for TxKernelEntry {
-    const LEN: usize = 17 // features plus fee and lock_height
-        + secp::constants::PEDERSEN_COMMITMENT_SIZE
-        + secp::constants::AGG_SIGNATURE_SIZE;
-}
-
 /// Enum of possible tx weight verification options -
 ///
 /// * As "transaction" checks tx (as block) weight does not exceed max_block_weight.
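The transaction.rs hunks above drop the TxKernelEntry wrapper entirely: kernels are now stored in the MMR directly (type E = Self), serialized as variable-size records, and FixedLength::LEN is kept at 0 only because the current MMR backend trait still requires it. A rough, self-contained sketch of the "store the element itself, variable size" shape follows; the traits and the Kernel struct here are simplified stand-ins for grin's PMMRable and FixedLength, introduced purely for illustration.

    // Simplified stand-ins for grin's PMMRable / FixedLength traits.
    trait PmmrElement: Clone {
        type E: Clone;
        fn as_elmt(&self) -> Self::E;
    }

    trait FixedLength {
        // Kept for legacy reasons even for variable-size elements; the diff
        // sets this to 0 for TxKernel and relies on a size file instead.
        const LEN: usize;
    }

    #[derive(Clone, Debug)]
    struct Kernel {
        features: u8,
        fee: u64,
        excess: [u8; 33],
    }

    impl PmmrElement for Kernel {
        // Store the kernel itself rather than a wrapper entry type.
        type E = Self;
        fn as_elmt(&self) -> Self::E {
            self.clone()
        }
    }

    impl FixedLength for Kernel {
        const LEN: usize = 0;
    }

    fn main() {
        let k = Kernel { features: 0, fee: 2, excess: [0u8; 33] };
        let stored = k.as_elmt();
        println!(
            "stored kernel: features {}, fee {}, excess[0] {}, LEN {}",
            stored.features, stored.fee, stored.excess[0], Kernel::LEN
        );
    }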
@@ -204,13 +204,9 @@ impl<T: PMMRable> PMMRBackend<T> {
         data_dir: P,
         prunable: bool,
         fixed_size: bool,
+        version: ProtocolVersion,
         header: Option<&BlockHeader>,
     ) -> io::Result<PMMRBackend<T>> {
-        // Note: Explicit protocol version here.
-        // Regardless of our "default" protocol version we have existing MMR files
-        // and we need to be able to support these across upgrades.
-        let version = ProtocolVersion(1);
-
         let data_dir = data_dir.as_ref();

         // Are we dealing with "fixed size" data elements or "variable size" data elements
@@ -185,6 +185,17 @@ pub struct AppendOnlyFile<T> {
     _marker: marker::PhantomData<T>,
 }

+impl AppendOnlyFile<SizeEntry> {
+    fn sum_sizes(&self) -> io::Result<u64> {
+        let mut sum = 0;
+        for pos in 0..self.buffer_start_pos {
+            let entry = self.read_as_elmt(pos)?;
+            sum += entry.size as u64;
+        }
+        Ok(sum)
+    }
+}
+
 impl<T> AppendOnlyFile<T>
 where
     T: Debug + Readable + Writeable,

@@ -215,8 +226,9 @@ where
         // This will occur during "fast sync" as we do not sync the size_file
         // and must build it locally.
         // And we can *only* do this after init() the data file (so we know sizes).
+        let expected_size = aof.size()?;
         if let SizeInfo::VariableSize(ref mut size_file) = &mut aof.size_info {
-            if size_file.size()? == 0 {
+            if size_file.sum_sizes()? != expected_size {
                 aof.rebuild_size_file()?;

                 // (Re)init the entire file as we just rebuilt the size_file

@@ -517,6 +529,7 @@ where
         if let SizeInfo::VariableSize(ref mut size_file) = &mut self.size_info {
             // Note: Reading from data file and writing sizes to the associated (tmp) size_file.
             let tmp_path = size_file.path.with_extension("tmp");
+            debug!("rebuild_size_file: {:?}", tmp_path);

             // Scope the reader and writer to within the block so we can safely replace files later on.
             {
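The AppendOnlyFile hunks above tighten the recovery check for variable-size files: instead of rebuilding the size file only when it is empty, the new sum_sizes helper adds up every recorded entry size and the size file is rebuilt whenever that sum disagrees with the actual length of the data file (which can happen after fast sync, since the size file is not transferred). A small self-contained sketch of that consistency check follows; plain slices stand in for the on-disk files, whereas the real code reads each SizeEntry via read_as_elmt and compares against AppendOnlyFile::size().

    // Each size entry records how many bytes one variable-size element
    // occupies in the data file (the real SizeEntry also records its offset).
    #[derive(Clone, Copy)]
    struct SizeEntry {
        size: u16,
    }

    fn sum_sizes(size_file: &[SizeEntry]) -> u64 {
        size_file.iter().map(|e| e.size as u64).sum()
    }

    // Returns true when the size file no longer describes the data file and
    // must be rebuilt.
    fn size_file_needs_rebuild(size_file: &[SizeEntry], data_file_len: u64) -> bool {
        sum_sizes(size_file) != data_file_len
    }

    fn main() {
        let data_file_len = 10; // pretend the data file holds 10 bytes
        let good = vec![SizeEntry { size: 4 }, SizeEntry { size: 6 }];
        let stale = vec![SizeEntry { size: 4 }];

        assert!(!size_file_needs_rebuild(&good, data_file_len));
        assert!(size_file_needs_rebuild(&stale, data_file_len));
        println!("consistency check behaves as expected");
    }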
@@ -35,7 +35,7 @@ impl PhatChunkStruct {
 }

 impl Readable for PhatChunkStruct {
-    fn read(reader: &mut Reader) -> Result<PhatChunkStruct, ser::Error> {
+    fn read(reader: &mut dyn Reader) -> Result<PhatChunkStruct, ser::Error> {
         let mut retval = PhatChunkStruct::new();
         for _ in 0..TEST_ALLOC_SIZE {
             retval.phatness = reader.read_u64()?;
@@ -24,15 +24,22 @@ use croaring::Bitmap;
 use crate::core::core::hash::DefaultHashable;
 use crate::core::core::pmmr::{Backend, PMMR};
 use crate::core::ser::{
-    Error, FixedLength, PMMRIndexHashable, PMMRable, Readable, Reader, Writeable, Writer,
+    Error, FixedLength, PMMRIndexHashable, PMMRable, ProtocolVersion, Readable, Reader, Writeable,
+    Writer,
 };

 #[test]
 fn pmmr_append() {
     let (data_dir, elems) = setup("append");
     {
-        let mut backend =
-            store::pmmr::PMMRBackend::new(data_dir.to_string(), true, false, None).unwrap();
+        let mut backend = store::pmmr::PMMRBackend::new(
+            data_dir.to_string(),
+            true,
+            false,
+            ProtocolVersion(1),
+            None,
+        )
+        .unwrap();

         // adding first set of 4 elements and sync
         let mut mmr_size = load(0, &elems[0..4], &mut backend);

@@ -114,8 +121,14 @@ fn pmmr_compact_leaf_sibling() {

     // setup the mmr store with all elements
     {
-        let mut backend =
-            store::pmmr::PMMRBackend::new(data_dir.to_string(), true, false, None).unwrap();
+        let mut backend = store::pmmr::PMMRBackend::new(
+            data_dir.to_string(),
+            true,
+            false,
+            ProtocolVersion(1),
+            None,
+        )
+        .unwrap();
         let mmr_size = load(0, &elems[..], &mut backend);
         backend.sync().unwrap();

@@ -187,8 +200,14 @@ fn pmmr_prune_compact() {

     // setup the mmr store with all elements
     {
-        let mut backend =
-            store::pmmr::PMMRBackend::new(data_dir.to_string(), true, false, None).unwrap();
+        let mut backend = store::pmmr::PMMRBackend::new(
+            data_dir.to_string(),
+            true,
+            false,
+            ProtocolVersion(1),
+            None,
+        )
+        .unwrap();
         let mmr_size = load(0, &elems[..], &mut backend);
         backend.sync().unwrap();

@@ -238,8 +257,14 @@ fn pmmr_reload() {

     // set everything up with an initial backend
     {
-        let mut backend =
-            store::pmmr::PMMRBackend::new(data_dir.to_string(), true, false, None).unwrap();
+        let mut backend = store::pmmr::PMMRBackend::new(
+            data_dir.to_string(),
+            true,
+            false,
+            ProtocolVersion(1),
+            None,
+        )
+        .unwrap();

         let mmr_size = load(0, &elems[..], &mut backend);

@@ -298,8 +323,14 @@ fn pmmr_reload() {
     // create a new backend referencing the data files
     // and check everything still works as expected
     {
-        let mut backend =
-            store::pmmr::PMMRBackend::new(data_dir.to_string(), true, false, None).unwrap();
+        let mut backend = store::pmmr::PMMRBackend::new(
+            data_dir.to_string(),
+            true,
+            false,
+            ProtocolVersion(1),
+            None,
+        )
+        .unwrap();
         assert_eq!(backend.unpruned_size(), mmr_size);
         {
             let pmmr: PMMR<'_, TestElem, _> = PMMR::at(&mut backend, mmr_size);

@@ -340,7 +371,8 @@ fn pmmr_rewind() {
     let (data_dir, elems) = setup("rewind");
     {
         let mut backend =
-            store::pmmr::PMMRBackend::new(data_dir.clone(), true, false, None).unwrap();
+            store::pmmr::PMMRBackend::new(data_dir.clone(), true, false, ProtocolVersion(1), None)
+                .unwrap();

         // adding elements and keeping the corresponding root
         let mut mmr_size = load(0, &elems[0..4], &mut backend);

@@ -456,7 +488,8 @@ fn pmmr_compact_single_leaves() {
     let (data_dir, elems) = setup("compact_single_leaves");
     {
         let mut backend =
-            store::pmmr::PMMRBackend::new(data_dir.clone(), true, false, None).unwrap();
+            store::pmmr::PMMRBackend::new(data_dir.clone(), true, false, ProtocolVersion(1), None)
+                .unwrap();
         let mmr_size = load(0, &elems[0..5], &mut backend);
         backend.sync().unwrap();

@@ -491,7 +524,8 @@ fn pmmr_compact_entire_peak() {
     let (data_dir, elems) = setup("compact_entire_peak");
     {
         let mut backend =
-            store::pmmr::PMMRBackend::new(data_dir.clone(), true, false, None).unwrap();
+            store::pmmr::PMMRBackend::new(data_dir.clone(), true, false, ProtocolVersion(1), None)
+                .unwrap();
         let mmr_size = load(0, &elems[0..5], &mut backend);
         backend.sync().unwrap();

@@ -546,8 +580,14 @@ fn pmmr_compact_horizon() {

     let mmr_size;
     {
-        let mut backend =
-            store::pmmr::PMMRBackend::new(data_dir.clone(), true, false, None).unwrap();
+        let mut backend = store::pmmr::PMMRBackend::new(
+            data_dir.clone(),
+            true,
+            false,
+            ProtocolVersion(1),
+            None,
+        )
+        .unwrap();
         mmr_size = load(0, &elems[..], &mut backend);
         backend.sync().unwrap();

@@ -626,8 +666,13 @@ fn pmmr_compact_horizon() {
     // recheck stored data
     {
         // recreate backend
-        let backend =
-            store::pmmr::PMMRBackend::<TestElem>::new(data_dir.to_string(), true, false, None)
-                .unwrap();
+        let backend = store::pmmr::PMMRBackend::<TestElem>::new(
+            data_dir.to_string(),
+            true,
+            false,
+            ProtocolVersion(1),
+            None,
+        )
+        .unwrap();

         assert_eq!(backend.data_size(), 19);

@@ -642,8 +687,13 @@ fn pmmr_compact_horizon() {
     }

     {
-        let mut backend =
-            store::pmmr::PMMRBackend::<TestElem>::new(data_dir.to_string(), true, false, None)
-                .unwrap();
+        let mut backend = store::pmmr::PMMRBackend::<TestElem>::new(
+            data_dir.to_string(),
+            true,
+            false,
+            ProtocolVersion(1),
+            None,
+        )
+        .unwrap();

         {

@@ -660,8 +710,13 @@ fn pmmr_compact_horizon() {
     // recheck stored data
     {
         // recreate backend
-        let backend =
-            store::pmmr::PMMRBackend::<TestElem>::new(data_dir.to_string(), true, false, None)
-                .unwrap();
+        let backend = store::pmmr::PMMRBackend::<TestElem>::new(
+            data_dir.to_string(),
+            true,
+            false,
+            ProtocolVersion(1),
+            None,
+        )
+        .unwrap();

         // 0010012001001230

@@ -691,8 +746,14 @@ fn compact_twice() {
     // setup the mmr store with all elements
     // Scoped to allow Windows to teardown
     {
-        let mut backend =
-            store::pmmr::PMMRBackend::new(data_dir.to_string(), true, false, None).unwrap();
+        let mut backend = store::pmmr::PMMRBackend::new(
+            data_dir.to_string(),
+            true,
+            false,
+            ProtocolVersion(1),
+            None,
+        )
+        .unwrap();
         let mmr_size = load(0, &elems[..], &mut backend);
         backend.sync().unwrap();