Mirror of https://github.com/mimblewimble/grin.git
Tiny store crate cleanup (#1899)
parent 12be191ecd
commit 368e1a461e
8 changed files with 72 additions and 71 deletions
@@ -63,7 +63,7 @@ impl HashOnlyMMRHandle {
 	fn new(root_dir: &str, sub_dir: &str, file_name: &str) -> Result<HashOnlyMMRHandle, Error> {
 		let path = Path::new(root_dir).join(sub_dir).join(file_name);
 		fs::create_dir_all(path.clone())?;
-		let backend = HashOnlyMMRBackend::new(path.to_str().unwrap().to_string())?;
+		let backend = HashOnlyMMRBackend::new(path.to_str().unwrap())?;
 		let last_pos = backend.unpruned_size()?;
 		Ok(HashOnlyMMRHandle { backend, last_pos })
 	}
@@ -23,10 +23,10 @@ use core::core::hash::Hashed;
 use core::core::pmmr;
 use core::core::BlockHeader;
 use prune_list::PruneList;
-use save_via_temp_file;
+use {read_bitmap, save_via_temp_file};
 
 use std::fs::File;
-use std::io::{self, BufWriter, Read, Write};
+use std::io::{self, BufWriter, Write};
 
 /// Compact (roaring) bitmap representing the set of positions of
 /// leaves that are currently unpruned in the MMR.
@@ -39,26 +39,23 @@ pub struct LeafSet {
 impl LeafSet {
 	/// Open the remove log file.
 	/// The content of the file will be read in memory for fast checking.
-	pub fn open(path: String) -> io::Result<LeafSet> {
+	pub fn open(path: &str) -> io::Result<LeafSet> {
 		let file_path = Path::new(&path);
 		let bitmap = if file_path.exists() {
-			let mut bitmap_file = File::open(path.clone())?;
-			let mut buffer = vec![];
-			bitmap_file.read_to_end(&mut buffer)?;
-			Bitmap::deserialize(&buffer)
+			read_bitmap(file_path)?
 		} else {
 			Bitmap::create()
 		};
 
 		Ok(LeafSet {
-			path: path.clone(),
-			bitmap: bitmap.clone(),
+			path: path.to_string(),
 			bitmap_bak: bitmap.clone(),
+			bitmap,
 		})
 	}
 
 	/// Copies a snapshot of the utxo file into the primary utxo file.
-	pub fn copy_snapshot(path: String, cp_path: String) -> io::Result<()> {
+	pub fn copy_snapshot(path: &str, cp_path: &str) -> io::Result<()> {
 		let cp_file_path = Path::new(&cp_path);
 
 		if !cp_file_path.exists() {
@@ -66,17 +63,13 @@ impl LeafSet {
 			return Ok(());
 		}
 
-		let mut bitmap_file = File::open(cp_path.clone())?;
-		let mut buffer = vec![];
-		bitmap_file.read_to_end(&mut buffer)?;
-		let bitmap = Bitmap::deserialize(&buffer);
-
+		let bitmap = read_bitmap(cp_file_path)?;
 		debug!("leaf_set: copying rewound file {} to {}", cp_path, path);
 
 		let mut leaf_set = LeafSet {
-			path: path.clone(),
-			bitmap: bitmap.clone(),
+			path: path.to_string(),
 			bitmap_bak: bitmap.clone(),
+			bitmap,
 		};
 
 		leaf_set.flush()?;
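The leaf_set.rs changes above follow the pattern used throughout this commit: owned String parameters become borrowed &str, so call sites like LeafSet::open(&leaf_set_path) stop cloning a path just to open a file, and the method allocates only where it actually stores the value. A minimal sketch of the idiom, using a made-up Config type purely for illustration:

    // Borrow for the call; allocate once where the value is stored.
    struct Config {
        path: String,
    }

    fn open(path: &str) -> Config {
        Config {
            // the single owned copy lives in the struct
            path: path.to_string(),
        }
    }

    fn main() {
        let p = String::from("pmmr_leaf.bin");
        let _a = open(&p); // &String coerces to &str, no clone at the call site
        let _b = open("pmmr_hash.bin"); // string literals work directly
    }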
@@ -32,7 +32,6 @@ extern crate log;
 extern crate failure;
-#[macro_use]
 extern crate failure_derive;
 
 #[macro_use]
 extern crate grin_core as core;
 extern crate grin_util as util;
@@ -43,7 +42,7 @@ pub mod pmmr;
 pub mod prune_list;
 pub mod types;
 
-const SEP: u8 = ':' as u8;
+const SEP: u8 = b':';
 
 use byteorder::{BigEndian, WriteBytesExt};
 
@@ -68,7 +67,7 @@ pub fn to_key_u64(prefix: u8, k: &mut Vec<u8>, val: u64) -> Vec<u8> {
 	res
 }
 /// Build a db key from a prefix and a numeric identifier.
-pub fn u64_to_key<'a>(prefix: u8, val: u64) -> Vec<u8> {
+pub fn u64_to_key(prefix: u8, val: u64) -> Vec<u8> {
 	let mut u64_vec = vec![];
 	u64_vec.write_u64::<BigEndian>(val).unwrap();
 	u64_vec.insert(0, SEP);
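For context, the db keys these helpers build are a prefix byte, the SEP byte, then the value in big-endian order. The hunk cuts off before the end of u64_to_key, so the final prefix insertion in the sketch below is an assumption rather than a quote of the crate's code (it mirrors what the neighbouring to_key helpers do):

    // Hedged sketch of the assumed key layout; requires the byteorder crate.
    use byteorder::{BigEndian, WriteBytesExt};

    const SEP: u8 = b':';

    fn u64_to_key(prefix: u8, val: u64) -> Vec<u8> {
        let mut u64_vec = vec![];
        u64_vec.write_u64::<BigEndian>(val).unwrap();
        u64_vec.insert(0, SEP);
        u64_vec.insert(0, prefix); // assumed final step, not shown in the hunk
        u64_vec
    }

    fn main() {
        // prefix, separator, then eight big-endian bytes
        assert_eq!(
            u64_to_key(b'H', 1),
            vec![b'H', b':', 0, 0, 0, 0, 0, 0, 0, 1]
        );
    }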
@@ -111,3 +110,16 @@ where
 
 	Ok(())
 }
+
+use croaring::Bitmap;
+use std::io::{self, Read};
+use std::path::Path;
+/// Read Bitmap from a file
+pub fn read_bitmap<P: AsRef<Path>>(file_path: P) -> io::Result<Bitmap> {
+	use std::fs::File;
+	let mut bitmap_file = File::open(file_path)?;
+	let f_md = bitmap_file.metadata()?;
+	let mut buffer = Vec::with_capacity(f_md.len() as usize);
+	bitmap_file.read_to_end(&mut buffer)?;
+	Ok(Bitmap::deserialize(&buffer))
+}
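The new read_bitmap helper above centralizes the open / preallocate-from-metadata / read_to_end / deserialize sequence that leaf_set.rs and prune_list.rs previously repeated inline. Because the parameter is generic over AsRef<Path>, callers can pass a &str, a String, a &Path or a PathBuf. A small usage sketch (load_leaf_set_bitmap is a hypothetical caller, not part of the commit):

    use std::io;
    use std::path::Path;

    use croaring::Bitmap;

    // read_bitmap is the helper added above; data_dir is any directory path.
    fn load_leaf_set_bitmap(data_dir: &str) -> io::Result<Bitmap> {
        // A PathBuf satisfies the AsRef<Path> bound, as would a plain &str.
        let path = Path::new(data_dir).join("pmmr_leaf.bin");
        read_bitmap(path)
    }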
@@ -148,11 +148,11 @@ impl Store {
 	/// Produces an iterator of `Readable` types moving forward from the
 	/// provided key.
 	pub fn iter<T: ser::Readable>(&self, from: &[u8]) -> Result<SerIterator<T>, Error> {
-		let txn = Arc::new(lmdb::ReadTransaction::new(self.env.clone())?);
-		let cursor = Arc::new(txn.cursor(self.db.clone()).unwrap());
+		let tx = Arc::new(lmdb::ReadTransaction::new(self.env.clone())?);
+		let cursor = Arc::new(tx.cursor(self.db.clone()).unwrap());
 		Ok(SerIterator {
-			tx: txn,
-			cursor: cursor,
+			tx,
+			cursor,
 			seek: false,
 			prefix: from.to_vec(),
 			_marker: marker::PhantomData,
@@ -25,10 +25,10 @@ use leaf_set::LeafSet;
 use prune_list::PruneList;
 use types::{prune_noop, AppendOnlyFile, HashFile};
 
-const PMMR_HASH_FILE: &'static str = "pmmr_hash.bin";
-const PMMR_DATA_FILE: &'static str = "pmmr_data.bin";
-const PMMR_LEAF_FILE: &'static str = "pmmr_leaf.bin";
-const PMMR_PRUN_FILE: &'static str = "pmmr_prun.bin";
+const PMMR_HASH_FILE: &str = "pmmr_hash.bin";
+const PMMR_DATA_FILE: &str = "pmmr_data.bin";
+const PMMR_LEAF_FILE: &str = "pmmr_leaf.bin";
+const PMMR_PRUN_FILE: &str = "pmmr_prun.bin";
 
 /// The list of PMMR_Files for internal purposes
 pub const PMMR_FILES: [&str; 4] = [
@@ -77,7 +77,7 @@ where
 			self.leaf_set.add(position);
 		}
 		self.data_file.append(&mut ser::ser_vec(&data).unwrap());
-		for ref h in hashes {
+		for h in &hashes {
 			self.hash_file.append(&mut ser::ser_vec(h).unwrap());
 		}
 		Ok(())
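The loop rewrite above (for h in &hashes instead of for ref h in hashes) borrows the vector and yields references without moving it, which is the idiomatic form. A standalone sketch with plain strings standing in for hash values:

    fn main() {
        let hashes = vec![String::from("h1"), String::from("h2")];

        // Borrowing the Vec yields &String items and leaves hashes usable afterwards.
        for h in &hashes {
            println!("appending {}", h);
        }

        // `for ref h in hashes` also binds each item by reference,
        // but it consumes the Vec in the process.
        assert_eq!(hashes.len(), 2);
    }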
@@ -105,7 +105,7 @@ where
 					"Corrupted storage, could not read an entry from hash store: {:?}",
 					e
 				);
-				return None;
+				None
 			}
 		}
 	}
@@ -128,7 +128,7 @@ where
 					"Corrupted storage, could not read an entry from data store: {:?}",
 					e
 				);
-				return None;
+				None
 			}
 		}
 	}
@@ -220,8 +220,8 @@ where
 		prunable: bool,
 		header: Option<&BlockHeader>,
 	) -> io::Result<PMMRBackend<T>> {
-		let hash_file = AppendOnlyFile::open(format!("{}/{}", data_dir, PMMR_HASH_FILE))?;
-		let data_file = AppendOnlyFile::open(format!("{}/{}", data_dir, PMMR_DATA_FILE))?;
+		let hash_file = AppendOnlyFile::open(&format!("{}/{}", data_dir, PMMR_HASH_FILE))?;
+		let data_file = AppendOnlyFile::open(&format!("{}/{}", data_dir, PMMR_DATA_FILE))?;
 
 		let leaf_set_path = format!("{}/{}", data_dir, PMMR_LEAF_FILE);
 
@@ -229,11 +229,11 @@ where
 		// place so we use it.
 		if let Some(header) = header {
 			let leaf_snapshot_path = format!("{}/{}.{}", data_dir, PMMR_LEAF_FILE, header.hash());
-			LeafSet::copy_snapshot(leaf_set_path.clone(), leaf_snapshot_path.clone())?;
+			LeafSet::copy_snapshot(&leaf_set_path, &leaf_snapshot_path)?;
 		}
 
-		let leaf_set = LeafSet::open(leaf_set_path.clone())?;
-		let prune_list = PruneList::open(format!("{}/{}", data_dir, PMMR_PRUN_FILE))?;
+		let leaf_set = LeafSet::open(&leaf_set_path)?;
+		let prune_list = PruneList::open(&format!("{}/{}", data_dir, PMMR_PRUN_FILE))?;
 
 		Ok(PMMRBackend {
 			data_dir,
@@ -357,7 +357,7 @@ where
 
 			self.hash_file.save_prune(
 				tmp_prune_file_hash.clone(),
-				off_to_rm,
+				&off_to_rm,
 				record_len,
 				&prune_noop,
 			)?;
@@ -381,7 +381,7 @@ where
 
 			self.data_file.save_prune(
 				tmp_prune_file_data.clone(),
-				off_to_rm,
+				&off_to_rm,
 				record_len,
 				prune_cb,
 			)?;
@@ -400,14 +400,14 @@ where
 			tmp_prune_file_hash.clone(),
 			format!("{}/{}", self.data_dir, PMMR_HASH_FILE),
 		)?;
-		self.hash_file = AppendOnlyFile::open(format!("{}/{}", self.data_dir, PMMR_HASH_FILE))?;
+		self.hash_file = AppendOnlyFile::open(&format!("{}/{}", self.data_dir, PMMR_HASH_FILE))?;
 
 		// 5. Rename the compact copy of the data file and reopen it.
 		fs::rename(
 			tmp_prune_file_data.clone(),
 			format!("{}/{}", self.data_dir, PMMR_DATA_FILE),
 		)?;
-		self.data_file = AppendOnlyFile::open(format!("{}/{}", self.data_dir, PMMR_DATA_FILE))?;
+		self.data_file = AppendOnlyFile::open(&format!("{}/{}", self.data_dir, PMMR_DATA_FILE))?;
 
 		// 6. Write the leaf_set to disk.
 		// Optimize the bitmap storage in the process.
@@ -445,7 +445,7 @@ where
 				}
 			}
 		}
-		(leaf_pos_to_rm, removed_excl_roots(expanded))
+		(leaf_pos_to_rm, removed_excl_roots(&expanded))
 	}
 }
 
@@ -457,7 +457,7 @@ pub struct HashOnlyMMRBackend {
 
 impl HashOnlyBackend for HashOnlyMMRBackend {
 	fn append(&mut self, hashes: Vec<Hash>) -> Result<(), String> {
-		for ref h in hashes {
+		for h in &hashes {
 			self.hash_file
 				.append(h)
 				.map_err(|e| format!("Failed to append to backend, {:?}", e))?;
@@ -480,8 +480,8 @@ impl HashOnlyBackend for HashOnlyMMRBackend {
 impl HashOnlyMMRBackend {
 	/// Instantiates a new PMMR backend.
 	/// Use the provided dir to store its files.
-	pub fn new(data_dir: String) -> io::Result<HashOnlyMMRBackend> {
-		let hash_file = HashFile::open(format!("{}/{}", data_dir, PMMR_HASH_FILE))?;
+	pub fn new(data_dir: &str) -> io::Result<HashOnlyMMRBackend> {
+		let hash_file = HashFile::open(&format!("{}/{}", data_dir, PMMR_HASH_FILE))?;
 		Ok(HashOnlyMMRBackend { hash_file })
 	}
 
@@ -510,7 +510,7 @@ impl HashOnlyMMRBackend {
 
 /// Filter remove list to exclude roots.
 /// We want to keep roots around so we have hashes for Merkle proofs.
-fn removed_excl_roots(removed: Bitmap) -> Bitmap {
+fn removed_excl_roots(removed: &Bitmap) -> Bitmap {
 	removed
 		.iter()
 		.filter(|pos| {
@@ -21,14 +21,13 @@
 //! must be shifted the appropriate amount when reading from the hash and data
 //! files.
 
-use std::fs::File;
-use std::io::{self, BufWriter, Read, Write};
+use std::io::{self, BufWriter, Write};
 use std::path::Path;
 
 use croaring::Bitmap;
 
 use core::core::pmmr::{bintree_postorder_height, family, path};
-use save_via_temp_file;
+use {read_bitmap, save_via_temp_file};
 
 /// Maintains a list of previously pruned nodes in PMMR, compacting the list as
 /// parents get pruned and allowing checking whether a leaf is pruned. Given
@@ -64,19 +63,16 @@ impl PruneList {
 	}
 
 	/// Open an existing prune_list or create a new one.
-	pub fn open(path: String) -> io::Result<PruneList> {
+	pub fn open(path: &str) -> io::Result<PruneList> {
 		let file_path = Path::new(&path);
 		let bitmap = if file_path.exists() {
-			let mut bitmap_file = File::open(path.clone())?;
-			let mut buffer = vec![];
-			bitmap_file.read_to_end(&mut buffer)?;
-			Bitmap::deserialize(&buffer)
+			read_bitmap(&file_path)?
 		} else {
 			Bitmap::create()
 		};
 
 		let mut prune_list = PruneList {
-			path: Some(path.clone()),
+			path: Some(path.to_string()),
 			bitmap,
 			pruned_cache: Bitmap::create(),
 			shift_cache: vec![],
@@ -38,7 +38,7 @@ pub struct HashFile {
 
 impl HashFile {
 	/// Open (or create) a hash file at the provided path on disk.
-	pub fn open(path: String) -> io::Result<HashFile> {
+	pub fn open(path: &str) -> io::Result<HashFile> {
 		let file = AppendOnlyFile::open(path)?;
 		Ok(HashFile { file })
 	}
@@ -67,7 +67,7 @@ impl HashFile {
 				"Corrupted storage, could not read an entry from hash file: {:?}",
 				e
 			);
-			return None;
+			None
 		}
 	}
 }
@@ -113,15 +113,15 @@ pub struct AppendOnlyFile {
 
 impl AppendOnlyFile {
 	/// Open a file (existing or not) as append-only, backed by a mmap.
-	pub fn open(path: String) -> io::Result<AppendOnlyFile> {
+	pub fn open(path: &str) -> io::Result<AppendOnlyFile> {
 		let file = OpenOptions::new()
 			.read(true)
 			.append(true)
 			.create(true)
-			.open(path.clone())?;
+			.open(&path)?;
 		let mut aof = AppendOnlyFile {
-			path: path.clone(),
-			file: file,
+			file,
+			path: path.to_string(),
 			mmap: None,
 			buffer_start: 0,
 			buffer: vec![],
@@ -216,7 +216,7 @@ impl AppendOnlyFile {
 			let buffer_offset = offset - self.buffer_start;
 			return self.read_from_buffer(buffer_offset, length);
 		}
-		if let None = self.mmap {
+		if self.mmap.is_none() {
 			return vec![];
 		}
 		let mmap = self.mmap.as_ref().unwrap();
@@ -255,7 +255,7 @@ impl AppendOnlyFile {
 	pub fn save_prune<T>(
 		&self,
 		target: String,
-		prune_offs: Vec<u64>,
+		prune_offs: &[u64],
 		prune_len: u64,
 		prune_cb: T,
 	) -> io::Result<()>
@@ -263,11 +263,11 @@ impl AppendOnlyFile {
 		T: Fn(&[u8]),
 	{
 		if prune_offs.is_empty() {
-			fs::copy(self.path.clone(), target.clone())?;
+			fs::copy(&self.path, &target)?;
 			Ok(())
 		} else {
-			let mut reader = File::open(self.path.clone())?;
-			let mut writer = BufWriter::new(File::create(target.clone())?);
+			let mut reader = File::open(&self.path)?;
+			let mut writer = BufWriter::new(File::create(&target)?);
 
 			// align the buffer on prune_len to avoid misalignments
 			let mut buf = vec![0; (prune_len * 256) as usize];
@@ -299,7 +299,7 @@ impl AppendOnlyFile {
 					break;
 				}
 			}
-			writer.write_all(&mut buf[buf_start..(len as usize)])?;
+			writer.write_all(&buf[buf_start..(len as usize)])?;
 			read += len;
 		}
 	}
@@ -322,14 +322,14 @@ impl AppendOnlyFile {
 }
 
 /// Read an ordered vector of scalars from a file.
-pub fn read_ordered_vec<T>(path: String, elmt_len: usize) -> io::Result<Vec<T>>
+pub fn read_ordered_vec<T>(path: &str, elmt_len: usize) -> io::Result<Vec<T>>
 where
 	T: ser::Readable + cmp::Ord,
 {
 	let file_path = Path::new(&path);
 	let mut ovec = Vec::with_capacity(1000);
 	if file_path.exists() {
-		let mut file = BufReader::with_capacity(elmt_len * 1000, File::open(path.clone())?);
+		let mut file = BufReader::with_capacity(elmt_len * 1000, File::open(&path)?);
 		loop {
 			// need a block to end mutable borrow before consume
 			let buf_len = {
@@ -360,7 +360,7 @@ where
 }
 
 /// Writes an ordered vector to a file
-pub fn write_vec<T>(path: String, v: &Vec<T>) -> io::Result<()>
+pub fn write_vec<T>(path: &str, v: &Vec<T>) -> io::Result<()>
 where
 	T: ser::Writeable,
 {
@@ -101,7 +101,7 @@ fn setup(test_name: &str) -> (LeafSet, String) {
 	let _ = env_logger::init();
 	let data_dir = format!("./target/{}-{}", test_name, Utc::now().timestamp());
 	fs::create_dir_all(data_dir.clone()).unwrap();
-	let leaf_set = LeafSet::open(format!("{}/{}", data_dir, "utxo.bin")).unwrap();
+	let leaf_set = LeafSet::open(&format!("{}/{}", data_dir, "utxo.bin")).unwrap();
 	(leaf_set, data_dir)
 }
 