Optimizing the code and making it more idiomatic (#2269)

This commit is contained in:
Elichai Turkel 2019-01-02 01:29:16 +02:00 committed by Ignotus Peverell
parent 23df6fa976
commit f2a29ce37a
17 changed files with 119 additions and 104 deletions

3
.gitignore vendored
View file

@ -5,10 +5,11 @@ node*
!node_clients
!node_clients.rs
target
Cargo.lock
*/Cargo.lock
*.iml
grin.log
wallet.seed
test_output
wallet_data
wallet/db
.idea/

View file

@ -270,7 +270,7 @@ impl<'a> Batch<'a> {
fn save_block_input_bitmap(&self, bh: &Hash, bm: &Bitmap) -> Result<(), Error> {
self.db.put(
&to_key(BLOCK_INPUT_BITMAP_PREFIX, &mut bh.to_vec())[..],
bm.serialize(),
&bm.serialize(),
)
}

View file

@ -309,19 +309,13 @@ where
/// Count, in units of 1/100 (a percent), the number of "secondary" (AR) blocks in the provided window of blocks.
pub fn ar_count(_height: u64, diff_data: &[HeaderInfo]) -> u64 {
100 * diff_data
.iter()
.filter(|n| n.is_secondary)
.count() as u64
100 * diff_data.iter().filter(|n| n.is_secondary).count() as u64
}
/// Factor by which the secondary proof of work difficulty will be adjusted
pub fn secondary_pow_scaling(height: u64, diff_data: &[HeaderInfo]) -> u32 {
// Get the scaling factor sum of the last DIFFICULTY_ADJUST_WINDOW elements
let scale_sum: u64 = diff_data
.iter()
.map(|dd| dd.secondary_scaling as u64)
.sum();
let scale_sum: u64 = diff_data.iter().map(|dd| dd.secondary_scaling as u64).sum();
// compute ideal 2nd_pow_fraction in pct and across window
let target_pct = secondary_pow_ratio(height);

View file

@ -27,10 +27,7 @@ use crate::core::committed::{self, Committed};
use crate::core::compact_block::{CompactBlock, CompactBlockBody};
use crate::core::hash::{Hash, Hashed, ZERO_HASH};
use crate::core::verifier_cache::VerifierCache;
use crate::core::{
transaction, Commitment, Input, Output, Transaction,
TransactionBody, TxKernel,
};
use crate::core::{transaction, Commitment, Input, Output, Transaction, TransactionBody, TxKernel};
use crate::global;
use crate::keychain::{self, BlindingFactor};
use crate::pow::{Difficulty, Proof, ProofOfWork};

View file

@ -17,6 +17,7 @@ use croaring::Bitmap;
use crate::core::hash::Hash;
use crate::core::BlockHeader;
use crate::ser::PMMRable;
use std::path::Path;
/// Storage backend for the MMR, just needs to be indexed by order of insertion.
/// The PMMR itself does not need the Backend to be accurate on the existence
@ -59,7 +60,7 @@ pub trait Backend<T: PMMRable> {
/// Returns the data file path... this is a bit of a hack now that doesn't
/// sit well with the design, but TxKernels have to be summed and the
/// fastest way to be able to allow direct access to the file
fn get_data_file_path(&self) -> &str;
fn get_data_file_path(&self) -> &Path;
/// Also a bit of a hack...
/// Saves a snapshot of the rewound utxo file with the block hash as

View file

@ -22,6 +22,7 @@ use crate::core::merkle_proof::MerkleProof;
use crate::core::pmmr::{Backend, ReadonlyPMMR};
use crate::core::BlockHeader;
use crate::ser::{PMMRIndexHashable, PMMRable};
use std::path::Path;
/// 64 bits all ones: 0b11111111...1
const ALL_ONES: u64 = u64::MAX;
@ -323,7 +324,7 @@ where
}
/// Return the path of the data file (needed to sum kernels efficiently)
pub fn data_file_path(&self) -> &str {
pub fn data_file_path(&self) -> &Path {
self.backend.get_data_file_path()
}

View file

@ -23,11 +23,7 @@ use crate::libtx::{aggsig, proof};
use crate::util::static_secp_instance;
/// output a reward output
pub fn output<K>(
keychain: &K,
key_id: &Identifier,
fees: u64,
) -> Result<(Output, TxKernel), Error>
pub fn output<K>(keychain: &K, key_id: &Identifier, fees: u64) -> Result<(Output, TxKernel), Error>
where
K: Keychain,
{

View file

@ -342,7 +342,6 @@ fn adjustment_scenarios() {
println!("*********************************************************");
print_chain_sim(chain_sim);
println!("*********************************************************");
}
/// Checks different next_target adjustments and difficulty boundaries

View file

@ -20,6 +20,7 @@ use self::core::ser::{FixedLength, PMMRable, Readable, Reader, Writeable, Writer
use croaring;
use croaring::Bitmap;
use grin_core as core;
use std::path::Path;
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct TestElem(pub [u32; 4]);
@ -117,8 +118,8 @@ impl<T: PMMRable> Backend<T> for VecBackend<T> {
Ok(())
}
fn get_data_file_path(&self) -> &str {
""
fn get_data_file_path(&self) -> &Path {
Path::new("")
}
fn dump_stats(&self) {}

View file

@ -15,7 +15,7 @@
//! Compact (roaring) bitmap representing the set of leaf positions
//! that exist and are not currently pruned in the MMR.
use std::path::Path;
use std::path::{Path, PathBuf};
use croaring::Bitmap;
@ -31,7 +31,7 @@ use std::io::{self, BufWriter, Write};
/// Compact (roaring) bitmap representing the set of positions of
/// leaves that are currently unpruned in the MMR.
pub struct LeafSet {
path: String,
path: PathBuf,
bitmap: Bitmap,
bitmap_bak: Bitmap,
}
@ -39,35 +39,42 @@ pub struct LeafSet {
impl LeafSet {
/// Open the remove log file.
/// The content of the file will be read in memory for fast checking.
pub fn open(path: &str) -> io::Result<LeafSet> {
let file_path = Path::new(&path);
pub fn open<P: AsRef<Path>>(path: P) -> io::Result<LeafSet> {
let file_path = path.as_ref();
let bitmap = if file_path.exists() {
read_bitmap(file_path)?
read_bitmap(&file_path)?
} else {
Bitmap::create()
};
Ok(LeafSet {
path: path.to_string(),
path: file_path.to_path_buf(),
bitmap_bak: bitmap.clone(),
bitmap,
})
}
/// Copies a snapshot of the utxo file into the primary utxo file.
pub fn copy_snapshot(path: &str, cp_path: &str) -> io::Result<()> {
let cp_file_path = Path::new(&cp_path);
pub fn copy_snapshot<P: AsRef<Path>>(path: P, cp_path: P) -> io::Result<()> {
let cp_file_path = cp_path.as_ref();
if !cp_file_path.exists() {
debug!("leaf_set: rewound leaf file not found: {}", cp_path);
debug!(
"leaf_set: rewound leaf file not found: {}",
cp_file_path.display()
);
return Ok(());
}
let bitmap = read_bitmap(cp_file_path)?;
debug!("leaf_set: copying rewound file {} to {}", cp_path, path);
let bitmap = read_bitmap(&cp_file_path)?;
debug!(
"leaf_set: copying rewound file {} to {}",
cp_file_path.display(),
path.as_ref().display()
);
let mut leaf_set = LeafSet {
path: path.to_string(),
path: path.as_ref().to_path_buf(),
bitmap_bak: bitmap.clone(),
bitmap,
};
@ -148,7 +155,7 @@ impl LeafSet {
let mut cp_bitmap = self.bitmap.clone();
cp_bitmap.run_optimize();
let cp_path = format!("{}.{}", self.path, header.hash());
let cp_path = self.path.join(header.hash().to_string());
let mut file = BufWriter::new(File::create(cp_path)?);
file.write_all(&cp_bitmap.serialize())?;
file.flush()?;

View file

@ -56,7 +56,7 @@ pub fn to_key(prefix: u8, k: &mut Vec<u8>) -> Vec<u8> {
/// Build a db key from a prefix and a byte vector identifier and numeric identifier
pub fn to_key_u64(prefix: u8, k: &mut Vec<u8>, val: u64) -> Vec<u8> {
let mut res = vec![];
let mut res = Vec::with_capacity(k.len() + 10);
res.push(prefix);
res.push(SEP);
res.append(k);
@ -65,31 +65,36 @@ pub fn to_key_u64(prefix: u8, k: &mut Vec<u8>, val: u64) -> Vec<u8> {
}
/// Build a db key from a prefix and a numeric identifier.
pub fn u64_to_key(prefix: u8, val: u64) -> Vec<u8> {
let mut u64_vec = vec![];
u64_vec.write_u64::<BigEndian>(val).unwrap();
u64_vec.insert(0, SEP);
u64_vec.insert(0, prefix);
u64_vec
let mut res = Vec::with_capacity(10);
res.push(prefix);
res.push(SEP);
res.write_u64::<BigEndian>(val).unwrap();
res
}
use std::ffi::OsStr;
use std::fs::{remove_file, rename, File};
use std::path::Path;
/// Creates temporary file with name created by adding `temp_suffix` to `path`.
/// Applies writer function to it and renames temporary file into original specified by `path`.
pub fn save_via_temp_file<F>(
path: &str,
temp_suffix: &str,
pub fn save_via_temp_file<F, P, E>(
path: P,
temp_suffix: E,
mut writer: F,
) -> Result<(), std::io::Error>
where
F: FnMut(Box<dyn std::io::Write>) -> Result<(), std::io::Error>,
P: AsRef<Path>,
E: AsRef<OsStr>,
{
assert_ne!(*temp_suffix, *"");
use std::fs::{remove_file, rename, File};
use std::path::Path;
let temp_suffix = temp_suffix.as_ref();
assert!(!temp_suffix.is_empty());
let original = path.as_ref();
let mut _original = original.as_os_str().to_os_string();
_original.push(temp_suffix);
// Write temporary file
let temp_name = format!("{}{}", &path, temp_suffix);
let temp_path = Path::new(&temp_name);
let temp_path = Path::new(&_original);
if temp_path.exists() {
remove_file(&temp_path)?;
}
@ -98,7 +103,6 @@ where
writer(Box::new(file))?;
// Move temporary file into original
let original = Path::new(&path);
if original.exists() {
remove_file(&original)?;
}
@ -110,10 +114,8 @@ where
use croaring::Bitmap;
use std::io::{self, Read};
use std::path::Path;
/// Read Bitmap from a file
pub fn read_bitmap<P: AsRef<Path>>(file_path: P) -> io::Result<Bitmap> {
use std::fs::File;
let mut bitmap_file = File::open(file_path)?;
let f_md = bitmap_file.metadata()?;
let mut buffer = Vec::with_capacity(f_md.len() as usize);

View file

@ -67,15 +67,17 @@ pub fn new_env(path: String) -> lmdb::Environment {
pub fn new_named_env(path: String, name: String) -> lmdb::Environment {
let full_path = [path, name].join("/");
fs::create_dir_all(&full_path).unwrap();
unsafe {
let mut env_builder = lmdb::EnvBuilder::new().unwrap();
env_builder.set_maxdbs(8).unwrap();
// half a TB should give us plenty room, will be an issue on 32 bits
// (which we don't support anyway)
env_builder.set_mapsize(549755813888).unwrap_or_else(|e| {
env_builder
.set_mapsize(549_755_813_888)
.unwrap_or_else(|e| {
panic!("Unable to allocate LMDB space: {:?}", e);
});
unsafe {
env_builder
.open(&full_path, lmdb::open::Flags::empty(), 0o600)
.unwrap()
@ -178,10 +180,10 @@ pub struct Batch<'a> {
impl<'a> Batch<'a> {
/// Writes a single key/value pair to the db
pub fn put(&self, key: &[u8], value: Vec<u8>) -> Result<(), Error> {
pub fn put(&self, key: &[u8], value: &[u8]) -> Result<(), Error> {
self.tx
.access()
.put(&self.store.db, key, &value, lmdb::put::Flags::empty())?;
.put(&self.store.db, key, value, lmdb::put::Flags::empty())?;
Ok(())
}
@ -190,7 +192,7 @@ impl<'a> Batch<'a> {
pub fn put_ser<W: ser::Writeable>(&self, key: &[u8], value: &W) -> Result<(), Error> {
let ser_value = ser::ser_vec(value);
match ser_value {
Ok(data) => self.put(key, data),
Ok(data) => self.put(key, &data),
Err(err) => Err(Error::SerErr(format!("{}", err))),
}
}

View file

@ -23,6 +23,7 @@ use crate::leaf_set::LeafSet;
use crate::prune_list::PruneList;
use crate::types::{prune_noop, DataFile};
use croaring::Bitmap;
use std::path::{Path, PathBuf};
const PMMR_HASH_FILE: &str = "pmmr_hash.bin";
const PMMR_DATA_FILE: &str = "pmmr_data.bin";
@ -50,7 +51,7 @@ pub const PMMR_FILES: [&str; 4] = [
/// * A prune_list tracks the positions of pruned (and compacted) roots in the
/// MMR.
pub struct PMMRBackend<T: PMMRable> {
data_dir: String,
data_dir: PathBuf,
prunable: bool,
hash_file: DataFile<Hash>,
data_file: DataFile<T::E>,
@ -147,7 +148,7 @@ impl<T: PMMRable> Backend<T> for PMMRBackend<T> {
}
/// Return data file path
fn get_data_file_path(&self) -> &str {
fn get_data_file_path(&self) -> &Path {
self.data_file.path()
}
@ -173,28 +174,32 @@ impl<T: PMMRable> Backend<T> for PMMRBackend<T> {
impl<T: PMMRable> PMMRBackend<T> {
/// Instantiates a new PMMR backend.
/// Use the provided dir to store its files.
pub fn new(
data_dir: String,
pub fn new<P: AsRef<Path>>(
data_dir: P,
prunable: bool,
header: Option<&BlockHeader>,
) -> io::Result<PMMRBackend<T>> {
let hash_file = DataFile::open(&format!("{}/{}", data_dir, PMMR_HASH_FILE))?;
let data_file = DataFile::open(&format!("{}/{}", data_dir, PMMR_DATA_FILE))?;
let data_dir = data_dir.as_ref();
let hash_file = DataFile::open(&data_dir.join(PMMR_HASH_FILE))?;
let data_file = DataFile::open(&data_dir.join(PMMR_DATA_FILE))?;
let leaf_set_path = format!("{}/{}", data_dir, PMMR_LEAF_FILE);
let leaf_set_path = data_dir.join(PMMR_LEAF_FILE);
// If we received a rewound "snapshot" leaf_set file move it into
// place so we use it.
if let Some(header) = header {
let leaf_snapshot_path = format!("{}/{}.{}", data_dir, PMMR_LEAF_FILE, header.hash());
let _leaf_snapshot_path = (data_dir.join(PMMR_LEAF_FILE).to_string_lossy()
+ header.hash().to_string().as_ref())
.into_owned();
let leaf_snapshot_path = PathBuf::from(_leaf_snapshot_path);
LeafSet::copy_snapshot(&leaf_set_path, &leaf_snapshot_path)?;
}
let leaf_set = LeafSet::open(&leaf_set_path)?;
let prune_list = PruneList::open(&format!("{}/{}", data_dir, PMMR_PRUN_FILE))?;
let prune_list = PruneList::open(&data_dir.join(PMMR_PRUN_FILE))?;
Ok(PMMRBackend {
data_dir,
data_dir: data_dir.to_path_buf(),
prunable,
hash_file,
data_file,
@ -278,9 +283,10 @@ impl<T: PMMRable> PMMRBackend<T> {
assert!(self.prunable, "Trying to compact a non-prunable PMMR");
// Paths for tmp hash and data files.
let tmp_prune_file_hash = format!("{}/{}.hashprune", self.data_dir, PMMR_HASH_FILE);
let tmp_prune_file_data = format!("{}/{}.dataprune", self.data_dir, PMMR_DATA_FILE);
let tmp_prune_file_hash =
format!("{}.hashprune", self.data_dir.join(PMMR_HASH_FILE).display());
let tmp_prune_file_data =
format!("{}.dataprune", self.data_dir.join(PMMR_DATA_FILE).display());
// Calculate the sets of leaf positions and node positions to remove based
// on the cutoff_pos provided.
let (leaves_removed, pos_to_rm) = self.pos_to_rm(cutoff_pos, rewind_rm_pos);
@ -293,7 +299,7 @@ impl<T: PMMRable> PMMRBackend<T> {
});
self.hash_file
.save_prune(tmp_prune_file_hash.clone(), &off_to_rm, &prune_noop)?;
.save_prune(&tmp_prune_file_hash, &off_to_rm, &prune_noop)?;
}
// 2. Save compact copy of the data file, skipping removed leaves.
@ -311,7 +317,7 @@ impl<T: PMMRable> PMMRBackend<T> {
});
self.data_file
.save_prune(tmp_prune_file_data.clone(), &off_to_rm, prune_cb)?;
.save_prune(&tmp_prune_file_data, &off_to_rm, prune_cb)?;
}
// 3. Update the prune list and write to disk.
@ -325,16 +331,16 @@ impl<T: PMMRable> PMMRBackend<T> {
// 4. Rename the compact copy of hash file and reopen it.
fs::rename(
tmp_prune_file_hash.clone(),
format!("{}/{}", self.data_dir, PMMR_HASH_FILE),
self.data_dir.join(PMMR_HASH_FILE),
)?;
self.hash_file = DataFile::open(&format!("{}/{}", self.data_dir, PMMR_HASH_FILE))?;
self.hash_file = DataFile::open(self.data_dir.join(PMMR_HASH_FILE))?;
// 5. Rename the compact copy of the data file and reopen it.
fs::rename(
tmp_prune_file_data.clone(),
format!("{}/{}", self.data_dir, PMMR_DATA_FILE),
self.data_dir.join(PMMR_DATA_FILE),
)?;
self.data_file = DataFile::open(&format!("{}/{}", self.data_dir, PMMR_DATA_FILE))?;
self.data_file = DataFile::open(self.data_dir.join(PMMR_DATA_FILE))?;
// 6. Write the leaf_set to disk.
// Optimize the bitmap storage in the process.

View file

@ -22,7 +22,7 @@
//! files.
use std::io::{self, BufWriter, Write};
use std::path::Path;
use std::path::{Path, PathBuf};
use croaring::Bitmap;
@ -41,7 +41,7 @@ use crate::{read_bitmap, save_via_temp_file};
/// backend storage anymore. The PruneList accounts for that mismatch and does
/// the position translation.
pub struct PruneList {
path: Option<String>,
path: Option<PathBuf>,
/// Bitmap representing pruned root node positions.
bitmap: Bitmap,
/// Bitmap representing all pruned node positions (everything under the pruned roots).
@ -63,8 +63,8 @@ impl PruneList {
}
/// Open an existing prune_list or create a new one.
pub fn open(path: &str) -> io::Result<PruneList> {
let file_path = Path::new(&path);
pub fn open<P: AsRef<Path>>(path: P) -> io::Result<PruneList> {
let file_path = PathBuf::from(path.as_ref());
let bitmap = if file_path.exists() {
read_bitmap(&file_path)?
} else {
@ -72,7 +72,7 @@ impl PruneList {
};
let mut prune_list = PruneList {
path: Some(path.to_string()),
path: Some(file_path),
bitmap,
pruned_cache: Bitmap::create(),
shift_cache: vec![],
@ -111,7 +111,7 @@ impl PruneList {
// Write the updated bitmap file to disk.
if let Some(ref path) = self.path {
save_via_temp_file(&path, ".tmp", |w| {
save_via_temp_file(path, ".tmp", |w| {
let mut w = BufWriter::new(w);
w.write_all(&self.bitmap.serialize())?;
w.flush()
@ -264,7 +264,7 @@ impl PruneList {
return;
}
self.pruned_cache = Bitmap::create_with_capacity(self.bitmap.maximum());
for pos in 1..(self.bitmap.maximum() + 1) {
for pos in 1..=self.bitmap.maximum() {
let path = path(pos as u64, self.bitmap.maximum() as u64);
let pruned = path.into_iter().any(|x| self.bitmap.contains(x as u32));
if pruned {
@ -279,3 +279,9 @@ impl PruneList {
self.bitmap.contains(pos as u32)
}
}
impl Default for PruneList {
fn default() -> Self {
Self::new()
}
}

View file

@ -18,6 +18,7 @@ use crate::core::ser::{self, FixedLength, Readable, Writeable};
use std::fs::{self, File, OpenOptions};
use std::io::{self, BufWriter, ErrorKind, Read, Write};
use std::marker;
use std::path::{Path, PathBuf};
/// A no-op function for doing nothing with some pruned data.
pub fn prune_noop(_pruned_data: &[u8]) {}
@ -33,7 +34,7 @@ where
T: FixedLength + Readable + Writeable,
{
/// Open (or create) a file at the provided path on disk.
pub fn open(path: &str) -> io::Result<DataFile<T>> {
pub fn open<P: AsRef<Path>>(path: P) -> io::Result<DataFile<T>> {
let file = AppendOnlyFile::open(path)?;
Ok(DataFile {
file,
@ -96,12 +97,12 @@ where
}
/// Path of the underlying file
pub fn path(&self) -> &str {
pub fn path(&self) -> &Path {
self.file.path()
}
/// Write the file out to disk, pruning removed elements.
pub fn save_prune<F>(&self, target: String, prune_offs: &[u64], prune_cb: F) -> io::Result<()>
pub fn save_prune<F>(&self, target: &str, prune_offs: &[u64], prune_cb: F) -> io::Result<()>
where
F: Fn(&[u8]),
{
@ -123,7 +124,7 @@ where
/// former simply happens by rewriting it, ignoring some of the data. The
/// latter by truncating the underlying file and re-creating the mmap.
pub struct AppendOnlyFile {
path: String,
path: PathBuf,
file: File,
mmap: Option<memmap::Mmap>,
buffer_start: usize,
@ -133,7 +134,7 @@ pub struct AppendOnlyFile {
impl AppendOnlyFile {
/// Open a file (existing or not) as append-only, backed by a mmap.
pub fn open(path: &str) -> io::Result<AppendOnlyFile> {
pub fn open<P: AsRef<Path>>(path: P) -> io::Result<AppendOnlyFile> {
let file = OpenOptions::new()
.read(true)
.append(true)
@ -141,7 +142,7 @@ impl AppendOnlyFile {
.open(&path)?;
let mut aof = AppendOnlyFile {
file,
path: path.to_string(),
path: path.as_ref().to_path_buf(),
mmap: None,
buffer_start: 0,
buffer: vec![],
@ -258,15 +259,16 @@ impl AppendOnlyFile {
/// Saves a copy of the current file content, skipping data at the provided
/// prune indices. The prune Vec must be ordered.
pub fn save_prune<T>(
pub fn save_prune<T, P>(
&self,
target: String,
target: P,
prune_offs: &[u64],
prune_len: u64,
prune_cb: T,
) -> io::Result<()>
where
T: Fn(&[u8]),
P: AsRef<Path>,
{
if prune_offs.is_empty() {
fs::copy(&self.path, &target)?;
@ -322,7 +324,7 @@ impl AppendOnlyFile {
}
/// Path of the underlying file
pub fn path(&self) -> &str {
pub fn path(&self) -> &Path {
&self.path
}
}

View file

@ -17,7 +17,6 @@ use grin_core as core;
use grin_store as store;
use std::fs;
use std::io::prelude::*;
use chrono::prelude::Utc;
use croaring::Bitmap;
@ -822,9 +821,9 @@ fn create_numbered_files(
prefix,
start_index + rewind_file_num
));
let mut file = fs::File::create(path.clone()).unwrap();
let file = fs::File::create(path.clone()).unwrap();
let metadata = file.metadata().unwrap();
filetime::set_file_times(path, time_to_set_ft, time_to_set_ft);
filetime::set_file_times(path, time_to_set_ft, time_to_set_ft).unwrap();
}
}

View file

@ -111,7 +111,8 @@ where
verifier_cache,
false,
Arc::new(Mutex::new(StopState::new())),
).unwrap();
)
.unwrap();
let (tx, rx) = channel();
let retval = WalletProxy {
chain_dir: chain_dir.to_owned(),