Merge pull request #1339 from antiochp/rustfmt_chain_crate

run rustfmt against chain crate
commit 395c48c844
Author: hashmap
Date: 2018-08-10 16:08:18 +02:00
Committed by: GitHub
GPG key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
5 changed files with 64 additions and 37 deletions
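
For reference, a formatting pass like this one is normally reproduced with rustfmt via cargo rather than by hand. A minimal sketch of the invocation (assuming the chain crate lives in a chain/ subdirectory of the workspace and the installed rustfmt supports --check; neither is confirmed by this page):

    # Reformat the chain crate in place.
    cd chain && cargo fmt

    # Verify the tree is rustfmt-clean; exits non-zero if any file would change.
    cargo fmt -- --check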

View file

@@ -250,7 +250,7 @@ impl Chain {
         // block got accepted but we did not extend the head
         // so its on a fork (or is the start of a new fork)
         // broadcast the block out so everyone knows about the fork
         // broadcast the block
         self.adapter.block_accepted(&b, opts);
         Ok((None, Some(b)))
@@ -379,7 +379,7 @@ impl Chain {
         trace!(
             LOGGER,
             "chain: done check_orphans at {}. # remaining orphans {}",
-            height-1,
+            height - 1,
             self.orphans.len(),
         );
     }
@@ -541,12 +541,7 @@ impl Chain {
     /// If we're willing to accept that new state, the data stream will be
     /// read as a zip file, unzipped and the resulting state files should be
     /// rewound to the provided indexes.
-    pub fn txhashset_write<T>(
-        &self,
-        h: Hash,
-        txhashset_data: File,
-        status: &T,
-    ) -> Result<(), Error>
+    pub fn txhashset_write<T>(&self, h: Hash, txhashset_data: File, status: &T) -> Result<(), Error>
     where
         T: TxHashsetWriteStatus,
     {
@@ -566,7 +561,10 @@ impl Chain {
         // Validate against a read-only extension first.
         // The kernel history validation requires a read-only extension
         // due to the internal rewind behavior.
-        debug!(LOGGER, "chain: txhashset_write: rewinding and validating (read-only)");
+        debug!(
+            LOGGER,
+            "chain: txhashset_write: rewinding and validating (read-only)"
+        );
         txhashset::extending_readonly(&mut txhashset, |extension| {
             extension.rewind(&header, &header)?;
             extension.validate(&header, false, status)?;
@@ -579,7 +577,10 @@ impl Chain {
         })?;
         // all good, prepare a new batch and update all the required records
-        debug!(LOGGER, "chain: txhashset_write: rewinding a 2nd time (writeable)");
+        debug!(
+            LOGGER,
+            "chain: txhashset_write: rewinding a 2nd time (writeable)"
+        );
         let mut batch = self.store.batch()?;
         txhashset::extending(&mut txhashset, &mut batch, |extension| {
             extension.rewind(&header, &header)?;
@@ -587,7 +588,10 @@ impl Chain {
             Ok(())
         })?;
-        debug!(LOGGER, "chain: txhashset_write: finished validating and rebuilding");
+        debug!(
+            LOGGER,
+            "chain: txhashset_write: finished validating and rebuilding"
+        );
         status.on_save();
         // replace the chain txhashset with the newly built one
@@ -605,7 +609,10 @@ impl Chain {
         }
         batch.commit()?;
-        debug!(LOGGER, "chain: txhashset_write: finished committing the batch (head etc.)");
+        debug!(
+            LOGGER,
+            "chain: txhashset_write: finished committing the batch (head etc.)"
+        );
         self.check_orphans(header.height + 1);

View file

@@ -31,8 +31,8 @@ extern crate serde;
 extern crate serde_derive;
 #[macro_use]
 extern crate slog;
-extern crate failure;
 extern crate chrono;
+extern crate failure;
 #[macro_use]
 extern crate failure_derive;

View file

@@ -17,9 +17,10 @@
 use std::collections::VecDeque;
 use std::sync::{Arc, RwLock};
-use chrono::prelude::{Utc};
+use chrono::prelude::Utc;
 use chrono::Duration;
+use chain::OrphanBlockPool;
 use core::consensus;
 use core::core::hash::{Hash, Hashed};
 use core::core::target::Difficulty;
@@ -29,7 +30,6 @@ use error::{Error, ErrorKind};
 use grin_store;
 use store;
 use txhashset;
-use chain::{OrphanBlockPool};
 use types::{Options, Tip};
 use util::LOGGER;
@@ -218,8 +218,7 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
     }
     // TODO: remove CI check from here somehow
-    if header.timestamp
-        > Utc::now() + Duration::seconds(12 * (consensus::BLOCK_TIME_SEC as i64))
+    if header.timestamp > Utc::now() + Duration::seconds(12 * (consensus::BLOCK_TIME_SEC as i64))
         && !global::is_automated_testing_mode()
     {
         // refuse blocks more than 12 blocks intervals in future (as in bitcoin)

View file

@@ -197,7 +197,8 @@ impl ChainStore {
     // the full block from the db (if the block is found).
     // (bool, Bitmap) : (false if bitmap was built and not found in db)
     fn get_block_input_bitmap_db(&self, bh: &Hash) -> Result<(bool, Bitmap), Error> {
-        if let Ok(Some(bytes)) = self.db
+        if let Ok(Some(bytes)) = self
+            .db
             .get(&to_key(BLOCK_INPUT_BITMAP_PREFIX, &mut bh.to_vec()))
         {
             Ok((true, Bitmap::deserialize(&bytes)))
@@ -442,7 +443,8 @@ impl Iterator for DifficultyIter {
         if let Some(header) = self.header.clone() {
             self.prev_header = self.store.get_block_header(&header.previous).ok();
-            let prev_difficulty = self.prev_header
+            let prev_difficulty = self
+                .prev_header
                 .clone()
                 .map_or(Difficulty::zero(), |x| x.total_difficulty);
             let difficulty = header.total_difficulty - prev_difficulty;

View file

@@ -449,11 +449,7 @@ impl<'a> Extension<'a> {
         kernel_pos: u64,
         rewind_rm_pos: &Bitmap,
     ) -> Result<(), Error> {
-        self.rewind_to_pos(
-            output_pos,
-            kernel_pos,
-            rewind_rm_pos,
-        )?;
+        self.rewind_to_pos(output_pos, kernel_pos, rewind_rm_pos)?;
         Ok(())
     }
@@ -466,7 +462,10 @@ impl<'a> Extension<'a> {
     /// new tx).
     pub fn apply_raw_tx(&mut self, tx: &Transaction) -> Result<(), Error> {
         // This should *never* be called on a writeable extension...
-        assert!(self.rollback, "applied raw_tx to writeable txhashset extension");
+        assert!(
+            self.rollback,
+            "applied raw_tx to writeable txhashset extension"
+        );
         // Checkpoint the MMR positions before we apply the new tx,
         // anything goes wrong we will rewind to these positions.
@@ -474,7 +473,8 @@ impl<'a> Extension<'a> {
         let kernel_pos = self.kernel_pmmr.unpruned_size();
         // Build bitmap of output pos spent (as inputs) by this tx for rewind.
-        let rewind_rm_pos = tx.inputs
+        let rewind_rm_pos = tx
+            .inputs
             .iter()
             .filter_map(|x| self.get_output_pos(&x.commitment()).ok())
             .map(|x| x as u32)
@@ -671,7 +671,8 @@ impl<'a> Extension<'a> {
             }
         }
         // push new outputs in their MMR and save them in the index
-        let pos = self.output_pmmr
+        let pos = self
+            .output_pmmr
             .push(OutputIdentifier::from_output(out))
             .map_err(&ErrorKind::TxHashSetErr)?;
         self.batch.save_output_pos(&out.commitment(), pos)?;
@@ -716,7 +717,8 @@ impl<'a> Extension<'a> {
         // then calculate the Merkle Proof based on the known pos
         let pos = self.batch.get_output_pos(&output.commit)?;
-        let merkle_proof = self.output_pmmr
+        let merkle_proof = self
+            .output_pmmr
             .merkle_proof(pos)
             .map_err(&ErrorKind::TxHashSetErr)?;
@@ -1028,7 +1030,10 @@ impl<'a> Extension<'a> {
     /// fast sync where a reorg past the horizon could allow a whole rewrite of
     /// the kernel set.
     pub fn validate_kernel_history(&mut self, header: &BlockHeader) -> Result<(), Error> {
-        assert!(self.rollback, "verified kernel history on writeable txhashset extension");
+        assert!(
+            self.rollback,
+            "verified kernel history on writeable txhashset extension"
+        );
         let mut current = header.clone();
         loop {
@@ -1049,7 +1054,6 @@ impl<'a> Extension<'a> {
         }
         Ok(())
     }
 }

 /// Packages the txhashset data files into a zip and returns a Read to the
@@ -1066,7 +1070,7 @@ pub fn zip_read(root_dir: String, header: &BlockHeader) -> Result<File, Error> {
         fs::remove_dir_all(&temp_txhashset_path)?;
     }
     // Copy file to another dir
-    file::copy_dir_to(&txhashset_path,&temp_txhashset_path)?;
+    file::copy_dir_to(&txhashset_path, &temp_txhashset_path)?;
     // Check and remove file that are not supposed to be there
     check_and_remove_files(&temp_txhashset_path, header)?;
     // Compress zip
@@ -1081,7 +1085,11 @@ pub fn zip_read(root_dir: String, header: &BlockHeader) -> Result<File, Error> {
 /// Extract the txhashset data from a zip file and writes the content into the
 /// txhashset storage dir
-pub fn zip_write(root_dir: String, txhashset_data: File, header: &BlockHeader) -> Result<(), Error> {
+pub fn zip_write(
+    root_dir: String,
+    txhashset_data: File,
+    header: &BlockHeader,
+) -> Result<(), Error> {
     let txhashset_path = Path::new(&root_dir).join(TXHASHSET_SUBDIR);
     fs::create_dir_all(txhashset_path.clone())?;
     zip::decompress(txhashset_data, &txhashset_path)
@@ -1115,7 +1123,10 @@ fn check_and_remove_files(txhashset_path: &PathBuf, header: &BlockHeader) -> Res
     // Removing unexpected directories if needed
     if !dir_difference.is_empty() {
-        debug!(LOGGER, "Unexpected folder(s) found in txhashset folder, removing.");
+        debug!(
+            LOGGER,
+            "Unexpected folder(s) found in txhashset folder, removing."
+        );
         for diff in dir_difference {
             let diff_path = txhashset_path.join(diff);
             file::delete(diff_path)?;
@@ -1126,9 +1137,13 @@ fn check_and_remove_files(txhashset_path: &PathBuf, header: &BlockHeader) -> Res
     let pmmr_files_expected: HashSet<_> = PMMR_FILES
         .iter()
         .cloned()
-        .map(|s| if s.contains("pmmr_leaf.bin") {
-            format!("{}.{}", s, header.hash())}
-        else {String::from(s) })
+        .map(|s| {
+            if s.contains("pmmr_leaf.bin") {
+                format!("{}.{}", s, header.hash())
+            } else {
+                String::from(s)
+            }
+        })
         .collect();

     let subdirectories = fs::read_dir(txhashset_path)?;
@@ -1149,7 +1164,11 @@ fn check_and_remove_files(txhashset_path: &PathBuf, header: &BlockHeader) -> Res
         .cloned()
         .collect();
     if !difference.is_empty() {
-        debug!(LOGGER, "Unexpected file(s) found in txhashset subfolder {:?}, removing.", &subdirectory_path);
+        debug!(
+            LOGGER,
+            "Unexpected file(s) found in txhashset subfolder {:?}, removing.",
+            &subdirectory_path
+        );
         for diff in difference {
             let diff_path = subdirectory_path.join(diff);
             file::delete(diff_path)?;