Merge pull request #1340 from antiochp/rustfmt_core_crate

rustfmt on core crate (not run for a while)
Authored by hashmap, 2018-08-10 16:06:34 +02:00, committed by GitHub
commit f6383aa860
9 changed files with 63 additions and 49 deletions


@@ -14,12 +14,12 @@
 //! Blocks and blockheaders
+use chrono::naive::{MAX_DATE, MIN_DATE};
+use chrono::prelude::{DateTime, NaiveDateTime, Utc};
 use rand::{thread_rng, Rng};
 use std::collections::HashSet;
 use std::fmt;
 use std::iter::FromIterator;
-use chrono::naive::{MAX_DATE, MIN_DATE};
-use chrono::prelude::{DateTime, NaiveDateTime, Utc};
 use consensus::{self, exceeds_weight, reward, VerifySortOrder, REWARD};
 use core::committed::{self, Committed};
@@ -191,7 +191,9 @@ impl Readable for BlockHeader {
         ser_multiread!(reader, read_u64, read_u64, read_u64);
         let pow = Proof::read(reader)?;
-        if timestamp > MAX_DATE.and_hms(0,0,0).timestamp() || timestamp <MIN_DATE.and_hms(0,0,0).timestamp(){
+        if timestamp > MAX_DATE.and_hms(0, 0, 0).timestamp()
+            || timestamp < MIN_DATE.and_hms(0, 0, 0).timestamp()
+        {
             return Err(ser::Error::CorruptedData);
         }
@@ -512,7 +514,8 @@ impl Block {
         let header = self.header.clone();
         let nonce = thread_rng().next_u64();
-        let mut out_full = self.outputs
+        let mut out_full = self
+            .outputs
             .iter()
             .filter(|x| x.features.contains(OutputFeatures::COINBASE_OUTPUT))
             .cloned()
@@ -644,12 +647,14 @@ impl Block {
     /// we do not want to cut-through (all coinbase must be preserved)
     ///
     pub fn cut_through(self) -> Block {
-        let in_set = self.inputs
+        let in_set = self
+            .inputs
             .iter()
             .map(|inp| inp.commitment())
             .collect::<HashSet<_>>();
-        let out_set = self.outputs
+        let out_set = self
+            .outputs
             .iter()
             .filter(|out| !out.features.contains(OutputFeatures::COINBASE_OUTPUT))
             .map(|out| out.commitment())
@@ -657,12 +662,14 @@ impl Block {
         let to_cut_through = in_set.intersection(&out_set).collect::<HashSet<_>>();
-        let new_inputs = self.inputs
+        let new_inputs = self
+            .inputs
             .into_iter()
             .filter(|inp| !to_cut_through.contains(&inp.commitment()))
             .collect::<Vec<_>>();
-        let new_outputs = self.outputs
+        let new_outputs = self
+            .outputs
             .into_iter()
             .filter(|out| !to_cut_through.contains(&out.commitment()))
             .collect::<Vec<_>>();
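
The cut-through logic in the hunk above is essentially a set intersection over commitments: anything created and spent inside the same block disappears from both lists (in the real code, coinbase outputs are filtered out of the candidate set first, which is what "all coinbase must be preserved" refers to). A standalone sketch of the same idea, using plain u64 values as stand-ins for the real Pedersen commitments; the names are illustrative, not the grin_core API:

use std::collections::HashSet;

fn cut_through(inputs: Vec<u64>, outputs: Vec<u64>) -> (Vec<u64>, Vec<u64>) {
    // Commitments spent by inputs and created by outputs of the same block.
    let in_set: HashSet<u64> = inputs.iter().cloned().collect();
    let out_set: HashSet<u64> = outputs.iter().cloned().collect();

    // Anything appearing on both sides is created and spent within the block,
    // so it can be dropped from both the input and the output list.
    let to_cut: HashSet<u64> = in_set.intersection(&out_set).cloned().collect();

    let new_inputs = inputs.into_iter().filter(|c| !to_cut.contains(c)).collect();
    let new_outputs = outputs.into_iter().filter(|c| !to_cut.contains(c)).collect();
    (new_inputs, new_outputs)
}

fn main() {
    // Commitment 3 is both spent and created here, so it is cut through.
    let (ins, outs) = cut_through(vec![1, 2, 3], vec![3, 4]);
    assert_eq!(ins, vec![1, 2]);
    assert_eq!(outs, vec![4]);
}
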
@@ -738,7 +745,8 @@ impl Block {
     // Verify that no input is spending an output from the same block.
     fn verify_cut_through(&self) -> Result<(), Error> {
         for inp in &self.inputs {
-            if self.outputs
+            if self
+                .outputs
                 .iter()
                 .any(|out| out.commitment() == inp.commitment())
             {
@@ -781,12 +789,14 @@ impl Block {
     /// Check the sum of coinbase-marked outputs match
     /// the sum of coinbase-marked kernels accounting for fees.
     pub fn verify_coinbase(&self) -> Result<(), Error> {
-        let cb_outs = self.outputs
+        let cb_outs = self
+            .outputs
             .iter()
             .filter(|out| out.features.contains(OutputFeatures::COINBASE_OUTPUT))
             .collect::<Vec<&Output>>();
-        let cb_kerns = self.kernels
+        let cb_kerns = self
+            .kernels
             .iter()
             .filter(|kernel| kernel.features.contains(KernelFeatures::COINBASE_KERNEL))
             .collect::<Vec<&TxKernel>>();
@@ -801,10 +811,7 @@ impl Block {
             vec![over_commit],
         )?;
-        let kerns_sum = secp.commit_sum(
-            cb_kerns.iter().map(|x| x.excess).collect(),
-            vec![],
-        )?;
+        let kerns_sum = secp.commit_sum(cb_kerns.iter().map(|x| x.excess).collect(), vec![])?;
         // Verify the kernel sum equals the output sum accounting for block fees.
         if kerns_sum != out_adjust_sum {
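
The balance check above relies on the additive homomorphism of Pedersen commitments: subtracting a value-only commitment to the reward (plus fees) from the coinbase outputs should leave exactly the blinding part, which must match the sum of the coinbase kernel excesses. A deliberately simplified sketch that models a commitment v*H + r*G as an integer pair; the helper names are hypothetical and the real code goes through secp256k1's commit_sum:

// Toy model only: a commitment v*H + r*G is represented as (value, blind).
#[derive(Clone, Copy, PartialEq, Debug)]
struct Commit {
    value: i64, // stand-in for the v*H component
    blind: i64, // stand-in for the r*G component
}

// Mimics commit_sum(positives, negatives): add one side, subtract the other.
fn commit_sum(positives: &[Commit], negatives: &[Commit]) -> Commit {
    let sum = |cs: &[Commit]| cs.iter().fold((0, 0), |acc, c| (acc.0 + c.value, acc.1 + c.blind));
    let (pv, pb) = sum(positives);
    let (nv, nb) = sum(negatives);
    Commit { value: pv - nv, blind: pb - nb }
}

fn main() {
    let reward_plus_fees = 60;
    // A coinbase output committing to exactly the reward plus fees.
    let cb_outs = [Commit { value: reward_plus_fees, blind: 42 }];
    // The coinbase kernel excess carries only the blinding factor.
    let cb_kerns = [Commit { value: 0, blind: 42 }];

    // Subtract the value-only over-commitment, leaving just the blinding part...
    let over_commit = Commit { value: reward_plus_fees, blind: 0 };
    let out_adjust_sum = commit_sum(&cb_outs, &[over_commit]);
    // ...and compare it against the sum of kernel excesses.
    let kerns_sum = commit_sum(&cb_kerns, &[]);
    assert_eq!(kerns_sum, out_adjust_sum);
}
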


@@ -228,7 +228,8 @@ impl<W: ser::Writeable> Hashed for W {
 impl<T: Writeable> consensus::VerifySortOrder<T> for Vec<T> {
     fn verify_sort_order(&self) -> Result<(), consensus::Error> {
-        if self.iter()
+        if self
+            .iter()
             .map(|item| item.hash())
             .collect::<Vec<_>>()
             .windows(2)
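
The hunk above is the generic "is this vector sorted by hash?" check: hash every item, then scan adjacent pairs with windows(2). A minimal, self-contained version of the same pattern operating directly on byte vectors (standing in for the crate's Hash type):

fn verify_sort_order(hashes: &[Vec<u8>]) -> Result<(), &'static str> {
    // any() over adjacent pairs: fail as soon as one pair is out of ascending order.
    if hashes.windows(2).any(|pair| pair[0] > pair[1]) {
        return Err("not sorted");
    }
    Ok(())
}

fn main() {
    assert!(verify_sort_order(&[vec![1], vec![2, 0], vec![3]]).is_ok());
    assert!(verify_sort_order(&[vec![2], vec![1]]).is_err());
}
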


@@ -123,7 +123,7 @@ impl Proof {
 impl Readable for Proof {
     fn read(reader: &mut Reader) -> Result<Proof, Error> {
         let cuckoo_sizeshift = reader.read_u8()?;
-        if cuckoo_sizeshift == 0 || cuckoo_sizeshift > 64 {
+        if cuckoo_sizeshift == 0 || cuckoo_sizeshift > 64 {
             return Err(Error::CorruptedData);
         }


@@ -64,11 +64,7 @@
     /// to rewind to as well as bitmaps representing the positions added and
     /// removed since the rewind position. These are what we will "undo"
     /// during the rewind.
-    fn rewind(
-        &mut self,
-        position: u64,
-        rewind_rm_pos: &Bitmap,
-    ) -> Result<(), String>;
+    fn rewind(&mut self, position: u64, rewind_rm_pos: &Bitmap) -> Result<(), String>;
     /// Get a Hash by insertion position.
     fn get_hash(&self, position: u64) -> Option<Hash>;
@@ -268,7 +264,8 @@
         while bintree_postorder_height(pos + 1) > height {
             let left_sibling = bintree_jump_left_sibling(pos);
-            let left_hash = self.backend
+            let left_hash = self
+                .backend
                 .get_from_file(left_sibling)
                 .ok_or("missing left sibling in tree, should not have been pruned")?;
@@ -297,11 +294,7 @@
     /// that had been canceled. Expects a position in the PMMR to rewind and
     /// bitmaps representing the positions added and removed that we want to
     /// "undo".
-    pub fn rewind(
-        &mut self,
-        position: u64,
-        rewind_rm_pos: &Bitmap,
-    ) -> Result<(), String> {
+    pub fn rewind(&mut self, position: u64, rewind_rm_pos: &Bitmap) -> Result<(), String> {
         // Identify which actual position we should rewind to as the provided
         // position is a leaf. We traverse the MMR to inclue any parent(s) that
         // need to be included for the MMR to be valid.


@@ -318,8 +318,7 @@ impl Readable for Transaction {
         // Treat any validation issues as data corruption.
         // An example of this would be reading a tx
         // that exceeded the allowed number of inputs.
-        tx.validate()
-            .map_err(|_| ser::Error::CorruptedData)?;
+        tx.validate().map_err(|_| ser::Error::CorruptedData)?;
         Ok(tx)
     }
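
The one-liner above is the usual "a validation failure here means the bytes were bad" pattern: whatever validate() reports, the reader collapses it into a single corrupted-data error with map_err. A small self-contained sketch of the same idea, with hypothetical types rather than the crate's ser module:

#[derive(Debug)]
enum ReadError {
    CorruptedData,
}

struct Tx {
    num_inputs: usize,
}

impl Tx {
    fn validate(&self) -> Result<(), String> {
        if self.num_inputs > 10_000 {
            return Err("too many inputs".to_string());
        }
        Ok(())
    }
}

fn read_tx(num_inputs: usize) -> Result<Tx, ReadError> {
    let tx = Tx { num_inputs };
    // Whatever validate() reports, the reader only ever surfaces CorruptedData.
    tx.validate().map_err(|_| ReadError::CorruptedData)?;
    Ok(tx)
}

fn main() {
    assert!(read_tx(2).is_ok());
    assert!(read_tx(50_000).is_err());
}
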
@@ -488,7 +487,8 @@ impl Transaction {
     // Verify that no input is spending an output from the same block.
     fn verify_cut_through(&self) -> Result<(), Error> {
         for inp in &self.inputs {
-            if self.outputs
+            if self
+                .outputs
                 .iter()
                 .any(|out| out.commitment() == inp.commitment())
             {
@@ -509,7 +509,11 @@ impl Transaction {
     // Verify we have no outputs tagged as COINBASE_OUTPUT.
     fn verify_output_features(&self) -> Result<(), Error> {
-        if self.outputs.iter().any(|x| x.features.contains(OutputFeatures::COINBASE_OUTPUT)) {
+        if self
+            .outputs
+            .iter()
+            .any(|x| x.features.contains(OutputFeatures::COINBASE_OUTPUT))
+        {
             return Err(Error::InvalidOutputFeatures);
         }
         Ok(())
@@ -517,7 +521,11 @@ impl Transaction {
     // Verify we have no kernels tagged as COINBASE_KERNEL.
     fn verify_kernel_features(&self) -> Result<(), Error> {
-        if self.kernels.iter().any(|x| x.features.contains(KernelFeatures::COINBASE_KERNEL)) {
+        if self
+            .kernels
+            .iter()
+            .any(|x| x.features.contains(KernelFeatures::COINBASE_KERNEL))
+        {
             return Err(Error::InvalidKernelFeatures);
         }
         Ok(())
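
Both hunks above apply the same guard: scan the collection with iter().any() for an entry carrying a coinbase feature flag, and reject the transaction if one is found. A minimal stand-alone version using a plain u8 bitmask in place of the bitflags-based OutputFeatures type (names are illustrative):

const COINBASE_OUTPUT: u8 = 0b0000_0001;

struct Output {
    features: u8,
}

fn verify_output_features(outputs: &[Output]) -> Result<(), &'static str> {
    // A regular transaction must not carry any coinbase-marked outputs.
    if outputs.iter().any(|x| x.features & COINBASE_OUTPUT != 0) {
        return Err("invalid output features");
    }
    Ok(())
}

fn main() {
    assert!(verify_output_features(&[Output { features: 0 }]).is_ok());
    assert!(verify_output_features(&[Output { features: COINBASE_OUTPUT }]).is_err());
}
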


@@ -14,7 +14,7 @@
 //! Definition of the genesis block. Placeholder for now.
-use chrono::prelude::{Utc, TimeZone};
+use chrono::prelude::{TimeZone, Utc};
 use consensus;
 use core;
@@ -98,13 +98,12 @@ pub fn genesis_testnet3() -> core::Block {
         total_difficulty: Difficulty::from_num(global::initial_block_difficulty()),
         nonce: 4956988373127691,
         pow: core::Proof::new(vec![
-            0xa420dc, 0xc8ffee, 0x10e433e, 0x1de9428, 0x2ed4cea, 0x52d907b,
-            0x5af0e3f, 0x6b8fcae, 0x8319b53, 0x845ca8c, 0x8d2a13e, 0x8d6e4cc,
-            0x9349e8d, 0xa7a33c5, 0xaeac3cb, 0xb193e23, 0xb502e19, 0xb5d9804,
-            0xc9ac184, 0xd4f4de3, 0xd7a23b8, 0xf1d8660, 0xf443756, 0x10b833d2,
-            0x11418fc5, 0x11b8aeaf, 0x131836ec, 0x132ab818, 0x13a46a55, 0x13df89fe,
-            0x145d65b5, 0x166f9c3a, 0x166fe0ef, 0x178cb36f, 0x185baf68, 0x1bbfe563,
-            0x1bd637b4, 0x1cfc8382, 0x1d1ed012, 0x1e391ca5, 0x1e999b4c, 0x1f7c6d21,
+            0xa420dc, 0xc8ffee, 0x10e433e, 0x1de9428, 0x2ed4cea, 0x52d907b, 0x5af0e3f,
+            0x6b8fcae, 0x8319b53, 0x845ca8c, 0x8d2a13e, 0x8d6e4cc, 0x9349e8d, 0xa7a33c5,
+            0xaeac3cb, 0xb193e23, 0xb502e19, 0xb5d9804, 0xc9ac184, 0xd4f4de3, 0xd7a23b8,
+            0xf1d8660, 0xf443756, 0x10b833d2, 0x11418fc5, 0x11b8aeaf, 0x131836ec, 0x132ab818,
+            0x13a46a55, 0x13df89fe, 0x145d65b5, 0x166f9c3a, 0x166fe0ef, 0x178cb36f, 0x185baf68,
+            0x1bbfe563, 0x1bd637b4, 0x1cfc8382, 0x1d1ed012, 0x1e391ca5, 0x1e999b4c, 0x1f7c6d21,
         ]),
         ..Default::default()
     },


@@ -38,8 +38,8 @@ extern crate serde_derive;
 extern crate siphasher;
 #[macro_use]
 extern crate slog;
-extern crate failure;
 extern crate chrono;
+extern crate failure;
 #[macro_use]
 extern crate failure_derive;


@@ -414,15 +414,21 @@ mod test {
         assert!(
             !Cuckoo::from_hash(blake2(&[49]).as_bytes(), 20).verify(&Proof::new(vec![0; 42]), 75)
         );
-        assert!(!Cuckoo::from_hash(blake2(&[49]).as_bytes(), 20)
-            .verify(&Proof::new(vec![0xffff; 42]), 75));
+        assert!(
+            !Cuckoo::from_hash(blake2(&[49]).as_bytes(), 20)
+                .verify(&Proof::new(vec![0xffff; 42]), 75)
+        );
         // wrong data for proof
-        assert!(!Cuckoo::from_hash(blake2(&[50]).as_bytes(), 20)
-            .verify(&Proof::new(V1.to_vec().clone()), 75));
+        assert!(
+            !Cuckoo::from_hash(blake2(&[50]).as_bytes(), 20)
+                .verify(&Proof::new(V1.to_vec().clone()), 75)
+        );
         let mut test_header = [0; 32];
         test_header[0] = 24;
-        assert!(!Cuckoo::from_hash(blake2(&test_header).as_bytes(), 20)
-            .verify(&Proof::new(V4.to_vec().clone()), 50));
+        assert!(
+            !Cuckoo::from_hash(blake2(&test_header).as_bytes(), 20)
+                .verify(&Proof::new(V4.to_vec().clone()), 50)
+        );
     }
     #[test]


@@ -29,9 +29,9 @@
 #![warn(missing_docs)]
 extern crate blake2_rfc as blake2;
+extern crate chrono;
 extern crate rand;
 extern crate serde;
-extern crate chrono;
 extern crate grin_util as util;