merge T4 into master - rustfmt

yeastplume 2018-10-18 11:23:22 +01:00
parent f94ede9af3
commit 6980278b95
23 changed files with 89 additions and 121 deletions

View file

@@ -851,9 +851,7 @@ impl<'a> Extension<'a> {
// Find the "cutoff" pos in the output MMR based on the
// header from 1,000 blocks ago.
let cutoff_height = height
.checked_sub(global::coinbase_maturity())
.unwrap_or(0);
let cutoff_height = height.checked_sub(global::coinbase_maturity()).unwrap_or(0);
let cutoff_header = self.batch.get_header_by_height(cutoff_height)?;
let cutoff_pos = cutoff_header.output_mmr_size;
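
Aside: the reformatted expression keeps the underflow guard intact. checked_sub returns None whenever the coinbase maturity exceeds the current height, and unwrap_or(0) clamps the cutoff to the genesis height. A minimal standalone sketch of the idiom, using an illustrative constant in place of global::coinbase_maturity():

```rust
// Illustrative constant; the real value comes from global::coinbase_maturity().
const COINBASE_MATURITY: u64 = 1_000;

fn cutoff_height(height: u64) -> u64 {
    // checked_sub yields None if the subtraction would underflow,
    // so heights inside the maturity window clamp to 0 (genesis).
    height.checked_sub(COINBASE_MATURITY).unwrap_or(0)
}

fn main() {
    assert_eq!(cutoff_height(1_500), 500);
    assert_eq!(cutoff_height(300), 0); // would underflow, clamped to genesis
}
```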

View file

@@ -82,8 +82,7 @@ fn write_file(db_root: String) {
.join("txhashset")
.join("kernel")
.join("strange0"),
)
.unwrap();
).unwrap();
OpenOptions::new()
.create(true)
.write(true)
@@ -98,8 +97,7 @@ fn write_file(db_root: String) {
.join("txhashset")
.join("strange_dir")
.join("strange2"),
)
.unwrap();
).unwrap();
fs::create_dir(
Path::new(&db_root)
.join("txhashset")
@@ -115,8 +113,7 @@ fn write_file(db_root: String) {
.join("strange_dir")
.join("strange_subdir")
.join("strange3"),
)
.unwrap();
).unwrap();
}
fn txhashset_contains_expected_files(dirname: String, path_buf: PathBuf) -> bool {

View file

@@ -170,9 +170,9 @@ mod test {
let foo = Foo(0);
let expected_hash = Hash::from_hex(
"81e47a19e6b29b0a65b9591762ce5143ed30d0261e5d24a3201752506b20f15c",
).unwrap();
let expected_hash =
Hash::from_hex("81e47a19e6b29b0a65b9591762ce5143ed30d0261e5d24a3201752506b20f15c")
.unwrap();
assert_eq!(foo.hash(), expected_hash);
let other_hash = Hash::default();
@@ -182,9 +182,9 @@ mod test {
);
let foo = Foo(5);
let expected_hash = Hash::from_hex(
"3a42e66e46dd7633b57d1f921780a1ac715e6b93c19ee52ab714178eb3a9f673",
).unwrap();
let expected_hash =
Hash::from_hex("3a42e66e46dd7633b57d1f921780a1ac715e6b93c19ee52ab714178eb3a9f673")
.unwrap();
assert_eq!(foo.hash(), expected_hash);
let other_hash = Hash::default();
@@ -194,14 +194,14 @@ mod test {
);
let foo = Foo(5);
let expected_hash = Hash::from_hex(
"3a42e66e46dd7633b57d1f921780a1ac715e6b93c19ee52ab714178eb3a9f673",
).unwrap();
let expected_hash =
Hash::from_hex("3a42e66e46dd7633b57d1f921780a1ac715e6b93c19ee52ab714178eb3a9f673")
.unwrap();
assert_eq!(foo.hash(), expected_hash);
let other_hash = Hash::from_hex(
"81e47a19e6b29b0a65b9591762ce5143ed30d0261e5d24a3201752506b20f15c",
).unwrap();
let other_hash =
Hash::from_hex("81e47a19e6b29b0a65b9591762ce5143ed30d0261e5d24a3201752506b20f15c")
.unwrap();
assert_eq!(
foo.short_id(&other_hash, foo.0),
ShortId::from_hex("3e9cde72a687").unwrap()

View file

@@ -1278,9 +1278,9 @@ mod test {
commit: commit,
};
let block_hash = Hash::from_hex(
"3a42e66e46dd7633b57d1f921780a1ac715e6b93c19ee52ab714178eb3a9f673",
).unwrap();
let block_hash =
Hash::from_hex("3a42e66e46dd7633b57d1f921780a1ac715e6b93c19ee52ab714178eb3a9f673")
.unwrap();
let nonce = 0;

View file

@@ -69,8 +69,7 @@ impl VerifierCache for LruVerifierCache {
.kernel_sig_verification_cache
.get_mut(&x.hash())
.unwrap_or(&mut false)
})
.cloned()
}).cloned()
.collect::<Vec<_>>();
debug!(
LOGGER,
@@ -89,8 +88,7 @@ impl VerifierCache for LruVerifierCache {
.rangeproof_verification_cache
.get_mut(&x.proof.hash())
.unwrap_or(&mut false)
})
.cloned()
}).cloned()
.collect::<Vec<_>>();
debug!(
LOGGER,
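
Formatting aside, the logic in both hunks filters the batch down to the items whose signatures or range proofs are not yet marked verified in the LRU cache, treating unknown entries as "not verified". A rough standalone sketch of that filter, with a plain HashMap standing in for the LRU cache (names and types are illustrative):

```rust
use std::collections::HashMap;

// Illustrative: key = kernel (or proof) hash, value = "already verified?".
fn filter_unverified(cache: &HashMap<u64, bool>, hashes: &[u64]) -> Vec<u64> {
    hashes
        .iter()
        .filter(|&&h| !*cache.get(&h).unwrap_or(&false))
        .cloned()
        .collect::<Vec<_>>()
}
```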

View file

@@ -119,11 +119,10 @@ pub fn genesis_testnet4() -> core::Block {
proof: Proof::new(vec![
0x46f3b4, 0x1135f8c, 0x1a1596f, 0x1e10f71, 0x41c03ea, 0x63fe8e7, 0x65af34f,
0x73c16d3, 0x8216dc3, 0x9bc75d0, 0xae7d9ad, 0xc1cb12b, 0xc65e957, 0xf67a152,
0xfac6559, 0x100c3d71, 0x11eea08b, 0x1225dfbb, 0x124d61a1, 0x132a14b4,
0x13f4ec38, 0x1542d236, 0x155f2df0, 0x1577394e, 0x163c3513, 0x19349845,
0x19d46953, 0x19f65ed4, 0x1a0411b9, 0x1a2fa039, 0x1a72a06c, 0x1b02ddd2,
0x1b594d59, 0x1b7bffd3, 0x1befe12e, 0x1c82e4cd, 0x1d492478, 0x1de132a5,
0x1e578b3c, 0x1ed96855, 0x1f222896, 0x1fea0da6,
0xfac6559, 0x100c3d71, 0x11eea08b, 0x1225dfbb, 0x124d61a1, 0x132a14b4, 0x13f4ec38,
0x1542d236, 0x155f2df0, 0x1577394e, 0x163c3513, 0x19349845, 0x19d46953, 0x19f65ed4,
0x1a0411b9, 0x1a2fa039, 0x1a72a06c, 0x1b02ddd2, 0x1b594d59, 0x1b7bffd3, 0x1befe12e,
0x1c82e4cd, 0x1d492478, 0x1de132a5, 0x1e578b3c, 0x1ed96855, 0x1f222896, 0x1fea0da6,
]),
},
..Default::default()

View file

@@ -19,8 +19,7 @@
use consensus::HeaderInfo;
use consensus::{
graph_weight, BASE_EDGE_BITS, BLOCK_TIME_SEC, COINBASE_MATURITY, CUT_THROUGH_HORIZON,
DAY_HEIGHT, DIFFICULTY_ADJUST_WINDOW, INITIAL_DIFFICULTY, PROOFSIZE,
SECOND_POW_EDGE_BITS,
DAY_HEIGHT, DIFFICULTY_ADJUST_WINDOW, INITIAL_DIFFICULTY, PROOFSIZE, SECOND_POW_EDGE_BITS,
};
use pow::{self, CuckatooContext, EdgeType, PoWContext};
/// An enum collecting sets of parameters used throughout the
@@ -72,7 +71,7 @@ pub const STUCK_PEER_KICK_TIME: i64 = 2 * 3600 * 1000;
/// Testnet 4 initial block difficulty
/// 1_000 times natural scale factor for cuckatoo29
pub const TESTNET4_INITIAL_DIFFICULTY: u64 = 1_000 * (2<<(29-24)) * 29;
pub const TESTNET4_INITIAL_DIFFICULTY: u64 = 1_000 * (2 << (29 - 24)) * 29;
/// Trigger compaction check on average every day for FAST_SYNC_NODE,
/// roll the dice on every block to decide,
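
For reference, the reformatted constant evaluates to the same value either way: 2 << (29 - 24) is 64, so the initial difficulty is 1_000 * 64 * 29 = 1_856_000. A quick check:

```rust
fn main() {
    assert_eq!(2 << (29 - 24), 64);
    assert_eq!(1_000u64 * (2 << (29 - 24)) * 29, 1_856_000);
}
```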

View file

@@ -149,10 +149,7 @@ where
T: EdgeType,
{
/// Instantiates new params and calculate edge mask, etc
pub fn new(
edge_bits: u8,
proof_size: usize,
) -> Result<CuckooParams<T>, Error> {
pub fn new(edge_bits: u8, proof_size: usize) -> Result<CuckooParams<T>, Error> {
let num_edges = (1 as u64) << edge_bits;
let edge_mask = to_edge!(num_edges - 1);
Ok(CuckooParams {
@@ -165,11 +162,7 @@ where
}
/// Reset the main keys used for siphash from the header and nonce
pub fn reset_header_nonce(
&mut self,
header: Vec<u8>,
nonce: Option<u32>,
) -> Result<(), Error> {
pub fn reset_header_nonce(&mut self, header: Vec<u8>, nonce: Option<u32>) -> Result<(), Error> {
self.siphash_keys = set_header_nonce(header, nonce)?;
Ok(())
}
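
The "calculate edge mask" step mentioned in the doc comment above is a power-of-two mask: with edge_bits = 16 there are 1 << 16 = 65_536 edges and the mask is 0xFFFF. A minimal sketch of that derivation (plain u64, without the to_edge! conversion used in the real code):

```rust
fn edge_params(edge_bits: u8) -> (u64, u64) {
    let num_edges = 1u64 << edge_bits; // 2^edge_bits edges
    let edge_mask = num_edges - 1;     // low edge_bits bits set
    (num_edges, edge_mask)
}

fn main() {
    assert_eq!(edge_params(16), (65_536, 0xFFFF));
}
```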

View file

@@ -165,15 +165,9 @@ impl<T> PoWContext<T> for CuckatooContext<T>
where
T: EdgeType,
{
fn new(
edge_bits: u8,
proof_size: usize,
max_sols: u32,
) -> Result<Box<Self>, Error> {
fn new(edge_bits: u8, proof_size: usize, max_sols: u32) -> Result<Box<Self>, Error> {
Ok(Box::new(CuckatooContext::<T>::new_impl(
edge_bits,
proof_size,
max_sols,
edge_bits, proof_size, max_sols,
)?))
}

View file

@@ -40,15 +40,9 @@ impl<T> PoWContext<T> for CuckooContext<T>
where
T: EdgeType,
{
fn new(
edge_bits: u8,
proof_size: usize,
max_sols: u32,
) -> Result<Box<Self>, Error> {
fn new(edge_bits: u8, proof_size: usize, max_sols: u32) -> Result<Box<Self>, Error> {
Ok(Box::new(CuckooContext::<T>::new_impl(
edge_bits,
proof_size,
max_sols,
edge_bits, proof_size, max_sols,
)?))
}
@@ -324,21 +318,21 @@ mod test {
0x4cef6, 0x4dfdf, 0x5036b, 0x5d528, 0x7d76b, 0x80958, 0x81649, 0x8a064, 0x935fe, 0x93c28,
0x93fc9, 0x9aec5, 0x9c5c8, 0xa00a7, 0xa7256, 0xaa35e, 0xb9e04, 0xc8835, 0xcda49, 0xd72ea,
0xd7f80, 0xdaa3a, 0xdafce, 0xe03fe, 0xe55a2, 0xe6e60, 0xebb9d, 0xf5248, 0xf6a4b, 0xf6d32,
0xf7c61, 0xfd9e9
0xf7c61, 0xfd9e9,
];
static V2: [u64; 42] = [
0xab0, 0x403c, 0x509c, 0x127c0, 0x1a0b3, 0x1ffe4, 0x26180, 0x2a20a, 0x35559, 0x36dd3,
0x3cb20, 0x4992f, 0x55b20, 0x5b507, 0x66e58, 0x6784d, 0x6fda8, 0x7363d, 0x76dd6, 0x7f13b,
0x84672, 0x85724, 0x991cf, 0x9a6fe, 0x9b0c5, 0xa5019, 0xa7207, 0xaf32f, 0xc29f3, 0xc39d3,
0xc78ed, 0xc9e75, 0xcd0db, 0xcd81e, 0xd02e0, 0xd05c4, 0xd8f99, 0xd9359, 0xdff3b, 0xea623,
0xf9100, 0xfc966
0xf9100, 0xfc966,
];
static V3: [u64; 42] = [
0x14ca, 0x1e80, 0x587c, 0xa2d4, 0x14f6b, 0x1b100, 0x1b74c, 0x2477d, 0x29ba4, 0x33f25,
0x4c55f, 0x4d280, 0x50ffa, 0x53900, 0x5cf62, 0x63f66, 0x65623, 0x6fb19, 0x7a19e, 0x82eef,
0x83d2d, 0x88015, 0x8e6c5, 0x91086, 0x97429, 0x9aa27, 0xa01b7, 0xa304b, 0xafa06, 0xb1cb3,
0xbb9fc, 0xbf345, 0xc0761, 0xc0e78, 0xc5b99, 0xc9f09, 0xcc62c, 0xceb6e, 0xd98ad, 0xeecb3,
0xef966, 0xfef9b
0xef966, 0xfef9b,
];
// cuckoo28 at 50% edges of letter 'u'
static V4: [u64; 42] = [
@@ -459,7 +453,7 @@ mod test {
T: EdgeType,
{
let h = [0 as u8; 32];
for n in [45 as u32, 49,131,143,151].iter() {
for n in [45 as u32, 49, 131, 143, 151].iter() {
let mut cuckoo_ctx = CuckooContext::<T>::new(16, 42, 10)?;
cuckoo_ctx.set_header_nonce(h.to_vec(), Some(*n), false)?;
let res = cuckoo_ctx.find_cycles()?;

View file

@@ -14,7 +14,7 @@
/// Types for a Cuck(at)oo proof of work and its encapsulation as a fully usable
/// proof of work within a block header.
use std::cmp::{min,max};
use std::cmp::{max, min};
use std::ops::{Add, Div, Mul, Sub};
use std::{fmt, iter};
@@ -36,11 +36,7 @@ where
T: EdgeType,
{
/// Create new instance of context with appropriate parameters
fn new(
edge_bits: u8,
proof_size: usize,
max_sols: u32,
) -> Result<Box<Self>, Error>;
fn new(edge_bits: u8, proof_size: usize, max_sols: u32) -> Result<Box<Self>, Error>;
/// Sets the header along with an optional nonce at the end
/// solve: whether to set up structures for a solve (true) or just validate (false)
fn set_header_nonce(
@@ -408,10 +404,7 @@ impl Readable for Proof {
}
nonces.push(nonce);
}
Ok(Proof {
edge_bits,
nonces,
})
Ok(Proof { edge_bits, nonces })
}
}

View file

@@ -123,7 +123,7 @@ fn get_diff_stats(chain_sim: &Vec<HeaderInfo>) -> DiffStats {
let earliest_block_height = tip_height as i64 - last_blocks.len() as i64;
let earliest_ts = last_blocks[0].timestamp;
let latest_ts = last_blocks[last_blocks.len()-1].timestamp;
let latest_ts = last_blocks[last_blocks.len() - 1].timestamp;
let mut i = 1;
@@ -558,12 +558,12 @@ fn hard_forks() {
assert!(valid_header_version(0, 1));
assert!(valid_header_version(10, 1));
assert!(!valid_header_version(10, 2));
assert!(valid_header_version(YEAR_HEIGHT/2-1, 1));
assert!(valid_header_version(YEAR_HEIGHT / 2 - 1, 1));
// v2 not active yet
assert!(!valid_header_version(YEAR_HEIGHT/2, 2));
assert!(!valid_header_version(YEAR_HEIGHT/2, 1));
assert!(!valid_header_version(YEAR_HEIGHT / 2, 2));
assert!(!valid_header_version(YEAR_HEIGHT / 2, 1));
assert!(!valid_header_version(YEAR_HEIGHT, 1));
assert!(!valid_header_version(YEAR_HEIGHT/2+1, 2));
assert!(!valid_header_version(YEAR_HEIGHT / 2 + 1, 2));
}
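
These assertions pin down the validity window at this point in the codebase: only version 1 headers are accepted, and only below YEAR_HEIGHT / 2. A minimal sketch that is consistent with the asserts above (not the actual consensus code, which gains further windows as later versions activate):

```rust
const YEAR_HEIGHT: u64 = 524_160; // illustrative; derived from the block time in consensus

// Sketch: version 1 is valid only for the first half-year of blocks.
fn valid_header_version(height: u64, version: u16) -> bool {
    version == 1 && height < YEAR_HEIGHT / 2
}

fn main() {
    assert!(valid_header_version(0, 1));
    assert!(!valid_header_version(YEAR_HEIGHT / 2, 1));
    assert!(!valid_header_version(YEAR_HEIGHT / 2, 2)); // v2 not active yet
}
```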
// #[test]

View file

@@ -409,7 +409,8 @@ impl ExtendedPrivKey {
hasher.append_sha512(&be_n);
let result = hasher.result_sha512();
let mut sk = SecretKey::from_slice(secp, &result[..32]).map_err(Error::Ecdsa)?;
sk.add_assign(secp, &self.secret_key).map_err(Error::Ecdsa)?;
sk.add_assign(secp, &self.secret_key)
.map_err(Error::Ecdsa)?;
Ok(ExtendedPrivKey {
network: self.network,
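
For context on the call being re-wrapped here: the derivation parses the first 32 bytes of the SHA-512 result as a secp256k1 scalar and tweak-adds it to the parent secret key (the add_assign call), the usual BIP32-style child-key step. A toy sketch of the idea with plain integers, not real curve arithmetic:

```rust
// Toy model only: the real code adds secp256k1 scalars modulo the curve order n.
fn derive_child_scalar(parent_sk: u64, hashed_scalar: u64, n: u64) -> u64 {
    // child = (hashed_scalar + parent_sk) mod n  -- the add_assign tweak above
    (hashed_scalar + parent_sk) % n
}

fn main() {
    assert_eq!(derive_child_scalar(7, 5, 10), 2);
}
```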

View file

@@ -85,8 +85,7 @@ impl Keychain for ExtKeychain {
} else {
None
}
})
.collect();
}).collect();
let mut neg_keys: Vec<SecretKey> = blind_sum
.negative_key_ids
@@ -98,8 +97,7 @@ impl Keychain for ExtKeychain {
} else {
None
}
})
.collect();
}).collect();
pos_keys.extend(
&blind_sum
@@ -220,8 +218,7 @@ mod test {
&BlindSum::new()
.add_blinding_factor(BlindingFactor::from_secret_key(skey1))
.add_blinding_factor(BlindingFactor::from_secret_key(skey2))
)
.unwrap(),
).unwrap(),
BlindingFactor::from_secret_key(skey3),
);
}

View file

@@ -63,13 +63,11 @@ pub fn create() -> Box<View> {
let mut s: ViewRef<SelectView<&str>> = c.find_id(MAIN_MENU).unwrap();
s.select_down(1)(c);
Some(EventResult::Consumed(None));
})
.on_pre_event('k', move |c| {
}).on_pre_event('k', move |c| {
let mut s: ViewRef<SelectView<&str>> = c.find_id(MAIN_MENU).unwrap();
s.select_up(1)(c);
Some(EventResult::Consumed(None));
})
.on_pre_event(Key::Tab, move |c| {
}).on_pre_event(Key::Tab, move |c| {
let mut s: ViewRef<SelectView<&str>> = c.find_id(MAIN_MENU).unwrap();
if s.selected_id().unwrap() == s.len() - 1 {
s.set_selection(0)(c);

View file

@@ -444,9 +444,9 @@ where
// write details file
let mut details_file =
File::create(details_file_path).context(ErrorKind::FileWallet(&"Could not create "))?;
let res_json = serde_json::to_string_pretty(&self.details).context(ErrorKind::FileWallet(
"Error serializing wallet details file",
))?;
let res_json = serde_json::to_string_pretty(&self.details).context(
ErrorKind::FileWallet("Error serializing wallet details file"),
)?;
details_file
.write_all(res_json.into_bytes().as_slice())
.context(ErrorKind::FileWallet(&"Error writing wallet details file"))

View file

@@ -96,8 +96,8 @@ where
let mut apis = ApiServer::new();
info!(LOGGER, "Starting HTTP Owner API server at {}.", addr);
let socket_addr: SocketAddr = addr.parse().expect("unable to parse socket address");
let api_thread = apis
.start(socket_addr, router, tls_config)
let api_thread =
apis.start(socket_addr, router, tls_config)
.context(ErrorKind::GenericError(
"API thread failed to start".to_string(),
))?;
@@ -128,8 +128,8 @@ where
let mut apis = ApiServer::new();
info!(LOGGER, "Starting HTTP Foreign API server at {}.", addr);
let socket_addr: SocketAddr = addr.parse().expect("unable to parse socket address");
let api_thread = apis
.start(socket_addr, router, tls_config)
let api_thread =
apis.start(socket_addr, router, tls_config)
.context(ErrorKind::GenericError(
"API thread failed to start".to_string(),
))?;

View file

@@ -34,7 +34,11 @@ pub struct Error {
#[derive(Clone, Eq, PartialEq, Debug, Fail)]
pub enum ErrorKind {
/// Not enough funds
#[fail(display = "Not enough funds. Required: {}, Available: {}", needed, available)]
#[fail(
display = "Not enough funds. Required: {}, Available: {}",
needed,
available
)]
NotEnoughFunds {
/// available funds
available: u64,
@@ -43,7 +47,11 @@ pub enum ErrorKind {
},
/// Fee dispute
#[fail(display = "Fee dispute: sender fee {}, recipient fee {}", sender_fee, recipient_fee)]
#[fail(
display = "Fee dispute: sender fee {}, recipient fee {}",
sender_fee,
recipient_fee
)]
FeeDispute {
/// sender fee
sender_fee: u64,

View file

@@ -55,8 +55,7 @@ where
} else {
out.status != OutputStatus::Spent
}
})
.collect::<Vec<_>>();
}).collect::<Vec<_>>();
// only include outputs with a given tx_id if provided
if let Some(id) = tx_id {
@@ -73,8 +72,7 @@ where
.map(|out| {
let commit = wallet.get_commitment(&out.key_id).unwrap();
(out, commit)
})
.collect();
}).collect();
Ok(res)
}

View file

@@ -409,7 +409,11 @@ where
&mut parent_id.to_bytes().to_vec(),
t.id as u64,
);
self.db.borrow().as_ref().unwrap().put_ser(&tx_log_key, &t)?;
self.db
.borrow()
.as_ref()
.unwrap()
.put_ser(&tx_log_key, &t)?;
Ok(())
}

View file

@@ -46,8 +46,7 @@ fn aggsig_sender_receiver_interaction() {
&BlindSum::new()
.sub_blinding_factor(BlindingFactor::from_secret_key(skey1))
.add_blinding_factor(BlindingFactor::from_secret_key(skey2)),
)
.unwrap();
).unwrap();
keychain
.secp()
@@ -253,8 +252,7 @@ fn aggsig_sender_receiver_interaction_offset() {
// subtract the kernel offset here like as would when
// verifying a kernel signature
.sub_blinding_factor(BlindingFactor::from_secret_key(kernel_offset)),
)
.unwrap();
).unwrap();
keychain
.secp()
@@ -279,8 +277,7 @@ fn aggsig_sender_receiver_interaction_offset() {
// subtract the kernel offset to create an aggsig context
// with our "split" key
.sub_blinding_factor(BlindingFactor::from_secret_key(kernel_offset)),
)
.unwrap();
).unwrap();
let blind = blinding_factor.secret_key(&keychain.secp()).unwrap();