Small style changes in core crate (#1816)

Author: hashmap, 2018-10-23 21:09:16 +02:00 (committed by GitHub)
Parent: a433725b5d
Commit: 3efa7bdac9
21 changed files with 107 additions and 123 deletions

View file

@@ -624,9 +624,7 @@ impl<'a> HeaderExtension<'a> {
 /// This may be either the header MMR or the sync MMR depending on the
 /// extension.
 pub fn apply_header(&mut self, header: &BlockHeader) -> Result<(), Error> {
-self.pmmr
-.push(header.clone())
-.map_err(&ErrorKind::TxHashSetErr)?;
+self.pmmr.push(&header).map_err(&ErrorKind::TxHashSetErr)?;
 self.header = header.clone();
 Ok(())
 }
@@ -961,7 +959,7 @@ impl<'a> Extension<'a> {
 fn apply_header(&mut self, header: &BlockHeader) -> Result<(), Error> {
 self.header_pmmr
-.push(header.clone())
+.push(&header)
 .map_err(&ErrorKind::TxHashSetErr)?;
 Ok(())
 }
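These call sites now pass a borrow: the corresponding push signatures (changed later in this commit, in the PMMR types) take &T, so callers no longer clone just to hand over a value. A minimal sketch of the pattern, with a hypothetical Entry type standing in for BlockHeader:

#[derive(Clone, Debug)]
struct Entry {
    id: u64,
}

struct Log {
    entries: Vec<Entry>,
}

impl Log {
    // Taking &Entry moves the one necessary clone inside the method;
    // a `push(e: Entry)` signature forced `log.push(entry.clone())` on callers.
    fn push(&mut self, e: &Entry) {
        self.entries.push(e.clone());
    }
}

fn main() {
    let entry = Entry { id: 1 };
    let mut log = Log { entries: vec![] };
    log.push(&entry); // no clone at the call site
    println!("{:?}", entry); // entry is still usable afterwards
}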

View file

@@ -296,7 +296,7 @@ where
 }
 /// Factor by which the secondary proof of work difficulty will be adjusted
-pub fn secondary_pow_scaling(height: u64, diff_data: &Vec<HeaderInfo>) -> u32 {
+pub fn secondary_pow_scaling(height: u64, diff_data: &[HeaderInfo]) -> u32 {
 // Get the secondary count across the window, in pct (100 * 60 * 2nd_pow_fraction)
 let snd_count = 100 * diff_data.iter().filter(|n| n.is_secondary).count() as u64;
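Taking &[HeaderInfo] instead of &Vec<HeaderInfo> costs nothing at existing call sites, since &Vec<T> coerces to &[T], and it additionally accepts arrays and sub-slices. A small sketch with a stand-in Info type:

struct Info {
    is_secondary: bool,
}

fn count_secondary(data: &[Info]) -> usize {
    data.iter().filter(|n| n.is_secondary).count()
}

fn main() {
    let v = vec![Info { is_secondary: true }, Info { is_secondary: false }];
    assert_eq!(count_secondary(&v), 1); // &Vec<Info> coerces to &[Info]
    assert_eq!(count_secondary(&v[..1]), 1); // sub-slices work too
}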

View file

@@ -286,7 +286,7 @@ impl BlockHeader {
 /// Total difficulty accumulated by the proof of work on this header
 pub fn total_difficulty(&self) -> Difficulty {
-self.pow.total_difficulty.clone()
+self.pow.total_difficulty
 }
 /// The "overage" to use when verifying the kernel sums.
@@ -362,10 +362,7 @@ impl Readable for Block {
 body.validate_read(true)
 .map_err(|_| ser::Error::CorruptedData)?;
-Ok(Block {
-header: header,
-body: body,
-})
+Ok(Block { header, body })
 }
 }
@@ -465,7 +462,7 @@ impl Block {
 /// Build a new empty block from a specified header
 pub fn with_header(header: BlockHeader) -> Block {
 Block {
-header: header,
+header,
 ..Default::default()
 }
 }
@@ -607,15 +604,14 @@ impl Block {
 // take the kernel offset for this block (block offset minus previous) and
 // verify.body.outputs and kernel sums
-let block_kernel_offset = if self.header.total_kernel_offset() == prev_kernel_offset.clone()
-{
+let block_kernel_offset = if self.header.total_kernel_offset() == *prev_kernel_offset {
 // special case when the sum hasn't changed (typically an empty block),
 // zero isn't a valid private key but it's a valid blinding factor
 BlindingFactor::zero()
 } else {
 committed::sum_kernel_offsets(
 vec![self.header.total_kernel_offset()],
-vec![prev_kernel_offset.clone()],
+vec![*prev_kernel_offset],
 )?
 };
 let (_utxo_sum, kernel_sum) =
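Two idioms recur in this file: field init shorthand (Block { header, body }) and dereferencing instead of cloning when the type is Copy (*prev_kernel_offset). A sketch, assuming a Copy offset type as the diff implies:

#[derive(Clone, Copy, PartialEq)]
struct Offset(u64);

struct Header {
    offset: Offset,
}

fn same_offset(header: &Header, prev: &Offset) -> bool {
    // *prev copies the value out of the reference; no .clone() needed
    header.offset == *prev
}

fn main() {
    let offset = Offset(7);
    // shorthand for Header { offset: offset }
    let header = Header { offset };
    assert!(same_offset(&header, &Offset(7)));
}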

View file

@@ -53,8 +53,8 @@ impl Default for BlockSums {
 fn default() -> BlockSums {
 let zero_commit = secp_static::commit_to_zero_value();
 BlockSums {
-utxo_sum: zero_commit.clone(),
-kernel_sum: zero_commit.clone(),
+utxo_sum: zero_commit,
+kernel_sum: zero_commit,
 }
 }
 }
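The clones here were redundant because the commitment type is Copy, as the new code implies by using zero_commit twice, which only compiles for Copy types. Sketch with a stand-in Commit:

#[derive(Clone, Copy, PartialEq, Debug)]
struct Commit([u8; 4]);

struct Sums {
    utxo_sum: Commit,
    kernel_sum: Commit,
}

fn main() {
    let zero_commit = Commit([0; 4]);
    let sums = Sums {
        utxo_sum: zero_commit,   // copied, not moved
        kernel_sum: zero_commit, // still valid because Commit is Copy
    };
    assert_eq!(sums.utxo_sum, sums.kernel_sum);
}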

View file

@@ -86,7 +86,7 @@ impl MerkleProof {
 pub fn from_hex(hex: &str) -> Result<MerkleProof, String> {
 let bytes = util::from_hex(hex.to_string()).unwrap();
 let res = ser::deserialize(&mut &bytes[..])
-.map_err(|_| format!("failed to deserialize a Merkle Proof"))?;
+.map_err(|_| "failed to deserialize a Merkle Proof".to_string())?;
 Ok(res)
 }
@@ -102,7 +102,7 @@ impl MerkleProof {
 // calculate the peaks once as these are based on overall MMR size
 // (and will not change)
 let peaks_pos = pmmr::peaks(self.mmr_size);
-proof.verify_consume(root, element, node_pos, peaks_pos)
+proof.verify_consume(root, element, node_pos, &peaks_pos)
 }
/// Consumes the Merkle proof while verifying it.
@@ -113,7 +113,7 @@ impl MerkleProof {
 root: Hash,
 element: &PMMRIndexHashable,
 node_pos: u64,
-peaks_pos: Vec<u64>,
+peaks_pos: &[u64],
 ) -> Result<(), MerkleProofError> {
 let node_hash = if node_pos > self.mmr_size {
 element.hash_with_index(self.mmr_size)
@@ -123,7 +123,7 @@ impl MerkleProof {
 // handle special case of only a single entry in the MMR
 // (no siblings to hash together)
-if self.path.len() == 0 {
+if self.path.is_empty() {
 if root == node_hash {
 return Ok(());
 } else {
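The replacements above are standard clippy advice: a fixed message needs .to_string(), not a zero-argument format!, and is_empty() states the intent of len() == 0 directly. A compact sketch:

fn first_peak(peaks_pos: &[u64]) -> Result<u64, String> {
    if peaks_pos.is_empty() {
        // same allocation as format!("no peaks"), but clearer
        return Err("no peaks".to_string());
    }
    Ok(peaks_pos[0])
}

fn main() {
    assert_eq!(first_peak(&[3, 6]), Ok(3));
    assert!(first_peak(&[]).is_err());
}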

View file

@@ -42,8 +42,8 @@ where
 /// Build a new db backed MMR.
 pub fn new(backend: &'a mut B) -> DBPMMR<T, B> {
 DBPMMR {
+backend,
 last_pos: 0,
-backend: backend,
 _marker: marker::PhantomData,
 }
 }
@@ -52,8 +52,8 @@ where
 /// last_pos with the provided db backend.
 pub fn at(backend: &'a mut B, last_pos: u64) -> DBPMMR<T, B> {
 DBPMMR {
-last_pos: last_pos,
-backend: backend,
+backend,
+last_pos,
 _marker: marker::PhantomData,
 }
 }
@@ -98,7 +98,7 @@ where
 /// Push a new element into the MMR. Computes new related peaks at
 /// the same time if applicable.
-pub fn push(&mut self, elmt: T) -> Result<u64, String> {
+pub fn push(&mut self, elmt: &T) -> Result<u64, String> {
 let elmt_pos = self.last_pos + 1;
 let mut current_hash = elmt.hash_with_index(elmt_pos - 1);
View file

@@ -53,8 +53,8 @@ where
 /// Build a new prunable Merkle Mountain Range using the provided backend.
 pub fn new(backend: &'a mut B) -> PMMR<T, B> {
 PMMR {
+backend,
 last_pos: 0,
-backend: backend,
 _marker: marker::PhantomData,
 }
 }
@@ -63,8 +63,8 @@ where
 /// last_pos with the provided backend.
 pub fn at(backend: &'a mut B, last_pos: u64) -> PMMR<T, B> {
 PMMR {
-last_pos: last_pos,
-backend: backend,
+backend,
+last_pos,
 _marker: marker::PhantomData,
 }
 }
@@ -90,7 +90,7 @@ where
 let rhs = self.bag_the_rhs(peak_pos);
 let mut res = peaks(self.last_pos)
 .into_iter()
-.filter(|x| x < &peak_pos)
+.filter(|x| *x < peak_pos)
 .filter_map(|x| self.backend.get_from_file(x))
 .collect::<Vec<_>>();
 res.reverse();
@@ -107,7 +107,7 @@ where
 pub fn bag_the_rhs(&self, peak_pos: u64) -> Option<Hash> {
 let rhs = peaks(self.last_pos)
 .into_iter()
-.filter(|x| x > &peak_pos)
+.filter(|x| *x > peak_pos)
 .filter_map(|x| self.backend.get_from_file(x))
 .collect::<Vec<_>>();
@@ -145,7 +145,7 @@ where
 // check we actually have a hash in the MMR at this pos
 self.get_hash(pos)
-.ok_or(format!("no element at pos {}", pos))?;
+.ok_or_else(|| format!("no element at pos {}", pos))?;
 let mmr_size = self.unpruned_size();
@@ -510,7 +510,7 @@ pub fn peak_map_height(mut pos: u64) -> (u64, u64) {
 let mut peak_size = ALL_ONES >> pos.leading_zeros();
 let mut bitmap = 0;
 while peak_size != 0 {
-bitmap = bitmap << 1;
+bitmap <<= 1;
 if pos >= peak_size {
 pos -= peak_size;
 bitmap |= 1;
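The ok_or_else change above matters more than it looks: ok_or(format!(...)) builds the error String on every call, including the success path, while ok_or_else only runs the closure when the Option is None. Sketch:

fn get_hash(pos: u64) -> Option<u64> {
    if pos <= 10 {
        Some(pos * 2)
    } else {
        None
    }
}

fn require_hash(pos: u64) -> Result<u64, String> {
    // the closure (and its allocation) runs only on the None path
    get_hash(pos).ok_or_else(|| format!("no element at pos {}", pos))
}

fn main() {
    assert_eq!(require_hash(3), Ok(6));
    assert_eq!(require_hash(99), Err("no element at pos 99".to_string()));
}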

View file

@@ -41,8 +41,8 @@ where
 /// Build a new readonly PMMR.
 pub fn new(backend: &'a B) -> ReadonlyPMMR<T, B> {
 ReadonlyPMMR {
+backend,
 last_pos: 0,
-backend: backend,
 _marker: marker::PhantomData,
 }
 }
@@ -51,8 +51,8 @@ where
 /// last_pos with the provided backend.
 pub fn at(backend: &'a B, last_pos: u64) -> ReadonlyPMMR<T, B> {
 ReadonlyPMMR {
-last_pos: last_pos,
-backend: backend,
+backend,
+last_pos,
 _marker: marker::PhantomData,
 }
 }

View file

@@ -43,8 +43,8 @@ where
 /// Build a new readonly PMMR.
 pub fn new(backend: &'a B) -> RewindablePMMR<T, B> {
 RewindablePMMR {
+backend,
 last_pos: 0,
-backend: backend,
 _marker: marker::PhantomData,
 }
 }
@@ -53,8 +53,8 @@ where
 /// last_pos with the provided backend.
 pub fn at(backend: &'a B, last_pos: u64) -> RewindablePMMR<T, B> {
 RewindablePMMR {
-last_pos: last_pos,
-backend: backend,
+backend,
+last_pos,
 _marker: marker::PhantomData,
 }
 }

View file

@@ -177,7 +177,7 @@ impl Readable for TxKernel {
 let features =
 KernelFeatures::from_bits(reader.read_u8()?).ok_or(ser::Error::CorruptedData)?;
 Ok(TxKernel {
-features: features,
+features,
 fee: reader.read_u64()?,
 lock_height: reader.read_u64()?,
 excess: Commitment::read(reader)?,
@@ -230,13 +230,13 @@ impl TxKernel {
 /// Builds a new tx kernel with the provided fee.
 pub fn with_fee(self, fee: u64) -> TxKernel {
-TxKernel { fee: fee, ..self }
+TxKernel { fee, ..self }
 }
 /// Builds a new tx kernel with the provided lock_height.
 pub fn with_lock_height(self, lock_height: u64) -> TxKernel {
 TxKernel {
-lock_height: lock_height,
+lock_height,
 ..self
 }
 }
@@ -356,9 +356,9 @@ impl TransactionBody {
 verify_sorted: bool,
 ) -> Result<TransactionBody, Error> {
 let body = TransactionBody {
-inputs: inputs,
-outputs: outputs,
-kernels: kernels,
+inputs,
+outputs,
+kernels,
 };
 if verify_sorted {
@@ -436,7 +436,7 @@ impl TransactionBody {
 /// Calculate transaction weight from transaction details
 pub fn weight(input_len: usize, output_len: usize, kernel_len: usize) -> u32 {
-let mut body_weight = -1 * (input_len as i32) + (4 * output_len as i32) + kernel_len as i32;
+let mut body_weight = -(input_len as i32) + (4 * output_len as i32) + kernel_len as i32;
 if body_weight < 1 {
 body_weight = 1;
 }
@@ -559,7 +559,7 @@ impl TransactionBody {
 };
 // Now batch verify all those unverified rangeproofs
-if outputs.len() > 0 {
+if !outputs.is_empty() {
 let mut commits = vec![];
 let mut proofs = vec![];
 for x in &outputs {
@@ -687,10 +687,7 @@ impl Transaction {
 /// Creates a new transaction using this transaction as a template
 /// and with the specified offset.
 pub fn with_offset(self, offset: BlindingFactor) -> Transaction {
-Transaction {
-offset: offset,
-..self
-}
+Transaction { offset, ..self }
 }
/// Builds a new transaction with the provided inputs added. Existing
@@ -1072,7 +1069,7 @@ impl Readable for Output {
 OutputFeatures::from_bits(reader.read_u8()?).ok_or(ser::Error::CorruptedData)?;
 Ok(Output {
-features: features,
+features,
 commit: Commitment::read(reader)?,
 proof: RangeProof::read(reader)?,
 })
@@ -1131,8 +1128,8 @@ impl OutputIdentifier {
 /// Build a new output_identifier.
 pub fn new(features: OutputFeatures, commit: &Commitment) -> OutputIdentifier {
 OutputIdentifier {
-features: features,
-commit: commit.clone(),
+features,
+commit: *commit,
 }
 }
@@ -1152,9 +1149,9 @@ impl OutputIdentifier {
 /// Converts this identifier to a full output, provided a RangeProof
 pub fn into_output(self, proof: RangeProof) -> Output {
 Output {
+proof,
 features: self.features,
 commit: self.commit,
-proof: proof,
 }
 }
@@ -1196,8 +1193,8 @@ impl Readable for OutputIdentifier {
 let features =
 OutputFeatures::from_bits(reader.read_u8()?).ok_or(ser::Error::CorruptedData)?;
 Ok(OutputIdentifier {
+features,
 commit: Commitment::read(reader)?,
-features: features,
 })
 }
 }
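The with_offset rewrite above combines field init shorthand with struct update syntax: Transaction { offset, ..self } keeps every other field of self. A sketch with a stand-in Tx type:

#[derive(Debug, PartialEq)]
struct Tx {
    offset: u64,
    fee: u64,
}

impl Tx {
    fn with_offset(self, offset: u64) -> Tx {
        // shorthand for Tx { offset: offset, fee: self.fee }
        Tx { offset, ..self }
    }
}

fn main() {
    let tx = Tx { offset: 0, fee: 2 };
    assert_eq!(tx.with_offset(9), Tx { offset: 9, fee: 2 });
}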

View file

@@ -27,10 +27,10 @@ use core::{Output, TxKernel};
 pub trait VerifierCache: Sync + Send {
 /// Takes a vec of tx kernels and returns those kernels
 /// that have not yet been verified.
-fn filter_kernel_sig_unverified(&mut self, kernels: &Vec<TxKernel>) -> Vec<TxKernel>;
+fn filter_kernel_sig_unverified(&mut self, kernels: &[TxKernel]) -> Vec<TxKernel>;
 /// Takes a vec of tx outputs and returns those outputs
 /// that have not yet had their rangeproofs verified.
-fn filter_rangeproof_unverified(&mut self, outputs: &Vec<Output>) -> Vec<Output>;
+fn filter_rangeproof_unverified(&mut self, outputs: &[Output]) -> Vec<Output>;
 /// Adds a vec of tx kernels to the cache (used in conjunction with the the filter above).
 fn add_kernel_sig_verified(&mut self, kernels: Vec<TxKernel>);
 /// Adds a vec of outputs to the cache (used in conjunction with the the filter above).
@@ -45,9 +45,6 @@ pub struct LruVerifierCache {
 rangeproof_verification_cache: LruCache<Hash, bool>,
 }
-unsafe impl Sync for LruVerifierCache {}
-unsafe impl Send for LruVerifierCache {}
 impl LruVerifierCache {
 /// TODO how big should these caches be?
 /// They need to be *at least* large enough to cover a maxed out block.
@@ -60,7 +57,7 @@ impl LruVerifierCache {
 }
 impl VerifierCache for LruVerifierCache {
-fn filter_kernel_sig_unverified(&mut self, kernels: &Vec<TxKernel>) -> Vec<TxKernel> {
+fn filter_kernel_sig_unverified(&mut self, kernels: &[TxKernel]) -> Vec<TxKernel> {
 let res = kernels
 .into_iter()
 .filter(|x| {
@@ -78,7 +75,7 @@ impl VerifierCache for LruVerifierCache {
 res
 }
-fn filter_rangeproof_unverified(&mut self, outputs: &Vec<Output>) -> Vec<Output> {
+fn filter_rangeproof_unverified(&mut self, outputs: &[Output]) -> Vec<Output> {
 let res = outputs
 .into_iter()
 .filter(|x| {
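Dropping the unsafe impl Sync/Send blocks is safe here and arguably a soundness fix: Send and Sync are auto traits, so LruVerifierCache gets them for free as long as its fields are thread-safe, while hand-written unsafe impls would silently override the compiler's judgment if a field ever stopped being so. A sketch, with HashMap standing in for LruCache:

use std::collections::HashMap;

struct Cache {
    verified: HashMap<u64, bool>,
}

fn assert_send_sync<T: Send + Sync>() {}

fn main() {
    // compiles only because the compiler auto-derives Send + Sync for Cache
    assert_send_sync::<Cache>();
    let _ = Cache { verified: HashMap::new() };
}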

View file

@@ -303,8 +303,7 @@ where
 if live_intervals[i - 1].timestamp > live_intervals[i].timestamp {
 live_intervals[i].timestamp = 0;
 } else {
-live_intervals[i].timestamp =
-live_intervals[i].timestamp - live_intervals[i - 1].timestamp;
+live_intervals[i].timestamp -= live_intervals[i - 1].timestamp;
 }
 }
 // Remove genesis "interval"
// Remove genesis "interval"
@@ -321,7 +320,7 @@ where
 for _ in 0..block_count_difference {
 last_ts = last_ts.saturating_sub(live_intervals[live_intervals.len() - 1].timestamp);
-last_n.insert(0, HeaderInfo::from_ts_diff(last_ts, last_diff.clone()));
+last_n.insert(0, HeaderInfo::from_ts_diff(last_ts, last_diff));
 interval_index = match interval_index {
 0 => live_intervals.len() - 1,
 _ => interval_index - 1,

View file

@@ -78,19 +78,19 @@ where
 }
 }
-pub fn set_header_nonce(header: Vec<u8>, nonce: Option<u32>) -> Result<[u64; 4], Error> {
+pub fn set_header_nonce(header: &[u8], nonce: Option<u32>) -> Result<[u64; 4], Error> {
 if let Some(n) = nonce {
 let len = header.len();
-let mut header = header.clone();
+let mut header = header.to_owned();
 header.truncate(len - mem::size_of::<u32>());
 header.write_u32::<LittleEndian>(n)?;
-create_siphash_keys(header)
+create_siphash_keys(&header)
 } else {
-create_siphash_keys(header)
+create_siphash_keys(&header)
 }
 }
-pub fn create_siphash_keys(header: Vec<u8>) -> Result<[u64; 4], Error> {
+pub fn create_siphash_keys(header: &[u8]) -> Result<[u64; 4], Error> {
 let h = blake2b(32, &[], &header);
 let hb = h.as_bytes();
 let mut rdr = Cursor::new(hb);
@@ -163,7 +163,7 @@ where
 /// Reset the main keys used for siphash from the header and nonce
 pub fn reset_header_nonce(&mut self, header: Vec<u8>, nonce: Option<u32>) -> Result<(), Error> {
-self.siphash_keys = set_header_nonce(header, nonce)?;
+self.siphash_keys = set_header_nonce(&header, nonce)?;
 Ok(())
}
@@ -175,7 +175,7 @@ where
 );
 let mut masked = hash_u64 & self.edge_mask.to_u64().ok_or(ErrorKind::IntegerCast)?;
 if shift {
-masked = masked << 1;
+masked <<= 1;
 masked |= uorv;
 }
 Ok(T::from(masked).ok_or(ErrorKind::IntegerCast)?)
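set_header_nonce now borrows the header bytes and copies only on the branch that actually mutates; callers keep their buffer. The pattern, sketched with a hypothetical nonce-appending function:

fn with_nonce(header: &[u8], nonce: Option<u8>) -> Vec<u8> {
    if let Some(n) = nonce {
        // copy only when we need to mutate
        let mut header = header.to_owned();
        header.push(n);
        header
    } else {
        header.to_vec()
    }
}

fn main() {
    let header = vec![1u8, 2, 3];
    let keyed = with_nonce(&header, Some(9));
    assert_eq!(keyed, vec![1, 2, 3, 9]);
    assert_eq!(header, vec![1, 2, 3]); // caller's buffer is untouched
}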

View file

@@ -54,14 +54,14 @@ where
 pub fn new(max_edges: T, max_sols: u32, proof_size: usize) -> Result<Graph<T>, Error> {
 let max_nodes = 2 * to_u64!(max_edges);
 Ok(Graph {
-max_edges: max_edges,
-max_nodes: max_nodes,
+max_edges,
+max_nodes,
+max_sols,
+proof_size,
 links: vec![],
 adj_list: vec![],
 visited: Bitmap::create(),
-max_sols: max_sols,
 solutions: vec![],
-proof_size: proof_size,
 nil: T::max_value(),
 })
 }
@@ -241,7 +241,7 @@ where
 /// Simple implementation of algorithm
-pub fn find_cycles_iter<'a, I>(&mut self, iter: I) -> Result<Vec<Proof>, Error>
+pub fn find_cycles_iter<I>(&mut self, iter: I) -> Result<Vec<Proof>, Error>
 where
 I: Iterator<Item = u64>,
 {
@@ -260,7 +260,7 @@ where
 for s in &self.graph.solutions {
 self.verify_impl(&s)?;
 }
-if self.graph.solutions.len() == 0 {
+if self.graph.solutions.is_empty() {
 Err(ErrorKind::NoSolution)?
 } else {
 Ok(self.graph.solutions.clone())
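The 'a dropped from find_cycles_iter was never used by the signature; a lifetime parameter only earns its place when it links inputs to outputs or to the type's own borrows. Sketch:

struct Solver {
    sols: Vec<u64>,
}

impl Solver {
    // before: fn feed<'a, I>(&mut self, iter: I) with an unused 'a
    fn feed<I>(&mut self, iter: I)
    where
        I: Iterator<Item = u64>,
    {
        self.sols.extend(iter);
    }
}

fn main() {
    let mut s = Solver { sols: vec![] };
    s.feed(0..4u64);
    assert_eq!(s.sols, vec![0, 1, 2, 3]);
}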

View file

@@ -77,7 +77,7 @@ where
 let params = CuckooParams::new(edge_bits, proof_size)?;
 let num_nodes = 2 * params.num_edges as usize;
 Ok(CuckooContext {
-params: params,
+params,
 graph: vec![T::zero(); num_nodes],
 _max_sols: max_sols,
 })
@@ -190,7 +190,7 @@ where
 cycle.insert(Edge { u: us[0], v: vs[0] });
 while nu != 0 {
 // u's in even position; v's in odd
-nu = nu - 1;
+nu -= 1;
 cycle.insert(Edge {
 u: us[((nu + 1) & !1) as usize],
 v: us[(nu | 1) as usize],
@@ -214,11 +214,11 @@ where
 cycle.remove(&edge);
 }
 }
-return if n == self.params.proof_size {
+if n == self.params.proof_size {
 Ok(sol)
 } else {
 Err(ErrorKind::NoCycle)?
-};
+}
 }
 }
 /// Searches for a solution (simple implementation)
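An if/else in tail position is already an expression, so the return and the trailing semicolon were noise. Minimal form of the same shape:

fn check(n: usize, proof_size: usize) -> Result<usize, String> {
    // evaluates to the function's value; no `return ...;` needed
    if n == proof_size {
        Ok(n)
    } else {
        Err("no cycle".to_string())
    }
}

fn main() {
    assert_eq!(check(42, 42), Ok(42));
    assert!(check(41, 42).is_err());
}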

View file

@@ -85,7 +85,7 @@ impl From<ErrorKind> for Error {
 impl From<Context<ErrorKind>> for Error {
 fn from(inner: Context<ErrorKind>) -> Error {
-Error { inner: inner }
+Error { inner }
 }
 }

View file

@@ -37,7 +37,7 @@ impl Lean {
 // edge bitmap, before trimming all of them are on
 let mut edges = Bitmap::create_with_capacity(params.num_edges as u32);
-edges.flip_inplace(0..params.num_edges.into());
+edges.flip_inplace(0..params.num_edges);
 Lean { params, edges }
 }

View file

@@ -77,7 +77,7 @@ pub fn mine_genesis_block() -> Result<Block, Error> {
 }
 // total_difficulty on the genesis header *is* the difficulty of that block
-let genesis_difficulty = gen.header.pow.total_difficulty.clone();
+let genesis_difficulty = gen.header.pow.total_difficulty;
 let sz = global::min_edge_bits();
 let proof_size = global::proofsize();

View file

@@ -62,7 +62,7 @@ pub fn siphash24(v: &[u64; 4], nonce: u64) -> u64 {
 round!();
 round!();
-return v0 ^ v1 ^ v2 ^ v3;
+v0 ^ v1 ^ v2 ^ v3
 }
 #[cfg(test)]

View file

@@ -92,10 +92,7 @@ impl error::Error for Error {
 fn description(&self) -> &str {
 match *self {
 Error::IOErr(ref e, _) => e,
-Error::UnexpectedData {
-expected: _,
-received: _,
-} => "unexpected data",
+Error::UnexpectedData { .. } => "unexpected data",
 Error::CorruptedData => "corrupted data",
 Error::TooLargeReadErr => "too large read",
 Error::ConsensusError(_) => "consensus error (sort order)",
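The { .. } rest pattern matches a struct variant while ignoring all of its fields, replacing the field-by-field expected: _, received: _ form and staying valid if fields are added later. Sketch:

enum Error {
    UnexpectedData { expected: Vec<u8>, received: Vec<u8> },
    CorruptedData,
}

fn describe(e: &Error) -> &str {
    match e {
        // ignores every field, present and future
        Error::UnexpectedData { .. } => "unexpected data",
        Error::CorruptedData => "corrupted data",
    }
}

fn main() {
    let e = Error::UnexpectedData { expected: vec![0], received: vec![1] };
    assert_eq!(describe(&e), "unexpected data");
}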
@@ -231,13 +228,13 @@ where
 /// Deserializes a Readeable from any std::io::Read implementation.
 pub fn deserialize<T: Readable>(source: &mut Read) -> Result<T, Error> {
-let mut reader = BinReader { source: source };
+let mut reader = BinReader { source };
 T::read(&mut reader)
 }
 /// Serializes a Writeable into any std::io::Write implementation.
 pub fn serialize<W: Writeable>(sink: &mut Write, thing: &W) -> Result<(), Error> {
-let mut writer = BinWriter { sink: sink };
+let mut writer = BinWriter { sink };
 thing.write(&mut writer)
 }
@@ -319,9 +316,7 @@ impl Readable for Commitment {
 fn read(reader: &mut Reader) -> Result<Commitment, Error> {
 let a = reader.read_fixed_bytes(PEDERSEN_COMMITMENT_SIZE)?;
 let mut c = [0; PEDERSEN_COMMITMENT_SIZE];
-for i in 0..PEDERSEN_COMMITMENT_SIZE {
-c[i] = a[i];
-}
+c[..PEDERSEN_COMMITMENT_SIZE].clone_from_slice(&a[..PEDERSEN_COMMITMENT_SIZE]);
 Ok(Commitment(c))
 }
 }
@@ -368,9 +363,7 @@ impl Readable for RangeProof {
 fn read(reader: &mut Reader) -> Result<RangeProof, Error> {
 let p = reader.read_limited_vec(MAX_PROOF_SIZE)?;
 let mut a = [0; MAX_PROOF_SIZE];
-for i in 0..p.len() {
-a[i] = p[i];
-}
+a[..p.len()].clone_from_slice(&p[..]);
 Ok(RangeProof {
 proof: a,
 plen: p.len(),
@@ -388,9 +381,7 @@ impl Readable for Signature {
 fn read(reader: &mut Reader) -> Result<Signature, Error> {
 let a = reader.read_fixed_bytes(AGG_SIGNATURE_SIZE)?;
 let mut c = [0; AGG_SIGNATURE_SIZE];
-for i in 0..AGG_SIGNATURE_SIZE {
-c[i] = a[i];
-}
+c[..AGG_SIGNATURE_SIZE].clone_from_slice(&a[..AGG_SIGNATURE_SIZE]);
 Ok(Signature::from_raw_data(&c).unwrap())
 }
 }
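clone_from_slice replaces the manual index loops above; the two slices must be the same length or it panics, which is why each call slices both sides explicitly. For u8 the equivalent copy_from_slice also works. Quick check:

fn main() {
    let p = vec![1u8, 2, 3];
    let mut a = [0u8; 8];
    // fill only the prefix, like the RangeProof read above
    a[..p.len()].clone_from_slice(&p[..]);
    assert_eq!(&a[..3], &[1, 2, 3]);
    assert_eq!(a[3], 0); // rest stays zeroed
}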
@@ -577,81 +568,81 @@ pub trait AsFixedBytes: Sized + AsRef<[u8]> {
 impl<'a> AsFixedBytes for &'a [u8] {
 fn len(&self) -> usize {
-return 1;
+1
 }
 }
 impl AsFixedBytes for Vec<u8> {
 fn len(&self) -> usize {
-return self.len();
+self.len()
 }
 }
 impl AsFixedBytes for [u8; 1] {
 fn len(&self) -> usize {
-return 1;
+1
 }
 }
 impl AsFixedBytes for [u8; 2] {
 fn len(&self) -> usize {
-return 2;
+2
 }
 }
 impl AsFixedBytes for [u8; 4] {
 fn len(&self) -> usize {
-return 4;
+4
 }
 }
 impl AsFixedBytes for [u8; 6] {
 fn len(&self) -> usize {
-return 6;
+6
 }
 }
 impl AsFixedBytes for [u8; 8] {
 fn len(&self) -> usize {
-return 8;
+8
 }
 }
 impl AsFixedBytes for [u8; 20] {
 fn len(&self) -> usize {
-return 20;
+20
 }
 }
 impl AsFixedBytes for [u8; 32] {
 fn len(&self) -> usize {
-return 32;
+32
 }
 }
 impl AsFixedBytes for String {
 fn len(&self) -> usize {
-return self.len();
+self.len()
 }
 }
 impl AsFixedBytes for ::core::hash::Hash {
 fn len(&self) -> usize {
-return 32;
+32
 }
 }
 impl AsFixedBytes for ::util::secp::pedersen::RangeProof {
 fn len(&self) -> usize {
-return self.plen;
+self.plen
 }
 }
 impl AsFixedBytes for ::util::secp::Signature {
 fn len(&self) -> usize {
-return 64;
+64
 }
 }
 impl AsFixedBytes for ::util::secp::pedersen::Commitment {
 fn len(&self) -> usize {
-return PEDERSEN_COMMITMENT_SIZE;
+PEDERSEN_COMMITMENT_SIZE
 }
 }
 impl AsFixedBytes for BlindingFactor {
 fn len(&self) -> usize {
-return SECRET_KEY_SIZE;
+SECRET_KEY_SIZE
 }
 }
 impl AsFixedBytes for ::keychain::Identifier {
 fn len(&self) -> usize {
-return IDENTIFIER_SIZE;
+IDENTIFIER_SIZE
 }
 }

View file

@@ -496,25 +496,31 @@ fn secondary_pow_scale() {
 // all primary, factor should increase so it becomes easier to find a high
 // difficulty block
 assert_eq!(
-secondary_pow_scaling(1, &(0..window).map(|_| hi.clone()).collect()),
+secondary_pow_scaling(1, &(0..window).map(|_| hi.clone()).collect::<Vec<_>>()),
 147
 );
 // all secondary on 90%, factor should go down a bit
 hi.is_secondary = true;
 assert_eq!(
-secondary_pow_scaling(1, &(0..window).map(|_| hi.clone()).collect()),
+secondary_pow_scaling(1, &(0..window).map(|_| hi.clone()).collect::<Vec<_>>()),
 94
 );
 // all secondary on 1%, factor should go down to bound (divide by 2)
 assert_eq!(
-secondary_pow_scaling(890_000, &(0..window).map(|_| hi.clone()).collect()),
+secondary_pow_scaling(
+890_000,
+&(0..window).map(|_| hi.clone()).collect::<Vec<_>>()
+),
 49
 );
 // same as above, testing lowest bound
 let mut low_hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), 3);
 low_hi.is_secondary = true;
 assert_eq!(
-secondary_pow_scaling(890_000, &(0..window).map(|_| low_hi.clone()).collect()),
+secondary_pow_scaling(
+890_000,
+&(0..window).map(|_| low_hi.clone()).collect::<Vec<_>>()
+),
 1
 );
 // just about the right ratio, also no longer playing with median
@@ -525,7 +531,7 @@ fn secondary_pow_scale() {
 &(0..(window / 10))
 .map(|_| primary_hi.clone())
 .chain((0..(window * 9 / 10)).map(|_| hi.clone()))
-.collect()
+.collect::<Vec<_>>()
 ),
 94
 );
@@ -536,7 +542,7 @@ fn secondary_pow_scale() {
 &(0..(window / 20))
 .map(|_| primary_hi.clone())
 .chain((0..(window * 95 / 100)).map(|_| hi.clone()))
-.collect()
+.collect::<Vec<_>>()
 ),
 94
 );
@@ -547,7 +553,7 @@ fn secondary_pow_scale() {
 &(0..(window * 6 / 10))
 .map(|_| primary_hi.clone())
 .chain((0..(window * 4 / 10)).map(|_| hi.clone()))
-.collect()
+.collect::<Vec<_>>()
 ),
 84
 );
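The turbofish additions in these tests follow directly from the consensus.rs signature change: with the parameter now &[HeaderInfo], collect() can no longer infer its target container from the call site, so the tests pin it with collect::<Vec<_>>(). The shape of the fix, with u32 standing in for HeaderInfo:

fn scaling(diff_data: &[u32]) -> u32 {
    diff_data.iter().sum()
}

fn main() {
    let window = 4u32;
    // a bare `&(0..window).collect()` would be ambiguous here
    let total = scaling(&(0..window).collect::<Vec<_>>());
    assert_eq!(total, 6);
}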