mirror of https://github.com/mimblewimble/grin.git (synced 2025-01-21 11:31:08 +03:00)

Small style changes in core crate (#1816)

Parent: a433725b5d
Commit: 3efa7bdac9

21 changed files with 107 additions and 123 deletions
@@ -624,9 +624,7 @@ impl<'a> HeaderExtension<'a> {
     /// This may be either the header MMR or the sync MMR depending on the
     /// extension.
     pub fn apply_header(&mut self, header: &BlockHeader) -> Result<(), Error> {
-        self.pmmr
-            .push(header.clone())
-            .map_err(&ErrorKind::TxHashSetErr)?;
+        self.pmmr.push(&header).map_err(&ErrorKind::TxHashSetErr)?;
         self.header = header.clone();
         Ok(())
     }
@@ -961,7 +959,7 @@ impl<'a> Extension<'a> {

     fn apply_header(&mut self, header: &BlockHeader) -> Result<(), Error> {
         self.header_pmmr
-            .push(header.clone())
+            .push(&header)
             .map_err(&ErrorKind::TxHashSetErr)?;
         Ok(())
     }
@@ -296,7 +296,7 @@ where
 }

 /// Factor by which the secondary proof of work difficulty will be adjusted
-pub fn secondary_pow_scaling(height: u64, diff_data: &Vec<HeaderInfo>) -> u32 {
+pub fn secondary_pow_scaling(height: u64, diff_data: &[HeaderInfo]) -> u32 {
     // Get the secondary count across the window, in pct (100 * 60 * 2nd_pow_fraction)
     let snd_count = 100 * diff_data.iter().filter(|n| n.is_secondary).count() as u64;

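
Note on this hunk: a parameter typed &[HeaderInfo] accepts a borrowed Vec, a fixed-size array, or a sub-slice without any extra allocation, which is why slices are preferred over &Vec<T>. A minimal sketch of the same signature change, using a hypothetical Info type rather than the grin types:

    #[derive(Clone)]
    struct Info {
        is_secondary: bool,
    }

    // &[Info] is strictly more general than &Vec<Info>:
    // a &Vec<Info> coerces to &[Info] automatically at the call site.
    fn count_secondary(data: &[Info]) -> usize {
        data.iter().filter(|n| n.is_secondary).count()
    }

    fn main() {
        let v = vec![Info { is_secondary: true }, Info { is_secondary: false }];
        assert_eq!(count_secondary(&v), 1); // works with a Vec
        assert_eq!(count_secondary(&v[..1]), 1); // and with a sub-slice
    }
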
@@ -286,7 +286,7 @@ impl BlockHeader {

     /// Total difficulty accumulated by the proof of work on this header
     pub fn total_difficulty(&self) -> Difficulty {
-        self.pow.total_difficulty.clone()
+        self.pow.total_difficulty
     }

     /// The "overage" to use when verifying the kernel sums.
@@ -362,10 +362,7 @@ impl Readable for Block {
         body.validate_read(true)
             .map_err(|_| ser::Error::CorruptedData)?;

-        Ok(Block {
-            header: header,
-            body: body,
-        })
+        Ok(Block { header, body })
     }
 }

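
The Ok(Block { header, body }) form above uses field init shorthand: when a local variable has the same name as a struct field, "field: field" collapses to just "field". A small self-contained sketch with hypothetical Header/Block types:

    struct Header {
        height: u64,
    }

    struct Block {
        header: Header,
        body: Vec<u8>,
    }

    // Equivalent to Block { header: header, body: body }.
    fn build(header: Header, body: Vec<u8>) -> Block {
        Block { header, body }
    }

    fn main() {
        let b = build(Header { height: 1 }, vec![]);
        assert_eq!(b.header.height, 1);
        assert!(b.body.is_empty());
    }
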
@@ -465,7 +462,7 @@ impl Block {
     /// Build a new empty block from a specified header
     pub fn with_header(header: BlockHeader) -> Block {
         Block {
-            header: header,
+            header,
             ..Default::default()
         }
     }
@@ -607,15 +604,14 @@ impl Block {

         // take the kernel offset for this block (block offset minus previous) and
         // verify.body.outputs and kernel sums
-        let block_kernel_offset = if self.header.total_kernel_offset() == prev_kernel_offset.clone()
-        {
+        let block_kernel_offset = if self.header.total_kernel_offset() == *prev_kernel_offset {
             // special case when the sum hasn't changed (typically an empty block),
             // zero isn't a valid private key but it's a valid blinding factor
             BlindingFactor::zero()
         } else {
             committed::sum_kernel_offsets(
                 vec![self.header.total_kernel_offset()],
-                vec![prev_kernel_offset.clone()],
+                vec![*prev_kernel_offset],
             )?
         };
         let (_utxo_sum, kernel_sum) =
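
The comparison against *prev_kernel_offset dereferences the borrowed value instead of cloning it; this works because the offset type is copyable and the comparison only reads the value. A sketch of the pattern with a hypothetical Copy wrapper type:

    #[derive(Clone, Copy, PartialEq, Debug)]
    struct Offset(u64);

    // *prev copies the value out of the reference; no clone() is needed
    // because Offset implements Copy.
    fn is_unchanged(current: Offset, prev: &Offset) -> bool {
        current == *prev
    }

    fn main() {
        let prev = Offset(42);
        assert!(is_unchanged(Offset(42), &prev));
        assert!(!is_unchanged(Offset(7), &prev));
    }
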
@@ -53,8 +53,8 @@ impl Default for BlockSums {
     fn default() -> BlockSums {
         let zero_commit = secp_static::commit_to_zero_value();
         BlockSums {
-            utxo_sum: zero_commit.clone(),
-            kernel_sum: zero_commit.clone(),
+            utxo_sum: zero_commit,
+            kernel_sum: zero_commit,
         }
     }
 }
@@ -86,7 +86,7 @@ impl MerkleProof {
     pub fn from_hex(hex: &str) -> Result<MerkleProof, String> {
         let bytes = util::from_hex(hex.to_string()).unwrap();
         let res = ser::deserialize(&mut &bytes[..])
-            .map_err(|_| format!("failed to deserialize a Merkle Proof"))?;
+            .map_err(|_| "failed to deserialize a Merkle Proof".to_string())?;
         Ok(res)
     }

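
format! with no interpolated arguments only allocates a String, so the literal plus .to_string() says the same thing more directly. A tiny illustration (parse_byte is a made-up helper, not the grin API):

    fn parse_byte(hex: &str) -> Result<u8, String> {
        // A plain literal just needs .to_string(); format!() with no
        // arguments would build the same String with extra machinery.
        u8::from_str_radix(hex, 16).map_err(|_| "failed to deserialize".to_string())
    }

    fn main() {
        assert_eq!(parse_byte("ff"), Ok(255));
        assert!(parse_byte("zz").is_err());
    }
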
@@ -102,7 +102,7 @@ impl MerkleProof {
         // calculate the peaks once as these are based on overall MMR size
         // (and will not change)
         let peaks_pos = pmmr::peaks(self.mmr_size);
-        proof.verify_consume(root, element, node_pos, peaks_pos)
+        proof.verify_consume(root, element, node_pos, &peaks_pos)
     }

     /// Consumes the Merkle proof while verifying it.
@@ -113,7 +113,7 @@ impl MerkleProof {
         root: Hash,
         element: &PMMRIndexHashable,
         node_pos: u64,
-        peaks_pos: Vec<u64>,
+        peaks_pos: &[u64],
     ) -> Result<(), MerkleProofError> {
         let node_hash = if node_pos > self.mmr_size {
             element.hash_with_index(self.mmr_size)
@@ -123,7 +123,7 @@ impl MerkleProof {

         // handle special case of only a single entry in the MMR
         // (no siblings to hash together)
-        if self.path.len() == 0 {
+        if self.path.is_empty() {
             if root == node_hash {
                 return Ok(());
             } else {
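
path.is_empty() states the intent of the old path.len() == 0 check directly; the same rewrite shows up in later hunks (outputs, solutions). Minimal example:

    fn is_single_entry(path: &[u64]) -> bool {
        // Clearer than path.len() == 0, and what clippy's len_zero lint suggests.
        path.is_empty()
    }

    fn main() {
        assert!(is_single_entry(&[]));
        assert!(!is_single_entry(&[1, 2, 3]));
    }
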
@@ -42,8 +42,8 @@ where
     /// Build a new db backed MMR.
     pub fn new(backend: &'a mut B) -> DBPMMR<T, B> {
         DBPMMR {
+            backend,
             last_pos: 0,
-            backend: backend,
             _marker: marker::PhantomData,
         }
     }
@@ -52,8 +52,8 @@ where
     /// last_pos with the provided db backend.
     pub fn at(backend: &'a mut B, last_pos: u64) -> DBPMMR<T, B> {
         DBPMMR {
-            last_pos: last_pos,
-            backend: backend,
+            backend,
+            last_pos,
             _marker: marker::PhantomData,
         }
     }
@@ -98,7 +98,7 @@ where

     /// Push a new element into the MMR. Computes new related peaks at
     /// the same time if applicable.
-    pub fn push(&mut self, elmt: T) -> Result<u64, String> {
+    pub fn push(&mut self, elmt: &T) -> Result<u64, String> {
         let elmt_pos = self.last_pos + 1;
         let mut current_hash = elmt.hash_with_index(elmt_pos - 1);

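
Changing push to take &T (here and in the apply_header hunks above) lets callers lend the element instead of cloning it at every call site. A minimal sketch of the pattern with a hypothetical Log type, not the real PMMR backend API:

    struct Log<T> {
        items: Vec<T>,
    }

    impl<T: Clone> Log<T> {
        // Taking &T lets the caller keep ownership; the clone happens once,
        // where the copy is actually stored, not at every call site.
        fn push(&mut self, elmt: &T) -> usize {
            self.items.push(elmt.clone());
            self.items.len()
        }
    }

    fn main() {
        let mut log = Log { items: Vec::new() };
        let header = String::from("block header");
        assert_eq!(log.push(&header), 1);
        // header is still usable here because it was only borrowed.
        assert_eq!(header.len(), 12);
    }
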
@@ -53,8 +53,8 @@ where
     /// Build a new prunable Merkle Mountain Range using the provided backend.
     pub fn new(backend: &'a mut B) -> PMMR<T, B> {
         PMMR {
+            backend,
             last_pos: 0,
-            backend: backend,
             _marker: marker::PhantomData,
         }
     }
@@ -63,8 +63,8 @@ where
     /// last_pos with the provided backend.
     pub fn at(backend: &'a mut B, last_pos: u64) -> PMMR<T, B> {
         PMMR {
-            last_pos: last_pos,
-            backend: backend,
+            backend,
+            last_pos,
             _marker: marker::PhantomData,
         }
     }
@@ -90,7 +90,7 @@ where
         let rhs = self.bag_the_rhs(peak_pos);
         let mut res = peaks(self.last_pos)
             .into_iter()
-            .filter(|x| x < &peak_pos)
+            .filter(|x| *x < peak_pos)
             .filter_map(|x| self.backend.get_from_file(x))
             .collect::<Vec<_>>();
         res.reverse();
@@ -107,7 +107,7 @@ where
     pub fn bag_the_rhs(&self, peak_pos: u64) -> Option<Hash> {
         let rhs = peaks(self.last_pos)
             .into_iter()
-            .filter(|x| x > &peak_pos)
+            .filter(|x| *x > peak_pos)
             .filter_map(|x| self.backend.get_from_file(x))
             .collect::<Vec<_>>();

@@ -145,7 +145,7 @@ where

         // check we actually have a hash in the MMR at this pos
         self.get_hash(pos)
-            .ok_or(format!("no element at pos {}", pos))?;
+            .ok_or_else(|| format!("no element at pos {}", pos))?;

         let mmr_size = self.unpruned_size();

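
ok_or(format!(...)) builds the error string even when the Option is Some; ok_or_else defers that work to a closure that only runs in the None case. A small sketch with made-up helpers:

    fn get_hash(pos: u64) -> Option<u64> {
        if pos < 10 {
            Some(pos * 7)
        } else {
            None
        }
    }

    fn require_hash(pos: u64) -> Result<u64, String> {
        // The closure, and the String it allocates, only run on the None path.
        get_hash(pos).ok_or_else(|| format!("no element at pos {}", pos))
    }

    fn main() {
        assert_eq!(require_hash(3), Ok(21));
        assert!(require_hash(99).is_err());
    }
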
@@ -510,7 +510,7 @@ pub fn peak_map_height(mut pos: u64) -> (u64, u64) {
     let mut peak_size = ALL_ONES >> pos.leading_zeros();
     let mut bitmap = 0;
     while peak_size != 0 {
-        bitmap = bitmap << 1;
+        bitmap <<= 1;
         if pos >= peak_size {
             pos -= peak_size;
             bitmap |= 1;
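
bitmap <<= 1 is the compound-assignment form of bitmap = bitmap << 1; the same clippy suggestion (assign_op_pattern) drives the masked <<= 1, timestamp -= and nu -= 1 changes later in this commit. Tiny example:

    fn main() {
        let mut bitmap: u64 = 0b101;
        bitmap <<= 1; // same as bitmap = bitmap << 1
        bitmap |= 1;
        assert_eq!(bitmap, 0b1011);

        let mut nu: u32 = 10;
        nu -= 1; // same as nu = nu - 1
        assert_eq!(nu, 9);
    }
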
@@ -41,8 +41,8 @@ where
     /// Build a new readonly PMMR.
     pub fn new(backend: &'a B) -> ReadonlyPMMR<T, B> {
         ReadonlyPMMR {
+            backend,
             last_pos: 0,
-            backend: backend,
             _marker: marker::PhantomData,
         }
     }
@@ -51,8 +51,8 @@ where
     /// last_pos with the provided backend.
     pub fn at(backend: &'a B, last_pos: u64) -> ReadonlyPMMR<T, B> {
         ReadonlyPMMR {
-            last_pos: last_pos,
-            backend: backend,
+            backend,
+            last_pos,
             _marker: marker::PhantomData,
         }
     }
@@ -43,8 +43,8 @@ where
     /// Build a new readonly PMMR.
     pub fn new(backend: &'a B) -> RewindablePMMR<T, B> {
         RewindablePMMR {
+            backend,
             last_pos: 0,
-            backend: backend,
             _marker: marker::PhantomData,
         }
     }
@@ -53,8 +53,8 @@ where
     /// last_pos with the provided backend.
     pub fn at(backend: &'a B, last_pos: u64) -> RewindablePMMR<T, B> {
         RewindablePMMR {
-            last_pos: last_pos,
-            backend: backend,
+            backend,
+            last_pos,
             _marker: marker::PhantomData,
         }
     }
@@ -177,7 +177,7 @@ impl Readable for TxKernel {
         let features =
             KernelFeatures::from_bits(reader.read_u8()?).ok_or(ser::Error::CorruptedData)?;
         Ok(TxKernel {
-            features: features,
+            features,
             fee: reader.read_u64()?,
             lock_height: reader.read_u64()?,
             excess: Commitment::read(reader)?,
@@ -230,13 +230,13 @@ impl TxKernel {

     /// Builds a new tx kernel with the provided fee.
     pub fn with_fee(self, fee: u64) -> TxKernel {
-        TxKernel { fee: fee, ..self }
+        TxKernel { fee, ..self }
     }

     /// Builds a new tx kernel with the provided lock_height.
     pub fn with_lock_height(self, lock_height: u64) -> TxKernel {
         TxKernel {
-            lock_height: lock_height,
+            lock_height,
             ..self
         }
     }
@@ -356,9 +356,9 @@ impl TransactionBody {
         verify_sorted: bool,
     ) -> Result<TransactionBody, Error> {
         let body = TransactionBody {
-            inputs: inputs,
-            outputs: outputs,
-            kernels: kernels,
+            inputs,
+            outputs,
+            kernels,
         };

         if verify_sorted {
@@ -436,7 +436,7 @@ impl TransactionBody {

     /// Calculate transaction weight from transaction details
     pub fn weight(input_len: usize, output_len: usize, kernel_len: usize) -> u32 {
-        let mut body_weight = -1 * (input_len as i32) + (4 * output_len as i32) + kernel_len as i32;
+        let mut body_weight = -(input_len as i32) + (4 * output_len as i32) + kernel_len as i32;
         if body_weight < 1 {
             body_weight = 1;
         }
@@ -559,7 +559,7 @@ impl TransactionBody {
         };

         // Now batch verify all those unverified rangeproofs
-        if outputs.len() > 0 {
+        if !outputs.is_empty() {
             let mut commits = vec![];
             let mut proofs = vec![];
             for x in &outputs {
@@ -687,10 +687,7 @@ impl Transaction {
     /// Creates a new transaction using this transaction as a template
     /// and with the specified offset.
     pub fn with_offset(self, offset: BlindingFactor) -> Transaction {
-        Transaction {
-            offset: offset,
-            ..self
-        }
+        Transaction { offset, ..self }
     }

     /// Builds a new transaction with the provided inputs added. Existing
@@ -1072,7 +1069,7 @@ impl Readable for Output {
             OutputFeatures::from_bits(reader.read_u8()?).ok_or(ser::Error::CorruptedData)?;

         Ok(Output {
-            features: features,
+            features,
             commit: Commitment::read(reader)?,
             proof: RangeProof::read(reader)?,
         })
@@ -1131,8 +1128,8 @@ impl OutputIdentifier {
     /// Build a new output_identifier.
     pub fn new(features: OutputFeatures, commit: &Commitment) -> OutputIdentifier {
         OutputIdentifier {
-            features: features,
-            commit: commit.clone(),
+            features,
+            commit: *commit,
         }
     }

@@ -1152,9 +1149,9 @@ impl OutputIdentifier {
     /// Converts this identifier to a full output, provided a RangeProof
     pub fn into_output(self, proof: RangeProof) -> Output {
         Output {
+            proof,
             features: self.features,
             commit: self.commit,
-            proof: proof,
         }
     }

@@ -1196,8 +1193,8 @@ impl Readable for OutputIdentifier {
         let features =
             OutputFeatures::from_bits(reader.read_u8()?).ok_or(ser::Error::CorruptedData)?;
         Ok(OutputIdentifier {
+            features,
             commit: Commitment::read(reader)?,
-            features: features,
         })
     }
 }
@@ -27,10 +27,10 @@ use core::{Output, TxKernel};
 pub trait VerifierCache: Sync + Send {
     /// Takes a vec of tx kernels and returns those kernels
     /// that have not yet been verified.
-    fn filter_kernel_sig_unverified(&mut self, kernels: &Vec<TxKernel>) -> Vec<TxKernel>;
+    fn filter_kernel_sig_unverified(&mut self, kernels: &[TxKernel]) -> Vec<TxKernel>;
     /// Takes a vec of tx outputs and returns those outputs
     /// that have not yet had their rangeproofs verified.
-    fn filter_rangeproof_unverified(&mut self, outputs: &Vec<Output>) -> Vec<Output>;
+    fn filter_rangeproof_unverified(&mut self, outputs: &[Output]) -> Vec<Output>;
     /// Adds a vec of tx kernels to the cache (used in conjunction with the the filter above).
     fn add_kernel_sig_verified(&mut self, kernels: Vec<TxKernel>);
     /// Adds a vec of outputs to the cache (used in conjunction with the the filter above).
@@ -45,9 +45,6 @@ pub struct LruVerifierCache {
     rangeproof_verification_cache: LruCache<Hash, bool>,
 }

-unsafe impl Sync for LruVerifierCache {}
-unsafe impl Send for LruVerifierCache {}
-
 impl LruVerifierCache {
     /// TODO how big should these caches be?
     /// They need to be *at least* large enough to cover a maxed out block.
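
The deleted unsafe impl Sync/Send lines were redundant: Send and Sync are auto traits, so a struct is already both whenever all of its fields are, and the manual impls only added an unchecked claim. A sketch, assuming the cached map type is itself Send + Sync:

    use std::collections::HashMap;

    // No unsafe impls are needed: because HashMap<u64, bool> is itself
    // Send + Sync, the compiler derives Send + Sync for Cache automatically.
    struct Cache {
        verified: HashMap<u64, bool>,
    }

    fn assert_send_sync<T: Send + Sync>() {}

    fn main() {
        assert_send_sync::<Cache>();
        let _ = Cache { verified: HashMap::new() };
    }
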
@@ -60,7 +57,7 @@ impl LruVerifierCache {
 }

 impl VerifierCache for LruVerifierCache {
-    fn filter_kernel_sig_unverified(&mut self, kernels: &Vec<TxKernel>) -> Vec<TxKernel> {
+    fn filter_kernel_sig_unverified(&mut self, kernels: &[TxKernel]) -> Vec<TxKernel> {
         let res = kernels
             .into_iter()
             .filter(|x| {
@@ -78,7 +75,7 @@ impl VerifierCache for LruVerifierCache {
         res
     }

-    fn filter_rangeproof_unverified(&mut self, outputs: &Vec<Output>) -> Vec<Output> {
+    fn filter_rangeproof_unverified(&mut self, outputs: &[Output]) -> Vec<Output> {
         let res = outputs
             .into_iter()
             .filter(|x| {
@@ -303,8 +303,7 @@ where
         if live_intervals[i - 1].timestamp > live_intervals[i].timestamp {
             live_intervals[i].timestamp = 0;
         } else {
-            live_intervals[i].timestamp =
-                live_intervals[i].timestamp - live_intervals[i - 1].timestamp;
+            live_intervals[i].timestamp -= live_intervals[i - 1].timestamp;
         }
     }
     // Remove genesis "interval"
@@ -321,7 +320,7 @@ where

     for _ in 0..block_count_difference {
         last_ts = last_ts.saturating_sub(live_intervals[live_intervals.len() - 1].timestamp);
-        last_n.insert(0, HeaderInfo::from_ts_diff(last_ts, last_diff.clone()));
+        last_n.insert(0, HeaderInfo::from_ts_diff(last_ts, last_diff));
         interval_index = match interval_index {
             0 => live_intervals.len() - 1,
             _ => interval_index - 1,
@@ -78,19 +78,19 @@ where
     }
 }

-pub fn set_header_nonce(header: Vec<u8>, nonce: Option<u32>) -> Result<[u64; 4], Error> {
+pub fn set_header_nonce(header: &[u8], nonce: Option<u32>) -> Result<[u64; 4], Error> {
     if let Some(n) = nonce {
         let len = header.len();
-        let mut header = header.clone();
+        let mut header = header.to_owned();
         header.truncate(len - mem::size_of::<u32>());
         header.write_u32::<LittleEndian>(n)?;
-        create_siphash_keys(header)
+        create_siphash_keys(&header)
     } else {
-        create_siphash_keys(header)
+        create_siphash_keys(&header)
     }
 }

-pub fn create_siphash_keys(header: Vec<u8>) -> Result<[u64; 4], Error> {
+pub fn create_siphash_keys(header: &[u8]) -> Result<[u64; 4], Error> {
     let h = blake2b(32, &[], &header);
     let hb = h.as_bytes();
     let mut rdr = Cursor::new(hb);
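
Taking header: &[u8] and calling .to_owned() only on the branch that actually rewrites the bytes avoids forcing every caller to hand over (or clone) a whole Vec<u8>. A rough sketch of the shape of this change, with made-up stand-ins for the hashing and byte-writing parts:

    fn keys_from(header: &[u8]) -> u64 {
        // stand-in for hashing the header bytes
        header.iter().map(|b| u64::from(*b)).sum()
    }

    fn with_nonce(header: &[u8], nonce: Option<u32>) -> u64 {
        if let Some(n) = nonce {
            // Own (and copy) the bytes only on the branch that mutates them.
            let mut header = header.to_owned();
            header.extend_from_slice(&n.to_le_bytes());
            keys_from(&header)
        } else {
            keys_from(header)
        }
    }

    fn main() {
        let h = vec![1u8, 2, 3];
        assert_eq!(with_nonce(&h, None), 6);
        assert!(with_nonce(&h, Some(1)) > 6);
    }
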
@@ -163,7 +163,7 @@ where

     /// Reset the main keys used for siphash from the header and nonce
     pub fn reset_header_nonce(&mut self, header: Vec<u8>, nonce: Option<u32>) -> Result<(), Error> {
-        self.siphash_keys = set_header_nonce(header, nonce)?;
+        self.siphash_keys = set_header_nonce(&header, nonce)?;
         Ok(())
     }

@@ -175,7 +175,7 @@ where
         );
         let mut masked = hash_u64 & self.edge_mask.to_u64().ok_or(ErrorKind::IntegerCast)?;
         if shift {
-            masked = masked << 1;
+            masked <<= 1;
             masked |= uorv;
         }
         Ok(T::from(masked).ok_or(ErrorKind::IntegerCast)?)
@@ -54,14 +54,14 @@ where
     pub fn new(max_edges: T, max_sols: u32, proof_size: usize) -> Result<Graph<T>, Error> {
         let max_nodes = 2 * to_u64!(max_edges);
         Ok(Graph {
-            max_edges: max_edges,
-            max_nodes: max_nodes,
+            max_edges,
+            max_nodes,
+            max_sols,
+            proof_size,
             links: vec![],
             adj_list: vec![],
             visited: Bitmap::create(),
-            max_sols: max_sols,
             solutions: vec![],
-            proof_size: proof_size,
             nil: T::max_value(),
         })
     }
@@ -241,7 +241,7 @@ where

     /// Simple implementation of algorithm

-    pub fn find_cycles_iter<'a, I>(&mut self, iter: I) -> Result<Vec<Proof>, Error>
+    pub fn find_cycles_iter<I>(&mut self, iter: I) -> Result<Vec<Proof>, Error>
     where
         I: Iterator<Item = u64>,
     {
@@ -260,7 +260,7 @@ where
         for s in &self.graph.solutions {
             self.verify_impl(&s)?;
         }
-        if self.graph.solutions.len() == 0 {
+        if self.graph.solutions.is_empty() {
             Err(ErrorKind::NoSolution)?
         } else {
             Ok(self.graph.solutions.clone())
@@ -77,7 +77,7 @@ where
         let params = CuckooParams::new(edge_bits, proof_size)?;
         let num_nodes = 2 * params.num_edges as usize;
         Ok(CuckooContext {
-            params: params,
+            params,
             graph: vec![T::zero(); num_nodes],
             _max_sols: max_sols,
         })
@@ -190,7 +190,7 @@ where
         cycle.insert(Edge { u: us[0], v: vs[0] });
         while nu != 0 {
             // u's in even position; v's in odd
-            nu = nu - 1;
+            nu -= 1;
             cycle.insert(Edge {
                 u: us[((nu + 1) & !1) as usize],
                 v: us[(nu | 1) as usize],
@@ -214,11 +214,11 @@ where
                 cycle.remove(&edge);
             }
         }
-        return if n == self.params.proof_size {
+        if n == self.params.proof_size {
             Ok(sol)
         } else {
             Err(ErrorKind::NoCycle)?
-        };
+        }
     }

     /// Searches for a solution (simple implementation)
@@ -85,7 +85,7 @@ impl From<ErrorKind> for Error {

 impl From<Context<ErrorKind>> for Error {
     fn from(inner: Context<ErrorKind>) -> Error {
-        Error { inner: inner }
+        Error { inner }
     }
 }

@@ -37,7 +37,7 @@ impl Lean {

         // edge bitmap, before trimming all of them are on
         let mut edges = Bitmap::create_with_capacity(params.num_edges as u32);
-        edges.flip_inplace(0..params.num_edges.into());
+        edges.flip_inplace(0..params.num_edges);

         Lean { params, edges }
     }
@@ -77,7 +77,7 @@ pub fn mine_genesis_block() -> Result<Block, Error> {
     }

     // total_difficulty on the genesis header *is* the difficulty of that block
-    let genesis_difficulty = gen.header.pow.total_difficulty.clone();
+    let genesis_difficulty = gen.header.pow.total_difficulty;

     let sz = global::min_edge_bits();
     let proof_size = global::proofsize();
@@ -62,7 +62,7 @@ pub fn siphash24(v: &[u64; 4], nonce: u64) -> u64 {
     round!();
     round!();

-    return v0 ^ v1 ^ v2 ^ v3;
+    v0 ^ v1 ^ v2 ^ v3
 }

 #[cfg(test)]
@@ -92,10 +92,7 @@ impl error::Error for Error {
     fn description(&self) -> &str {
         match *self {
             Error::IOErr(ref e, _) => e,
-            Error::UnexpectedData {
-                expected: _,
-                received: _,
-            } => "unexpected data",
+            Error::UnexpectedData { .. } => "unexpected data",
             Error::CorruptedData => "corrupted data",
             Error::TooLargeReadErr => "too large read",
             Error::ConsensusError(_) => "consensus error (sort order)",
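
Error::UnexpectedData { .. } matches the variant while ignoring all of its fields, which is terser than spelling out expected: _, received: _. Sketch with a hypothetical error enum:

    enum Error {
        UnexpectedData { expected: Vec<u8>, received: Vec<u8> },
        CorruptedData,
    }

    fn describe(e: &Error) -> &'static str {
        match e {
            // `..` ignores every field of the variant without naming each one.
            Error::UnexpectedData { .. } => "unexpected data",
            Error::CorruptedData => "corrupted data",
        }
    }

    fn main() {
        let e = Error::UnexpectedData {
            expected: vec![0],
            received: vec![1],
        };
        assert_eq!(describe(&e), "unexpected data");
        assert_eq!(describe(&Error::CorruptedData), "corrupted data");
    }
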
@@ -231,13 +228,13 @@ where

 /// Deserializes a Readeable from any std::io::Read implementation.
 pub fn deserialize<T: Readable>(source: &mut Read) -> Result<T, Error> {
-    let mut reader = BinReader { source: source };
+    let mut reader = BinReader { source };
     T::read(&mut reader)
 }

 /// Serializes a Writeable into any std::io::Write implementation.
 pub fn serialize<W: Writeable>(sink: &mut Write, thing: &W) -> Result<(), Error> {
-    let mut writer = BinWriter { sink: sink };
+    let mut writer = BinWriter { sink };
     thing.write(&mut writer)
 }

@@ -319,9 +316,7 @@ impl Readable for Commitment {
     fn read(reader: &mut Reader) -> Result<Commitment, Error> {
         let a = reader.read_fixed_bytes(PEDERSEN_COMMITMENT_SIZE)?;
         let mut c = [0; PEDERSEN_COMMITMENT_SIZE];
-        for i in 0..PEDERSEN_COMMITMENT_SIZE {
-            c[i] = a[i];
-        }
+        c[..PEDERSEN_COMMITMENT_SIZE].clone_from_slice(&a[..PEDERSEN_COMMITMENT_SIZE]);
         Ok(Commitment(c))
     }
 }
@@ -368,9 +363,7 @@ impl Readable for RangeProof {
     fn read(reader: &mut Reader) -> Result<RangeProof, Error> {
         let p = reader.read_limited_vec(MAX_PROOF_SIZE)?;
         let mut a = [0; MAX_PROOF_SIZE];
-        for i in 0..p.len() {
-            a[i] = p[i];
-        }
+        a[..p.len()].clone_from_slice(&p[..]);
         Ok(RangeProof {
             proof: a,
             plen: p.len(),
@@ -388,9 +381,7 @@ impl Readable for Signature {
     fn read(reader: &mut Reader) -> Result<Signature, Error> {
         let a = reader.read_fixed_bytes(AGG_SIGNATURE_SIZE)?;
         let mut c = [0; AGG_SIGNATURE_SIZE];
-        for i in 0..AGG_SIGNATURE_SIZE {
-            c[i] = a[i];
-        }
+        c[..AGG_SIGNATURE_SIZE].clone_from_slice(&a[..AGG_SIGNATURE_SIZE]);
         Ok(Signature::from_raw_data(&c).unwrap())
     }
 }
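
The three Readable impls above replace a manual index loop with clone_from_slice, which copies a whole slice in one call and panics if the source and destination lengths differ (for plain u8 buffers copy_from_slice is equivalent). Minimal example:

    fn main() {
        let p = vec![1u8, 2, 3, 4];
        let mut a = [0u8; 8];

        // Replaces the manual loop: for i in 0..p.len() { a[i] = p[i]; }
        // Panics if the two slice lengths differ, so the ranges must match.
        a[..p.len()].clone_from_slice(&p[..]);

        assert_eq!(&a[..4], &[1, 2, 3, 4]);
        assert_eq!(&a[4..], &[0, 0, 0, 0]);
    }
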
@@ -577,81 +568,81 @@ pub trait AsFixedBytes: Sized + AsRef<[u8]> {

 impl<'a> AsFixedBytes for &'a [u8] {
     fn len(&self) -> usize {
-        return 1;
+        1
     }
 }
 impl AsFixedBytes for Vec<u8> {
     fn len(&self) -> usize {
-        return self.len();
+        self.len()
     }
 }
 impl AsFixedBytes for [u8; 1] {
     fn len(&self) -> usize {
-        return 1;
+        1
     }
 }
 impl AsFixedBytes for [u8; 2] {
     fn len(&self) -> usize {
-        return 2;
+        2
     }
 }
 impl AsFixedBytes for [u8; 4] {
     fn len(&self) -> usize {
-        return 4;
+        4
     }
 }
 impl AsFixedBytes for [u8; 6] {
     fn len(&self) -> usize {
-        return 6;
+        6
     }
 }
 impl AsFixedBytes for [u8; 8] {
     fn len(&self) -> usize {
-        return 8;
+        8
     }
 }
 impl AsFixedBytes for [u8; 20] {
     fn len(&self) -> usize {
-        return 20;
+        20
     }
 }
 impl AsFixedBytes for [u8; 32] {
     fn len(&self) -> usize {
-        return 32;
+        32
     }
 }
 impl AsFixedBytes for String {
     fn len(&self) -> usize {
-        return self.len();
+        self.len()
     }
 }
 impl AsFixedBytes for ::core::hash::Hash {
     fn len(&self) -> usize {
-        return 32;
+        32
     }
 }
 impl AsFixedBytes for ::util::secp::pedersen::RangeProof {
     fn len(&self) -> usize {
-        return self.plen;
+        self.plen
     }
 }
 impl AsFixedBytes for ::util::secp::Signature {
     fn len(&self) -> usize {
-        return 64;
+        64
     }
 }
 impl AsFixedBytes for ::util::secp::pedersen::Commitment {
     fn len(&self) -> usize {
-        return PEDERSEN_COMMITMENT_SIZE;
+        PEDERSEN_COMMITMENT_SIZE
     }
 }
 impl AsFixedBytes for BlindingFactor {
     fn len(&self) -> usize {
-        return SECRET_KEY_SIZE;
+        SECRET_KEY_SIZE
     }
 }
 impl AsFixedBytes for ::keychain::Identifier {
     fn len(&self) -> usize {
-        return IDENTIFIER_SIZE;
+        IDENTIFIER_SIZE
     }
 }
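
All of the return x; bodies above become bare tail expressions: the last expression of a Rust block is its value, so an explicit return is only needed for early exits (clippy's needless_return). The same reasoning removed the return if ... in the cuckoo hunk earlier. Example:

    fn double(x: u32) -> u32 {
        // The last expression of the block is the return value; no
        // `return` keyword or trailing semicolon is needed.
        x * 2
    }

    fn clamp_non_negative(x: i32) -> i32 {
        if x < 0 {
            return 0; // `return` is still the right tool for early exits
        }
        x
    }

    fn main() {
        assert_eq!(double(21), 42);
        assert_eq!(clamp_non_negative(-5), 0);
        assert_eq!(clamp_non_negative(5), 5);
    }
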
@@ -496,25 +496,31 @@ fn secondary_pow_scale() {
     // all primary, factor should increase so it becomes easier to find a high
     // difficulty block
     assert_eq!(
-        secondary_pow_scaling(1, &(0..window).map(|_| hi.clone()).collect()),
+        secondary_pow_scaling(1, &(0..window).map(|_| hi.clone()).collect::<Vec<_>>()),
         147
     );
     // all secondary on 90%, factor should go down a bit
     hi.is_secondary = true;
     assert_eq!(
-        secondary_pow_scaling(1, &(0..window).map(|_| hi.clone()).collect()),
+        secondary_pow_scaling(1, &(0..window).map(|_| hi.clone()).collect::<Vec<_>>()),
         94
     );
     // all secondary on 1%, factor should go down to bound (divide by 2)
     assert_eq!(
-        secondary_pow_scaling(890_000, &(0..window).map(|_| hi.clone()).collect()),
+        secondary_pow_scaling(
+            890_000,
+            &(0..window).map(|_| hi.clone()).collect::<Vec<_>>()
+        ),
         49
     );
     // same as above, testing lowest bound
     let mut low_hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), 3);
     low_hi.is_secondary = true;
     assert_eq!(
-        secondary_pow_scaling(890_000, &(0..window).map(|_| low_hi.clone()).collect()),
+        secondary_pow_scaling(
+            890_000,
+            &(0..window).map(|_| low_hi.clone()).collect::<Vec<_>>()
+        ),
         1
     );
     // just about the right ratio, also no longer playing with median
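
With secondary_pow_scaling now taking &[HeaderInfo], a bare .collect() in the tests no longer has a concrete container type to infer, so the calls pin it down with the ::<Vec<_>> turbofish. The same situation, reduced to plain integers:

    fn sum_slice(data: &[u64]) -> u64 {
        data.iter().sum()
    }

    fn main() {
        // With the parameter now a slice, a bare .collect() has no concrete
        // container type to infer, so the turbofish spells out Vec<_>.
        let total = sum_slice(&(0..4u64).map(|x| x * 2).collect::<Vec<_>>());
        assert_eq!(total, 12);
    }
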
@@ -525,7 +531,7 @@ fn secondary_pow_scale() {
             &(0..(window / 10))
                 .map(|_| primary_hi.clone())
                 .chain((0..(window * 9 / 10)).map(|_| hi.clone()))
-                .collect()
+                .collect::<Vec<_>>()
         ),
         94
     );
@@ -536,7 +542,7 @@ fn secondary_pow_scale() {
             &(0..(window / 20))
                 .map(|_| primary_hi.clone())
                 .chain((0..(window * 95 / 100)).map(|_| hi.clone()))
-                .collect()
+                .collect::<Vec<_>>()
         ),
         94
     );
@@ -547,7 +553,7 @@ fn secondary_pow_scale() {
             &(0..(window * 6 / 10))
                 .map(|_| primary_hi.clone())
                 .chain((0..(window * 4 / 10)).map(|_| hi.clone()))
-                .collect()
+                .collect::<Vec<_>>()
         ),
         84
     );