Mirror of https://github.com/mimblewimble/grin.git, synced 2025-02-01 17:01:09 +03:00
[T4] Secondary proof of work difficulty adjustments (#1709)
* First pass at secondary proof of work difficulty adjustments
* Core and chain test fixes
* Next difficulty calc now needs a height. Scaling calculation fixes. Setting scaling on mined block.
* Change factor to u32 instead of u64.
* Cleanup structs used by next_difficulty
* Fix header size calc with u32 scaling
parent e9f62b74d5
commit 43f4f92730
21 changed files with 376 additions and 301 deletions
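The heart of the change, condensed from the chain and consensus hunks below: `next_difficulty` now takes the height the next block will be built at, no longer returns a `Result`, and yields a `HeaderInfo` carrying both the next difficulty and the secondary PoW scaling factor. Header validation checks both against the incoming header. A fragment in the shape of the new pipe code (not self-contained; the types are the ones introduced in this diff):

    let next_header_info = consensus::next_difficulty(header.height, diff_iter);
    if target_difficulty != next_header_info.difficulty {
        return Err(ErrorKind::WrongTotalDifficulty.into());
    }
    // the header must commit to the scaling factor consensus expects
    if header.pow.scaling_difficulty != next_header_info.secondary_scaling {
        return Err(ErrorKind::InvalidScaling.into());
    }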
@@ -45,9 +45,9 @@ pub enum ErrorKind {
     /// Addition of difficulties on all previous block is wrong
     #[fail(display = "Addition of difficulties on all previous blocks is wrong")]
     WrongTotalDifficulty,
-    /// Block header sizeshift is lower than our min
-    #[fail(display = "Cuckoo Size too Low")]
-    LowSizeshift,
+    /// Block header sizeshift is incorrect
+    #[fail(display = "Cuckoo size shift is invalid")]
+    InvalidSizeshift,
     /// Scaling factor between primary and secondary PoW is invalid
     #[fail(display = "Wrong scaling factor")]
     InvalidScaling,
@@ -36,8 +36,6 @@ use txhashset;
 use types::{Options, Tip};
 use util::LOGGER;

-use failure::ResultExt;
-
 /// Contextual information required to process a new block and either reject or
 /// accept it.
 pub struct BlockContext<'a> {
@@ -364,16 +362,10 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
     }

     if !ctx.opts.contains(Options::SKIP_POW) {
+        if !header.pow.is_primary() && !header.pow.is_secondary() {
+            return Err(ErrorKind::InvalidSizeshift.into());
+        }
         let shift = header.pow.cuckoo_sizeshift();
-        // size shift can either be larger than the minimum on the primary PoW
-        // or equal to the seconday PoW size shift
-        if shift != consensus::SECOND_POW_SIZESHIFT && global::min_sizeshift() > shift {
-            return Err(ErrorKind::LowSizeshift.into());
-        }
-        // primary PoW must have a scaling factor of 1
-        if shift != consensus::SECOND_POW_SIZESHIFT && header.pow.scaling_difficulty != 1 {
-            return Err(ErrorKind::InvalidScaling.into());
-        }
         if !(ctx.pow_verifier)(header, shift).is_ok() {
             error!(
                 LOGGER,
@@ -435,17 +427,20 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
         // (during testnet1 we use _block_ difficulty here)
         let child_batch = ctx.batch.child()?;
         let diff_iter = store::DifficultyIter::from_batch(header.previous, child_batch);
-        let network_difficulty = consensus::next_difficulty(diff_iter)
-            .context(ErrorKind::Other("network difficulty".to_owned()))?;
-        if target_difficulty != network_difficulty.clone() {
-            error!(
+        let next_header_info = consensus::next_difficulty(header.height, diff_iter);
+        if target_difficulty != next_header_info.difficulty {
+            info!(
                 LOGGER,
                 "validate_header: header target difficulty {} != {}",
                 target_difficulty.to_num(),
-                network_difficulty.to_num()
+                next_header_info.difficulty.to_num()
             );
             return Err(ErrorKind::WrongTotalDifficulty.into());
         }
+        // check the secondary PoW scaling factor if applicable
+        if header.pow.scaling_difficulty != next_header_info.secondary_scaling {
+            return Err(ErrorKind::InvalidScaling.into());
+        }
     }

     Ok(())
@@ -454,10 +449,8 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
 fn validate_block(block: &Block, ctx: &mut BlockContext) -> Result<(), Error> {
     let prev = ctx.batch.get_block_header(&block.header.previous)?;
     block
-        .validate(
-            &prev.total_kernel_offset,
-            ctx.verifier_cache.clone(),
-        ).map_err(|e| ErrorKind::InvalidBlockProof(e))?;
+        .validate(&prev.total_kernel_offset, ctx.verifier_cache.clone())
+        .map_err(|e| ErrorKind::InvalidBlockProof(e))?;
     Ok(())
 }

@@ -22,7 +22,7 @@ use lru_cache::LruCache;

 use util::secp::pedersen::Commitment;

-use core::consensus::TargetError;
+use core::consensus::HeaderInfo;
 use core::core::hash::{Hash, Hashed};
 use core::core::{Block, BlockHeader, BlockSums};
 use core::pow::Difficulty;
@@ -613,7 +613,7 @@ impl<'a> DifficultyIter<'a> {
 }

 impl<'a> Iterator for DifficultyIter<'a> {
-    type Item = Result<(u64, Difficulty), TargetError>;
+    type Item = HeaderInfo;

     fn next(&mut self) -> Option<Self::Item> {
         // Get both header and previous_header if this is the initial iteration.
@@ -650,8 +650,14 @@ impl<'a> Iterator for DifficultyIter<'a> {
                 .clone()
                 .map_or(Difficulty::zero(), |x| x.total_difficulty());
             let difficulty = header.total_difficulty() - prev_difficulty;
+            let scaling = header.pow.scaling_difficulty;

-            Some(Ok((header.timestamp.timestamp() as u64, difficulty)))
+            Some(HeaderInfo::new(
+                header.timestamp.timestamp() as u64,
+                difficulty,
+                scaling,
+                header.pow.is_secondary(),
+            ))
         } else {
             return None;
         }
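Consumers change accordingly: each item yielded by `DifficultyIter` is now a plain `HeaderInfo` rather than a `Result<(u64, Difficulty), TargetError>`, so callers read named fields instead of unwrapping positional tuples. A fragment modeled on the `actual_diff_iter_output` test further down:

    for elem in chain.difficulty_iter() {
        // no .unwrap() and no .0/.1 tuple access any more
        println!("time: {}, diff: {}", elem.timestamp, elem.difficulty.to_num());
    }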
@@ -82,17 +82,19 @@ fn data_files() {

     for n in 1..4 {
         let prev = chain.head_header().unwrap();
-        let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
+        let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter());
         let pk = ExtKeychainPath::new(1, n as u32, 0, 0, 0).to_identifier();
         let reward = libtx::reward::output(&keychain, &pk, 0, prev.height).unwrap();
-        let mut b = core::core::Block::new(&prev, vec![], difficulty.clone(), reward).unwrap();
+        let mut b =
+            core::core::Block::new(&prev, vec![], next_header_info.clone().difficulty, reward)
+                .unwrap();
         b.header.timestamp = prev.timestamp + Duration::seconds(60);

         chain.set_txhashset_roots(&mut b, false).unwrap();

         pow::pow_size(
             &mut b.header,
-            difficulty,
+            next_header_info.difficulty,
             global::proofsize(),
             global::min_sizeshift(),
         ).unwrap();
@@ -64,10 +64,12 @@ fn mine_empty_chain() {

     for n in 1..4 {
         let prev = chain.head_header().unwrap();
-        let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
+        let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter());
         let pk = ExtKeychainPath::new(1, n as u32, 0, 0, 0).to_identifier();
         let reward = libtx::reward::output(&keychain, &pk, 0, prev.height).unwrap();
-        let mut b = core::core::Block::new(&prev, vec![], difficulty.clone(), reward).unwrap();
+        let mut b =
+            core::core::Block::new(&prev, vec![], next_header_info.clone().difficulty, reward)
+                .unwrap();
         b.header.timestamp = prev.timestamp + Duration::seconds(60);

         chain.set_txhashset_roots(&mut b, false).unwrap();
@@ -78,7 +80,12 @@ fn mine_empty_chain() {
             global::min_sizeshift()
         };
         b.header.pow.proof.cuckoo_sizeshift = sizeshift;
-        pow::pow_size(&mut b.header, difficulty, global::proofsize(), sizeshift).unwrap();
+        pow::pow_size(
+            &mut b.header,
+            next_header_info.difficulty,
+            global::proofsize(),
+            sizeshift,
+        ).unwrap();
         b.header.pow.proof.cuckoo_sizeshift = sizeshift;

         let bhash = b.hash();
@@ -379,11 +386,13 @@ fn output_header_mappings() {

     for n in 1..15 {
         let prev = chain.head_header().unwrap();
-        let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
+        let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter());
         let pk = ExtKeychainPath::new(1, n as u32, 0, 0, 0).to_identifier();
         let reward = libtx::reward::output(&keychain, &pk, 0, prev.height).unwrap();
         reward_outputs.push(reward.0.clone());
-        let mut b = core::core::Block::new(&prev, vec![], difficulty.clone(), reward).unwrap();
+        let mut b =
+            core::core::Block::new(&prev, vec![], next_header_info.clone().difficulty, reward)
+                .unwrap();
         b.header.timestamp = prev.timestamp + Duration::seconds(60);

         chain.set_txhashset_roots(&mut b, false).unwrap();
@@ -394,7 +403,12 @@ fn output_header_mappings() {
             global::min_sizeshift()
         };
         b.header.pow.proof.cuckoo_sizeshift = sizeshift;
-        pow::pow_size(&mut b.header, difficulty, global::proofsize(), sizeshift).unwrap();
+        pow::pow_size(
+            &mut b.header,
+            next_header_info.difficulty,
+            global::proofsize(),
+            sizeshift,
+        ).unwrap();
         b.header.pow.proof.cuckoo_sizeshift = sizeshift;

         chain.process_block(b, chain::Options::MINE).unwrap();
@@ -506,18 +520,17 @@ fn actual_diff_iter_output() {
     let iter = chain.difficulty_iter();
     let mut last_time = 0;
     let mut first = true;
-    for i in iter.into_iter() {
-        let elem = i.unwrap();
+    for elem in iter.into_iter() {
         if first {
-            last_time = elem.0;
+            last_time = elem.timestamp;
             first = false;
         }
         println!(
             "next_difficulty time: {}, diff: {}, duration: {} ",
-            elem.0,
-            elem.1.to_num(),
-            last_time - elem.0
+            elem.timestamp,
+            elem.difficulty.to_num(),
+            last_time - elem.timestamp
         );
-        last_time = elem.0;
+        last_time = elem.timestamp;
     }
 }
@@ -72,13 +72,13 @@ fn test_coinbase_maturity() {
     let mut block = core::core::Block::new(&prev, vec![], Difficulty::one(), reward).unwrap();
     block.header.timestamp = prev.timestamp + Duration::seconds(60);

-    let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
+    let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter());

     chain.set_txhashset_roots(&mut block, false).unwrap();

     pow::pow_size(
         &mut block.header,
-        difficulty,
+        next_header_info.difficulty,
         global::proofsize(),
         global::min_sizeshift(),
     ).unwrap();
@@ -119,7 +119,7 @@ fn test_coinbase_maturity() {
     let mut block = core::core::Block::new(&prev, txs, Difficulty::one(), reward).unwrap();
     block.header.timestamp = prev.timestamp + Duration::seconds(60);

-    let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
+    let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter());

     chain.set_txhashset_roots(&mut block, false).unwrap();

@@ -135,7 +135,7 @@ fn test_coinbase_maturity() {

     pow::pow_size(
         &mut block.header,
-        difficulty,
+        next_header_info.difficulty,
         global::proofsize(),
         global::min_sizeshift(),
     ).unwrap();
@@ -152,13 +152,13 @@ fn test_coinbase_maturity() {
     let mut block = core::core::Block::new(&prev, vec![], Difficulty::one(), reward).unwrap();
     block.header.timestamp = prev.timestamp + Duration::seconds(60);

-    let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
+    let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter());

     chain.set_txhashset_roots(&mut block, false).unwrap();

     pow::pow_size(
         &mut block.header,
-        difficulty,
+        next_header_info.difficulty,
         global::proofsize(),
         global::min_sizeshift(),
     ).unwrap();
@@ -179,13 +179,13 @@ fn test_coinbase_maturity() {

     block.header.timestamp = prev.timestamp + Duration::seconds(60);

-    let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
+    let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter());

     chain.set_txhashset_roots(&mut block, false).unwrap();

     pow::pow_size(
         &mut block.header,
-        difficulty,
+        next_header_info.difficulty,
         global::proofsize(),
         global::min_sizeshift(),
     ).unwrap();
@@ -51,6 +51,14 @@ pub const BLOCK_TIME_SEC: u64 = 60;
 /// set to nominal number of block in one day (1440 with 1-minute blocks)
 pub const COINBASE_MATURITY: u64 = 24 * 60 * 60 / BLOCK_TIME_SEC;

+/// Ratio the secondary proof of work should take over the primary, as a
+/// function of block height (time). Starts at 90% losing a percent
+/// approximately every week (10000 blocks). Represented as an integer
+/// between 0 and 100.
+pub fn secondary_pow_ratio(height: u64) -> u64 {
+    90u64.saturating_sub(height / 10000)
+}
+
 /// Cuckoo-cycle proof size (cycle length)
 pub const PROOFSIZE: usize = 42;

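A quick worked pass over the schedule `secondary_pow_ratio` defines. The sketch below is standalone (it copies the one-line body from the hunk above); the asserted values follow directly from integer division plus saturation:

    /// Copy of consensus::secondary_pow_ratio from the hunk above.
    fn secondary_pow_ratio(height: u64) -> u64 {
        90u64.saturating_sub(height / 10000)
    }

    fn main() {
        assert_eq!(secondary_pow_ratio(0), 90); // launch: 90% secondary
        assert_eq!(secondary_pow_ratio(10_000), 89); // minus one percent per ~week
        assert_eq!(secondary_pow_ratio(450_000), 45); // half the schedule
        assert_eq!(secondary_pow_ratio(900_000), 0); // saturates at zero
        assert_eq!(secondary_pow_ratio(1_000_000), 0); // and stays there
    }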
@@ -108,15 +116,15 @@ pub const HARD_FORK_INTERVAL: u64 = 250_000;
 /// 6 months interval scheduled hard forks for the first 2 years.
 pub fn valid_header_version(height: u64, version: u16) -> bool {
     // uncomment below as we go from hard fork to hard fork
-    if height < HEADER_V2_HARD_FORK {
+    if height < HARD_FORK_INTERVAL {
         version == 1
-    } else if height < HARD_FORK_INTERVAL {
+    /* } else if height < 2 * HARD_FORK_INTERVAL {
         version == 2
-    } else if height < 2 * HARD_FORK_INTERVAL {
+    } else if height < 3 * HARD_FORK_INTERVAL {
         version == 3
-    /* } else if height < 3 * HARD_FORK_INTERVAL {
-        version == 4 */
-    /* } else if height >= 4 * HARD_FORK_INTERVAL {
+    } else if height < 4 * HARD_FORK_INTERVAL {
+        version == 4
+    } else if height >= 5 * HARD_FORK_INTERVAL {
         version > 4 */
     } else {
         false
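With `HEADER_V2_HARD_FORK` removed (see the end of the consensus hunks below), only the first branch of the schedule is live; the later forks stay commented out until needed. A standalone sketch of the resulting behavior:

    // Mirrors the live branch of valid_header_version after this hunk.
    fn valid_header_version(height: u64, version: u16) -> bool {
        const HARD_FORK_INTERVAL: u64 = 250_000;
        if height < HARD_FORK_INTERVAL {
            version == 1
        } else {
            false
        }
    }

    fn main() {
        assert!(valid_header_version(0, 1));
        assert!(valid_header_version(249_999, 1));
        // nothing validates at the first fork until a branch is uncommented
        assert!(!valid_header_version(250_000, 1));
        assert!(!valid_header_version(250_000, 2));
    }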
@@ -164,20 +172,62 @@ impl fmt::Display for Error {
     }
 }

-/// Error when computing the next difficulty adjustment.
-#[derive(Debug, Clone, Fail)]
-pub struct TargetError(pub String);
+/// Minimal header information required for the Difficulty calculation to
+/// take place
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct HeaderInfo {
+    /// Timestamp of the header, 1 when not used (returned info)
+    pub timestamp: u64,
+    /// Network difficulty or next difficulty to use
+    pub difficulty: Difficulty,
+    /// Network secondary PoW factor or factor to use
+    pub secondary_scaling: u32,
+    /// Whether the header is a secondary proof of work
+    pub is_secondary: bool,
+}

-impl fmt::Display for TargetError {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "Error computing new difficulty: {}", self.0)
+impl HeaderInfo {
+    /// Default constructor
+    pub fn new(
+        timestamp: u64,
+        difficulty: Difficulty,
+        secondary_scaling: u32,
+        is_secondary: bool,
+    ) -> HeaderInfo {
+        HeaderInfo {
+            timestamp,
+            difficulty,
+            secondary_scaling,
+            is_secondary,
+        }
+    }
+
+    /// Constructor from a timestamp and difficulty, setting a default secondary
+    /// PoW factor
+    pub fn from_ts_diff(timestamp: u64, difficulty: Difficulty) -> HeaderInfo {
+        HeaderInfo {
+            timestamp,
+            difficulty,
+            secondary_scaling: 1,
+            is_secondary: false,
+        }
+    }
+
+    /// Constructor from a difficulty and secondary factor, setting a default
+    /// timestamp
+    pub fn from_diff_scaling(difficulty: Difficulty, secondary_scaling: u32) -> HeaderInfo {
+        HeaderInfo {
+            timestamp: 1,
+            difficulty,
+            secondary_scaling,
+            is_secondary: false,
+        }
     }
 }

 /// Computes the proof-of-work difficulty that the next block should comply
-/// with. Takes an iterator over past blocks, from latest (highest height) to
-/// oldest (lowest height). The iterator produces pairs of timestamp and
-/// difficulty for each block.
+/// with. Takes an iterator over past block headers information, from latest
+/// (highest height) to oldest (lowest height).
 ///
 /// The difficulty calculation is based on both Digishield and GravityWave
 /// family of difficulty computation, coming to something very close to Zcash.
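The three constructors encode the two directions a `HeaderInfo` travels: `from_ts_diff` wraps data coming out of the chain (scaling defaults to 1, primary), while `from_diff_scaling` builds the value `next_difficulty` hands back (timestamp is the documented placeholder 1). A test-style sketch assuming the types above are in scope; the concrete numbers are arbitrary:

    #[test]
    fn header_info_defaults() {
        // chain-side constructor: scaling factor defaults to 1, primary PoW
        let hi = HeaderInfo::from_ts_diff(1_539_000_000, Difficulty::from_num(1000));
        assert_eq!(hi.secondary_scaling, 1);
        assert!(!hi.is_secondary);
        // return-side constructor: placeholder timestamp of 1
        let hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(1000), 7);
        assert_eq!(hi.timestamp, 1);
    }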
@@ -185,9 +235,12 @@ impl fmt::Display for TargetError {
 /// DIFFICULTY_ADJUST_WINDOW blocks. The corresponding timespan is calculated
 /// by using the difference between the median timestamps at the beginning
 /// and the end of the window.
-pub fn next_difficulty<T>(cursor: T) -> Result<Difficulty, TargetError>
+///
+/// The secondary proof-of-work factor is calculated along the same lines, as
+/// an adjustment on the deviation against the ideal value.
+pub fn next_difficulty<T>(height: u64, cursor: T) -> HeaderInfo
 where
-    T: IntoIterator<Item = Result<(u64, Difficulty), TargetError>>,
+    T: IntoIterator<Item = HeaderInfo>,
 {
     // Create vector of difficulty data running from earliest
     // to latest, and pad with simulated pre-genesis data to allow earlier
|
@ -195,27 +248,20 @@ where
|
||||||
// length will be DIFFICULTY_ADJUST_WINDOW+MEDIAN_TIME_WINDOW
|
// length will be DIFFICULTY_ADJUST_WINDOW+MEDIAN_TIME_WINDOW
|
||||||
let diff_data = global::difficulty_data_to_vector(cursor);
|
let diff_data = global::difficulty_data_to_vector(cursor);
|
||||||
|
|
||||||
|
// First, get the ratio of secondary PoW vs primary
|
||||||
|
let sec_pow_scaling = secondary_pow_scaling(height, &diff_data);
|
||||||
|
|
||||||
// Obtain the median window for the earlier time period
|
// Obtain the median window for the earlier time period
|
||||||
// the first MEDIAN_TIME_WINDOW elements
|
// the first MEDIAN_TIME_WINDOW elements
|
||||||
let mut window_earliest: Vec<u64> = diff_data
|
let earliest_ts = time_window_median(&diff_data, 0, MEDIAN_TIME_WINDOW as usize);
|
||||||
.iter()
|
|
||||||
.take(MEDIAN_TIME_WINDOW as usize)
|
|
||||||
.map(|n| n.clone().unwrap().0)
|
|
||||||
.collect();
|
|
||||||
// pick median
|
|
||||||
window_earliest.sort();
|
|
||||||
let earliest_ts = window_earliest[MEDIAN_TIME_INDEX as usize];
|
|
||||||
|
|
||||||
// Obtain the median window for the latest time period
|
// Obtain the median window for the latest time period
|
||||||
// i.e. the last MEDIAN_TIME_WINDOW elements
|
// i.e. the last MEDIAN_TIME_WINDOW elements
|
||||||
let mut window_latest: Vec<u64> = diff_data
|
let latest_ts = time_window_median(
|
||||||
.iter()
|
&diff_data,
|
||||||
.skip(DIFFICULTY_ADJUST_WINDOW as usize)
|
DIFFICULTY_ADJUST_WINDOW as usize,
|
||||||
.map(|n| n.clone().unwrap().0)
|
MEDIAN_TIME_WINDOW as usize,
|
||||||
.collect();
|
);
|
||||||
// pick median
|
|
||||||
window_latest.sort();
|
|
||||||
let latest_ts = window_latest[MEDIAN_TIME_INDEX as usize];
|
|
||||||
|
|
||||||
// median time delta
|
// median time delta
|
||||||
let ts_delta = latest_ts - earliest_ts;
|
let ts_delta = latest_ts - earliest_ts;
|
||||||
|
@@ -224,7 +270,7 @@ where
     let diff_sum = diff_data
         .iter()
         .skip(MEDIAN_TIME_WINDOW as usize)
-        .fold(0, |sum, d| sum + d.clone().unwrap().1.to_num());
+        .fold(0, |sum, d| sum + d.difficulty.to_num());

     // Apply dampening except when difficulty is near 1
     let ts_damp = if diff_sum < DAMP_FACTOR * DIFFICULTY_ADJUST_WINDOW {
@@ -242,9 +288,49 @@ where
         ts_damp
     };

-    let difficulty = diff_sum * BLOCK_TIME_SEC / adj_ts;
+    let difficulty = max(diff_sum * BLOCK_TIME_SEC / adj_ts, 1);

-    Ok(Difficulty::from_num(max(difficulty, 1)))
+    HeaderInfo::from_diff_scaling(Difficulty::from_num(difficulty), sec_pow_scaling)
+}
+
+/// Factor by which the secondary proof of work difficulty will be adjusted
+fn secondary_pow_scaling(height: u64, diff_data: &Vec<HeaderInfo>) -> u32 {
+    // median of past scaling factors, scaling is 1 if none found
+    let mut scalings = diff_data
+        .iter()
+        .map(|n| n.secondary_scaling)
+        .collect::<Vec<_>>();
+    if scalings.len() == 0 {
+        return 1;
+    }
+    scalings.sort();
+    let scaling_median = scalings[scalings.len() / 2] as u64;
+    let secondary_count = diff_data.iter().filter(|n| n.is_secondary).count() as u64;
+
+    // what's the ideal ratio at the current height
+    let ratio = secondary_pow_ratio(height);
+
+    // adjust the past median based on ideal ratio vs actual ratio
+    let scaling = scaling_median * secondary_count * 100 / ratio / diff_data.len() as u64;
+    if scaling == 0 {
+        1
+    } else {
+        scaling as u32
+    }
+}
+
+/// Median timestamp within the time window starting at `from` with the
+/// provided `length`.
+fn time_window_median(diff_data: &Vec<HeaderInfo>, from: usize, length: usize) -> u64 {
+    let mut window_latest: Vec<u64> = diff_data
+        .iter()
+        .skip(from)
+        .take(length)
+        .map(|n| n.timestamp)
+        .collect();
+    // pick median
+    window_latest.sort();
+    window_latest[MEDIAN_TIME_INDEX as usize]
 }

 /// Consensus rule that collections of items are sorted lexicographically.
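The adjustment in `secondary_pow_scaling` boils down to one integer expression: the median past factor, scaled by the observed secondary share over the ideal share. A standalone worked example (the window size and factor values are made up for illustration):

    // scaling_median * secondary_count * 100 / ratio / window, floored at 1,
    // as in secondary_pow_scaling above.
    fn adjust(scaling_median: u64, secondary_count: u64, window: u64, ratio: u64) -> u64 {
        let scaling = scaling_median * secondary_count * 100 / ratio / window;
        if scaling == 0 {
            1
        } else {
            scaling
        }
    }

    fn main() {
        // ideal ratio of 90% over a window of 60 headers
        assert_eq!(adjust(10, 54, 60, 90), 10); // 90% observed: unchanged
        assert_eq!(adjust(10, 60, 60, 90), 11); // all secondary: factor rises
        assert_eq!(adjust(10, 27, 60, 90), 5); // 45% observed: factor halves
        assert_eq!(adjust(10, 0, 60, 90), 1); // none observed: floored at 1
    }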
@@ -252,6 +338,3 @@ pub trait VerifySortOrder<T> {
     /// Verify a collection of items is sorted as required.
     fn verify_sort_order(&self) -> Result<(), Error>;
 }
-
-/// Height for the v2 headers hard fork, with extended proof of work in header
-pub const HEADER_V2_HARD_FORK: u64 = 95_000;
@@ -139,7 +139,7 @@ pub struct BlockHeader {
 }

 /// Serialized size of fixed part of a BlockHeader, i.e. without pow
-fn fixed_size_of_serialized_header(version: u16) -> usize {
+fn fixed_size_of_serialized_header(_version: u16) -> usize {
     let mut size: usize = 0;
     size += mem::size_of::<u16>(); // version
     size += mem::size_of::<u64>(); // height
@@ -152,9 +152,7 @@ fn fixed_size_of_serialized_header(version: u16) -> usize {
     size += mem::size_of::<u64>(); // output_mmr_size
     size += mem::size_of::<u64>(); // kernel_mmr_size
     size += mem::size_of::<Difficulty>(); // total_difficulty
-    if version >= 2 {
-        size += mem::size_of::<u64>(); // scaling_difficulty
-    }
+    size += mem::size_of::<u32>(); // scaling_difficulty
     size += mem::size_of::<u64>(); // nonce
     size
 }
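This is the "fix header size calc with u32 scaling" item from the commit message: the scaling factor becomes a fixed 4-byte field of every header instead of a conditional 8-byte field of v2 headers. It is also why every serialized-size expectation in the core block tests near the end of this diff grows by exactly four bytes:

    use std::mem;

    fn main() {
        // version-1 test blocks previously carried no scaling field at all;
        // they now carry a u32 one (target lengths from the tests below)
        assert_eq!(mem::size_of::<u32>(), 4);
        assert_eq!(1_224 + 4, 1_228); // empty block
        assert_eq!(2_806 + 4, 2_810); // block with a single tx
    }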
@@ -208,19 +206,12 @@ impl Readable for BlockHeader {
         let (version, height) = ser_multiread!(reader, read_u16, read_u64);
         let previous = Hash::read(reader)?;
         let timestamp = reader.read_i64()?;
-        let mut total_difficulty = None;
-        if version == 1 {
-            total_difficulty = Some(Difficulty::read(reader)?);
-        }
         let output_root = Hash::read(reader)?;
         let range_proof_root = Hash::read(reader)?;
         let kernel_root = Hash::read(reader)?;
         let total_kernel_offset = BlindingFactor::read(reader)?;
         let (output_mmr_size, kernel_mmr_size) = ser_multiread!(reader, read_u64, read_u64);
-        let mut pow = ProofOfWork::read(version, reader)?;
-        if version == 1 {
-            pow.total_difficulty = total_difficulty.unwrap();
-        }
+        let pow = ProofOfWork::read(version, reader)?;

         if timestamp > MAX_DATE.and_hms(0, 0, 0).timestamp()
             || timestamp < MIN_DATE.and_hms(0, 0, 0).timestamp()
@@ -254,10 +245,6 @@ impl BlockHeader {
         [write_fixed_bytes, &self.previous],
         [write_i64, self.timestamp.timestamp()]
     );
-    if self.version == 1 {
-        // written as part of the ProofOfWork in later versions
-        writer.write_u64(self.pow.total_difficulty.to_num())?;
-    }
     ser_multiwrite!(
         writer,
         [write_fixed_bytes, &self.output_root],
@@ -501,18 +488,11 @@ impl Block {
         let now = Utc::now().timestamp();
         let timestamp = DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(now, 0), Utc);

-        let version = if prev.height + 1 < consensus::HEADER_V2_HARD_FORK {
-            1
-        } else {
-            2
-        };
-
         // Now build the block with all the above information.
         // Note: We have not validated the block here.
         // Caller must validate the block as necessary.
         Block {
             header: BlockHeader {
-                version,
                 height: prev.height + 1,
                 timestamp,
                 previous: prev.hash(),
@@ -16,13 +16,14 @@
 //! having to pass them all over the place, but aren't consensus values.
 //! should be used sparingly.

-use consensus::TargetError;
+use consensus::HeaderInfo;
 use consensus::{
     BLOCK_TIME_SEC, COINBASE_MATURITY, CUT_THROUGH_HORIZON, DEFAULT_MIN_SIZESHIFT,
     DIFFICULTY_ADJUST_WINDOW, EASINESS, INITIAL_DIFFICULTY, MEDIAN_TIME_WINDOW, PROOFSIZE,
     REFERENCE_SIZESHIFT,
 };
-use pow::{self, CuckatooContext, Difficulty, EdgeType, PoWContext};
+use pow::{self, CuckatooContext, EdgeType, PoWContext};

 /// An enum collecting sets of parameters used throughout the
 /// code wherever mining is needed. This should allow for
 /// different sets of parameters for different purposes,
@@ -260,14 +261,13 @@ pub fn get_genesis_nonce() -> u64 {
 /// vector and pads if needed (which will) only be needed for the first few
 /// blocks after genesis

-pub fn difficulty_data_to_vector<T>(cursor: T) -> Vec<Result<(u64, Difficulty), TargetError>>
+pub fn difficulty_data_to_vector<T>(cursor: T) -> Vec<HeaderInfo>
 where
-    T: IntoIterator<Item = Result<(u64, Difficulty), TargetError>>,
+    T: IntoIterator<Item = HeaderInfo>,
 {
     // Convert iterator to vector, so we can append to it if necessary
     let needed_block_count = (MEDIAN_TIME_WINDOW + DIFFICULTY_ADJUST_WINDOW) as usize;
-    let mut last_n: Vec<Result<(u64, Difficulty), TargetError>> =
-        cursor.into_iter().take(needed_block_count).collect();
+    let mut last_n: Vec<HeaderInfo> = cursor.into_iter().take(needed_block_count).collect();

     // Sort blocks from earliest to latest (to keep conceptually easier)
     last_n.reverse();
@@ -277,16 +277,17 @@ where
     let block_count_difference = needed_block_count - last_n.len();
     if block_count_difference > 0 {
         // Collect any real data we have
-        let mut live_intervals: Vec<(u64, Difficulty)> = last_n
+        let mut live_intervals: Vec<HeaderInfo> = last_n
             .iter()
-            .map(|b| (b.clone().unwrap().0, b.clone().unwrap().1))
+            .map(|b| HeaderInfo::from_ts_diff(b.timestamp, b.difficulty))
             .collect();
         for i in (1..live_intervals.len()).rev() {
             // prevents issues with very fast automated test chains
-            if live_intervals[i - 1].0 > live_intervals[i].0 {
-                live_intervals[i].0 = 0;
+            if live_intervals[i - 1].timestamp > live_intervals[i].timestamp {
+                live_intervals[i].timestamp = 0;
             } else {
-                live_intervals[i].0 = live_intervals[i].0 - live_intervals[i - 1].0;
+                live_intervals[i].timestamp =
+                    live_intervals[i].timestamp - live_intervals[i - 1].timestamp;
             }
         }
         // Remove genesis "interval"
@@ -294,16 +295,16 @@ where
             live_intervals.remove(0);
         } else {
             //if it's just genesis, adjust the interval
-            live_intervals[0].0 = BLOCK_TIME_SEC;
+            live_intervals[0].timestamp = BLOCK_TIME_SEC;
         }
         let mut interval_index = live_intervals.len() - 1;
-        let mut last_ts = last_n.first().as_ref().unwrap().as_ref().unwrap().0;
-        let last_diff = live_intervals[live_intervals.len() - 1].1;
+        let mut last_ts = last_n.first().unwrap().timestamp;
+        let last_diff = live_intervals[live_intervals.len() - 1].difficulty;
         // fill in simulated blocks with values from the previous real block

         for _ in 0..block_count_difference {
-            last_ts = last_ts.saturating_sub(live_intervals[live_intervals.len() - 1].0);
-            last_n.insert(0, Ok((last_ts, last_diff.clone())));
+            last_ts = last_ts.saturating_sub(live_intervals[live_intervals.len() - 1].timestamp);
+            last_n.insert(0, HeaderInfo::from_ts_diff(last_ts, last_diff.clone()));
             interval_index = match interval_index {
                 0 => live_intervals.len() - 1,
                 _ => interval_index - 1,
@@ -88,8 +88,6 @@ impl Lean {
 #[cfg(test)]
 mod test {
     use super::*;
-    use pow::common;
-    use pow::cuckatoo::*;
     use pow::types::PoWContext;

     #[test]
@@ -84,7 +84,7 @@ impl Difficulty {
     /// Computes the difficulty from a hash. Divides the maximum target by the
     /// provided hash and applies the Cuckoo sizeshift adjustment factor (see
     /// https://lists.launchpad.net/mimblewimble/msg00494.html).
-    pub fn from_proof_adjusted(proof: &Proof) -> Difficulty {
+    fn from_proof_adjusted(proof: &Proof) -> Difficulty {
         // Adjust the difficulty based on a 2^(N-M)*(N-1) factor, with M being
         // the minimum sizeshift and N the provided sizeshift
         let shift = proof.cuckoo_sizeshift;
@@ -96,9 +96,9 @@ impl Difficulty {
     /// Same as `from_proof_adjusted` but instead of an adjustment based on
     /// cycle size, scales based on a provided factor. Used by dual PoW system
     /// to scale one PoW against the other.
-    pub fn from_proof_scaled(proof: &Proof, scaling: u64) -> Difficulty {
+    fn from_proof_scaled(proof: &Proof, scaling: u32) -> Difficulty {
         // Scaling between 2 proof of work algos
-        Difficulty::from_num(proof.raw_difficulty() * scaling)
+        Difficulty::from_num(proof.raw_difficulty() * scaling as u64)
     }

     /// Converts the difficulty into a u64
@@ -219,7 +219,7 @@ pub struct ProofOfWork {
     /// Total accumulated difficulty since genesis block
     pub total_difficulty: Difficulty,
     /// Difficulty scaling factor between the different proofs of work
-    pub scaling_difficulty: u64,
+    pub scaling_difficulty: u32,
     /// Nonce increment used to mine this block.
     pub nonce: u64,
     /// Proof of work data.
@@ -240,13 +240,9 @@ impl Default for ProofOfWork {

 impl ProofOfWork {
     /// Read implementation, can't define as trait impl as we need a version
-    pub fn read(ver: u16, reader: &mut Reader) -> Result<ProofOfWork, ser::Error> {
-        let (total_difficulty, scaling_difficulty) = if ver == 1 {
-            // read earlier in the header on older versions
-            (Difficulty::one(), 1)
-        } else {
-            (Difficulty::read(reader)?, reader.read_u64()?)
-        };
+    pub fn read(_ver: u16, reader: &mut Reader) -> Result<ProofOfWork, ser::Error> {
+        let total_difficulty = Difficulty::read(reader)?;
+        let scaling_difficulty = reader.read_u32()?;
         let nonce = reader.read_u64()?;
         let proof = Proof::read(reader)?;
         Ok(ProofOfWork {
|
@ -269,14 +265,12 @@ impl ProofOfWork {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Write the pre-hash portion of the header
|
/// Write the pre-hash portion of the header
|
||||||
pub fn write_pre_pow<W: Writer>(&self, ver: u16, writer: &mut W) -> Result<(), ser::Error> {
|
pub fn write_pre_pow<W: Writer>(&self, _ver: u16, writer: &mut W) -> Result<(), ser::Error> {
|
||||||
if ver > 1 {
|
|
||||||
ser_multiwrite!(
|
ser_multiwrite!(
|
||||||
writer,
|
writer,
|
||||||
[write_u64, self.total_difficulty.to_num()],
|
[write_u64, self.total_difficulty.to_num()],
|
||||||
[write_u64, self.scaling_difficulty]
|
[write_u32, self.scaling_difficulty]
|
||||||
);
|
);
|
||||||
}
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -295,6 +289,21 @@ impl ProofOfWork {
     pub fn cuckoo_sizeshift(&self) -> u8 {
         self.proof.cuckoo_sizeshift
     }
+
+    /// Whether this proof of work is for the primary algorithm (as opposed
+    /// to secondary). Only depends on the size shift at this time.
+    pub fn is_primary(&self) -> bool {
+        // 2 conditions are redundant right now but not necessarily in
+        // the future
+        self.proof.cuckoo_sizeshift != SECOND_POW_SIZESHIFT
+            && self.proof.cuckoo_sizeshift >= global::min_sizeshift()
+    }
+
+    /// Whether this proof of work is for the secondary algorithm (as opposed
+    /// to primary). Only depends on the size shift at this time.
+    pub fn is_secondary(&self) -> bool {
+        self.proof.cuckoo_sizeshift == SECOND_POW_SIZESHIFT
+    }
 }

 /// A Cuckoo Cycle proof of work, consisting of the shift to get the graph
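`is_primary` and `is_secondary` reduce to comparisons on the proof's size shift, and the `InvalidSizeshift` check in the first pipe hunk rejects anything that is neither. A standalone sketch of the classification; the concrete shift values (29 for the secondary PoW, 30 for the primary minimum) are illustrative assumptions, not taken from this diff:

    const SECOND_POW_SIZESHIFT: u8 = 29; // illustrative value only

    fn min_sizeshift() -> u8 {
        30 // illustrative value only
    }

    fn is_primary(shift: u8) -> bool {
        shift != SECOND_POW_SIZESHIFT && shift >= min_sizeshift()
    }

    fn is_secondary(shift: u8) -> bool {
        shift == SECOND_POW_SIZESHIFT
    }

    fn main() {
        assert!(is_secondary(29) && !is_primary(29));
        assert!(is_primary(30) && !is_secondary(30));
        // neither: validate_header now fails such a header with InvalidSizeshift
        assert!(!is_primary(28) && !is_secondary(28));
    }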
@@ -25,7 +25,7 @@ pub mod common;

 use chrono::Duration;
 use common::{new_block, tx1i2o, tx2i1o, txspend1i1o};
-use grin_core::consensus::{self, BLOCK_OUTPUT_WEIGHT, MAX_BLOCK_WEIGHT};
+use grin_core::consensus::{BLOCK_OUTPUT_WEIGHT, MAX_BLOCK_WEIGHT};
 use grin_core::core::block::Error;
 use grin_core::core::hash::Hashed;
 use grin_core::core::id::ShortIdentifiable;
@@ -257,7 +257,7 @@ fn empty_block_serialized_size() {
     let b = new_block(vec![], &keychain, &prev, &key_id);
     let mut vec = Vec::new();
     ser::serialize(&mut vec, &b).expect("serialization failed");
-    let target_len = 1_224;
+    let target_len = 1_228;
     assert_eq!(vec.len(), target_len);
 }

@@ -270,7 +270,7 @@ fn block_single_tx_serialized_size() {
     let b = new_block(vec![&tx1], &keychain, &prev, &key_id);
     let mut vec = Vec::new();
     ser::serialize(&mut vec, &b).expect("serialization failed");
-    let target_len = 2_806;
+    let target_len = 2_810;
     assert_eq!(vec.len(), target_len);
 }

@@ -283,7 +283,7 @@ fn empty_compact_block_serialized_size() {
     let cb: CompactBlock = b.into();
     let mut vec = Vec::new();
     ser::serialize(&mut vec, &cb).expect("serialization failed");
-    let target_len = 1_232;
+    let target_len = 1_236;
     assert_eq!(vec.len(), target_len);
 }

@@ -297,7 +297,7 @@ fn compact_block_single_tx_serialized_size() {
     let cb: CompactBlock = b.into();
     let mut vec = Vec::new();
     ser::serialize(&mut vec, &cb).expect("serialization failed");
-    let target_len = 1_238;
+    let target_len = 1_242;
     assert_eq!(vec.len(), target_len);
 }

@@ -316,7 +316,7 @@ fn block_10_tx_serialized_size() {
     let b = new_block(txs.iter().collect(), &keychain, &prev, &key_id);
     let mut vec = Vec::new();
     ser::serialize(&mut vec, &b).expect("serialization failed");
-    let target_len = 17_044;
+    let target_len = 17_048;
     assert_eq!(vec.len(), target_len,);
 }

@@ -335,7 +335,7 @@ fn compact_block_10_tx_serialized_size() {
     let cb: CompactBlock = b.into();
     let mut vec = Vec::new();
     ser::serialize(&mut vec, &cb).expect("serialization failed");
-    let target_len = 1_292;
+    let target_len = 1_296;
     assert_eq!(vec.len(), target_len,);
 }

@@ -429,26 +429,3 @@ fn serialize_deserialize_compact_block() {
     assert_eq!(cb1.header, cb2.header);
     assert_eq!(cb1.kern_ids(), cb2.kern_ids());
 }
-
-#[test]
-fn empty_block_v2_switch() {
-    let keychain = ExtKeychain::from_random_seed().unwrap();
-    let mut prev = BlockHeader::default();
-    prev.height = consensus::HEADER_V2_HARD_FORK - 1;
-    let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
-    let b = new_block(vec![], &keychain, &prev, &key_id);
-    let mut vec = Vec::new();
-    ser::serialize(&mut vec, &b).expect("serialization failed");
-    let target_len = 1_232;
-    assert_eq!(b.header.version, 2);
-    assert_eq!(vec.len(), target_len);
-
-    // another try right before v2
-    prev.height = consensus::HEADER_V2_HARD_FORK - 2;
-    let b = new_block(vec![], &keychain, &prev, &key_id);
-    let mut vec = Vec::new();
-    ser::serialize(&mut vec, &b).expect("serialization failed");
-    let target_len = 1_224;
-    assert_eq!(b.header.version, 1);
-    assert_eq!(vec.len(), target_len);
-}
@@ -18,7 +18,7 @@ extern crate chrono;

 use chrono::prelude::Utc;
 use core::consensus::{
-    next_difficulty, valid_header_version, TargetError, BLOCK_TIME_WINDOW, DAMP_FACTOR,
+    next_difficulty, valid_header_version, HeaderInfo, BLOCK_TIME_WINDOW, DAMP_FACTOR,
     DIFFICULTY_ADJUST_WINDOW, MEDIAN_TIME_INDEX, MEDIAN_TIME_WINDOW, UPPER_TIME_BOUND,
 };
 use core::global;
@@ -77,51 +77,51 @@ impl Display for DiffBlock {

 // Builds an iterator for next difficulty calculation with the provided
 // constant time interval, difficulty and total length.
-fn repeat(
-    interval: u64,
-    diff: u64,
-    len: u64,
-    cur_time: Option<u64>,
-) -> Vec<Result<(u64, Difficulty), TargetError>> {
+fn repeat(interval: u64, diff: HeaderInfo, len: u64, cur_time: Option<u64>) -> Vec<HeaderInfo> {
     let cur_time = match cur_time {
         Some(t) => t,
         None => Utc::now().timestamp() as u64,
     };
     // watch overflow here, length shouldn't be ridiculous anyhow
     assert!(len < std::usize::MAX as u64);
-    let diffs = vec![Difficulty::from_num(diff); len as usize];
+    let diffs = vec![diff.difficulty.clone(); len as usize];
     let times = (0..(len as usize)).map(|n| n * interval as usize).rev();
     let pairs = times.zip(diffs.iter());
     pairs
-        .map(|(t, d)| Ok((cur_time + t as u64, d.clone())))
-        .collect::<Vec<_>>()
+        .map(|(t, d)| {
+            HeaderInfo::new(
+                cur_time + t as u64,
+                d.clone(),
+                diff.secondary_scaling,
+                diff.is_secondary,
+            )
+        }).collect::<Vec<_>>()
 }

 // Creates a new chain with a genesis at a simulated difficulty
-fn create_chain_sim(diff: u64) -> Vec<((Result<(u64, Difficulty), TargetError>), DiffStats)> {
+fn create_chain_sim(diff: u64) -> Vec<(HeaderInfo, DiffStats)> {
     println!(
         "adding create: {}, {}",
         Utc::now().timestamp(),
         Difficulty::from_num(diff)
     );
-    let return_vec = vec![Ok((
+    let return_vec = vec![HeaderInfo::from_ts_diff(
         Utc::now().timestamp() as u64,
         Difficulty::from_num(diff),
-    ))];
+    )];
     let diff_stats = get_diff_stats(&return_vec);
     vec![(
-        Ok((Utc::now().timestamp() as u64, Difficulty::from_num(diff))),
+        HeaderInfo::from_ts_diff(Utc::now().timestamp() as u64, Difficulty::from_num(diff)),
         diff_stats,
     )]
 }

-fn get_diff_stats(chain_sim: &Vec<Result<(u64, Difficulty), TargetError>>) -> DiffStats {
+fn get_diff_stats(chain_sim: &Vec<HeaderInfo>) -> DiffStats {
     // Fill out some difficulty stats for convenience
     let diff_iter = chain_sim.clone();
-    let last_blocks: Vec<Result<(u64, Difficulty), TargetError>> =
-        global::difficulty_data_to_vector(diff_iter.clone());
+    let last_blocks: Vec<HeaderInfo> = global::difficulty_data_to_vector(diff_iter.iter().cloned());

-    let mut last_time = last_blocks[0].clone().unwrap().0;
+    let mut last_time = last_blocks[0].timestamp;
     let tip_height = chain_sim.len();
     let earliest_block_height = tip_height as i64 - last_blocks.len() as i64;

@@ -131,7 +131,7 @@ fn get_diff_stats(chain_sim: &Vec<Result<(u64, Difficulty), TargetError>>) -> Di
         .clone()
         .iter()
         .take(MEDIAN_TIME_WINDOW as usize)
-        .map(|n| n.clone().unwrap().0)
+        .map(|n| n.clone().timestamp)
         .collect();
     // pick median
     window_earliest.sort();
@@ -143,7 +143,7 @@ fn get_diff_stats(chain_sim: &Vec<Result<(u64, Difficulty), TargetError>>) -> Di
         .clone()
         .iter()
         .skip(DIFFICULTY_ADJUST_WINDOW as usize)
-        .map(|n| n.clone().unwrap().0)
+        .map(|n| n.clone().timestamp)
         .collect();
     // pick median
     window_latest.sort();
@@ -151,9 +151,8 @@ fn get_diff_stats(chain_sim: &Vec<Result<(u64, Difficulty), TargetError>>) -> Di

     let mut i = 1;

-    let sum_blocks: Vec<Result<(u64, Difficulty), TargetError>> = global::difficulty_data_to_vector(
-        diff_iter,
-    ).into_iter()
+    let sum_blocks: Vec<HeaderInfo> = global::difficulty_data_to_vector(diff_iter.iter().cloned())
+        .into_iter()
         .skip(MEDIAN_TIME_WINDOW as usize)
         .take(DIFFICULTY_ADJUST_WINDOW as usize)
         .collect();
@ -162,15 +161,14 @@ fn get_diff_stats(chain_sim: &Vec<Result<(u64, Difficulty), TargetError>>) -> Di
|
||||||
.iter()
|
.iter()
|
||||||
//.skip(1)
|
//.skip(1)
|
||||||
.map(|n| {
|
.map(|n| {
|
||||||
let (time, diff) = n.clone().unwrap();
|
let dur = n.timestamp - last_time;
|
||||||
let dur = time - last_time;
|
|
||||||
let height = earliest_block_height + i + 1;
|
let height = earliest_block_height + i + 1;
|
||||||
i += 1;
|
i += 1;
|
||||||
last_time = time;
|
last_time = n.timestamp;
|
||||||
DiffBlock {
|
DiffBlock {
|
||||||
block_number: height,
|
block_number: height,
|
||||||
difficulty: diff.to_num(),
|
difficulty: n.difficulty.to_num(),
|
||||||
time: time,
|
time: n.timestamp,
|
||||||
duration: dur,
|
duration: dur,
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
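The closure maps each `HeaderInfo` onto one `DiffBlock` row of the stats table. From the fields assigned here its shape is roughly the following (field types are assumptions inferred from usage):

    pub struct DiffBlock {
        pub block_number: i64, // height within the simulated chain
        pub difficulty: u64,   // n.difficulty.to_num()
        pub time: u64,         // header timestamp in seconds
        pub duration: u64,     // seconds elapsed since the previous block
    }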
@@ -180,25 +178,23 @@ fn get_diff_stats(chain_sim: &Vec<Result<(u64, Difficulty), TargetError>>) -> DiffStats {
	let block_diff_sum = sum_entries.iter().fold(0, |sum, d| sum + d.difficulty);

	i = 1;
-	last_time = last_blocks[0].clone().unwrap().0;
+	last_time = last_blocks[0].clone().timestamp;

	let diff_entries: Vec<DiffBlock> = last_blocks
		.iter()
		.skip(1)
		.map(|n| {
-			let (time, diff) = n.clone().unwrap();
-			let dur = time - last_time;
+			let dur = n.timestamp - last_time;
			let height = earliest_block_height + i;
			i += 1;
-			last_time = time;
+			last_time = n.timestamp;
			DiffBlock {
				block_number: height,
-				difficulty: diff.to_num(),
-				time: time,
+				difficulty: n.difficulty.to_num(),
+				time: n.timestamp,
				duration: dur,
			}
-		})
-		.collect();
+		}).collect();

	DiffStats {
		height: tip_height as u64,
@@ -218,26 +214,28 @@ fn get_diff_stats(chain_sim: &Vec<Result<(u64, Difficulty), TargetError>>) -> DiffStats {
// from the difficulty adjustment at interval seconds from the previous block
fn add_block(
	interval: u64,
-	chain_sim: Vec<((Result<(u64, Difficulty), TargetError>), DiffStats)>,
-) -> Vec<((Result<(u64, Difficulty), TargetError>), DiffStats)> {
+	chain_sim: Vec<(HeaderInfo, DiffStats)>,
+) -> Vec<(HeaderInfo, DiffStats)> {
	let mut ret_chain_sim = chain_sim.clone();
-	let mut return_chain: Vec<(Result<(u64, Difficulty), TargetError>)> =
-		chain_sim.clone().iter().map(|e| e.0.clone()).collect();
+	let mut return_chain: Vec<HeaderInfo> = chain_sim.clone().iter().map(|e| e.0.clone()).collect();
	// get last interval
-	let diff = next_difficulty(return_chain.clone()).unwrap();
-	let last_elem = chain_sim.first().as_ref().unwrap().0.as_ref().unwrap();
-	let time = last_elem.0 + interval;
-	return_chain.insert(0, Ok((time, diff)));
+	let diff = next_difficulty(1, return_chain.clone());
+	let last_elem = chain_sim.first().unwrap().clone().0;
+	let time = last_elem.timestamp + interval;
+	return_chain.insert(0, HeaderInfo::from_ts_diff(time, diff.difficulty));
	let diff_stats = get_diff_stats(&return_chain);
-	ret_chain_sim.insert(0, (Ok((time, diff)), diff_stats));
+	ret_chain_sim.insert(
+		0,
+		(HeaderInfo::from_ts_diff(time, diff.difficulty), diff_stats),
+	);
	ret_chain_sim
}

// Adds many defined blocks
fn add_blocks(
	intervals: Vec<u64>,
-	chain_sim: Vec<((Result<(u64, Difficulty), TargetError>), DiffStats)>,
-) -> Vec<((Result<(u64, Difficulty), TargetError>), DiffStats)> {
+	chain_sim: Vec<(HeaderInfo, DiffStats)>,
+) -> Vec<(HeaderInfo, DiffStats)> {
	let mut return_chain = chain_sim.clone();
	for i in intervals {
		return_chain = add_block(i, return_chain.clone());
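Worth calling out in `add_block`: `next_difficulty` now takes the height being built on as its first argument and returns a `HeaderInfo` rather than a `Result`, which is why the `.unwrap()` calls disappear at every call site. A sketch of the signature as these call sites use it (parameter names are assumptions):

    pub fn next_difficulty<T>(height: u64, cursor: T) -> HeaderInfo
    where
        T: IntoIterator<Item = HeaderInfo>,
    {
        // averages difficulty over DIFFICULTY_ADJUST_WINDOW and computes the
        // secondary scaling factor; see the next_target_adjustment test below
        unimplemented!()
    }

The returned struct carries both the next block difficulty (`.difficulty`) and the secondary-PoW scaling factor (`.secondary_scaling`), so a single call now feeds both header fields.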
@@ -248,9 +246,9 @@ fn add_blocks(
// Adds another n 'blocks' to the iterator, with difficulty calculated
fn add_block_repeated(
	interval: u64,
-	chain_sim: Vec<((Result<(u64, Difficulty), TargetError>), DiffStats)>,
+	chain_sim: Vec<(HeaderInfo, DiffStats)>,
	iterations: usize,
-) -> Vec<((Result<(u64, Difficulty), TargetError>), DiffStats)> {
+) -> Vec<(HeaderInfo, DiffStats)> {
	let mut return_chain = chain_sim.clone();
	for _ in 0..iterations {
		return_chain = add_block(interval, return_chain.clone());
@@ -260,7 +258,7 @@ fn add_block_repeated(

// Prints the contents of the iterator and its difficulties.. useful for
// tweaking
-fn print_chain_sim(chain_sim: Vec<((Result<(u64, Difficulty), TargetError>), DiffStats)>) {
+fn print_chain_sim(chain_sim: Vec<(HeaderInfo, DiffStats)>) {
	let mut chain_sim = chain_sim.clone();
	chain_sim.reverse();
	let mut last_time = 0;
@@ -272,18 +270,18 @@ fn print_chain_sim(chain_sim: Vec<((Result<(u64, Difficulty), TargetError>), DiffStats)>) {
	println!("UPPER_TIME_BOUND: {}", UPPER_TIME_BOUND);
	println!("DAMP_FACTOR: {}", DAMP_FACTOR);
	chain_sim.iter().enumerate().for_each(|(i, b)| {
-		let block = b.0.as_ref().unwrap();
+		let block = b.0.clone();
		let stats = b.1.clone();
		if first {
-			last_time = block.0;
+			last_time = block.timestamp;
			first = false;
		}
		println!(
			"Height: {}, Time: {}, Interval: {}, Network difficulty:{}, Average Block Time: {}, Average Difficulty {}, Block Time Sum: {}, Block Diff Sum: {}, Latest Timestamp: {}, Earliest Timestamp: {}, Timestamp Delta: {}",
			i,
-			block.0,
-			block.0 - last_time,
-			block.1,
+			block.timestamp,
+			block.timestamp - last_time,
+			block.difficulty,
			stats.average_block_time,
			stats.average_difficulty,
			stats.block_time_sum,
@@ -297,22 +295,17 @@ fn print_chain_sim(chain_sim: Vec<((Result<(u64, Difficulty), TargetError>), DiffStats)>) {
		for i in sb {
			println!(" {}", i);
		}
-		last_time = block.0;
+		last_time = block.timestamp;
	});
}

-fn repeat_offs(
-	from: u64,
-	interval: u64,
-	diff: u64,
-	len: u64,
-) -> Vec<Result<(u64, Difficulty), TargetError>> {
-	map_vec!(repeat(interval, diff, len, Some(from)), |e| {
-		match e.clone() {
-			Err(e) => Err(e),
-			Ok((t, d)) => Ok((t, d)),
-		}
-	})
-}
+fn repeat_offs(from: u64, interval: u64, diff: u64, len: u64) -> Vec<HeaderInfo> {
+	repeat(
+		interval,
+		HeaderInfo::from_ts_diff(1, Difficulty::from_num(diff)),
+		len,
+		Some(from),
+	)
+}
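`repeat_offs` collapses to a thin wrapper now that `repeat` takes a template `HeaderInfo` directly. Judging from the call sites, `repeat(interval, hi, len, from)` yields `len` copies of `hi` spaced `interval` seconds apart, offset from `from` when given; under that assumption, a usage sketch:

    // 30 simulated secondary-PoW headers at difficulty 500, one minute apart
    let mut hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(500), 1);
    hi.is_secondary = true;
    let sim: Vec<HeaderInfo> = repeat(60, hi, 30, Some(cur_time));
    assert_eq!(sim.len(), 30);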

/// Checks different next_target adjustments and difficulty boundaries
@@ -415,32 +408,51 @@ fn next_target_adjustment() {
	global::set_mining_mode(global::ChainTypes::AutomatedTesting);
	let cur_time = Utc::now().timestamp() as u64;

+	let diff_one = Difficulty::one();
	assert_eq!(
-		next_difficulty(vec![Ok((cur_time, Difficulty::one()))]).unwrap(),
-		Difficulty::one()
+		next_difficulty(1, vec![HeaderInfo::from_ts_diff(cur_time, diff_one)]),
+		HeaderInfo::from_diff_scaling(Difficulty::one(), 1),
+	);
+	assert_eq!(
+		next_difficulty(1, vec![HeaderInfo::new(cur_time, diff_one, 10, true)]),
+		HeaderInfo::from_diff_scaling(Difficulty::one(), 1),
	);

+	let mut hi = HeaderInfo::from_diff_scaling(diff_one, 1);
	assert_eq!(
-		next_difficulty(repeat(60, 1, DIFFICULTY_ADJUST_WINDOW, None)).unwrap(),
-		Difficulty::one()
+		next_difficulty(1, repeat(60, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)),
+		HeaderInfo::from_diff_scaling(Difficulty::one(), 1),
+	);
+	hi.is_secondary = true;
+	assert_eq!(
+		next_difficulty(1, repeat(60, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)),
+		HeaderInfo::from_diff_scaling(Difficulty::one(), 1),
+	);
+	hi.secondary_scaling = 100;
+	assert_eq!(
+		next_difficulty(1, repeat(60, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)),
+		HeaderInfo::from_diff_scaling(Difficulty::one(), 93),
	);

	// Check we don't get stuck on difficulty 1
+	let mut hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), 1);
	assert_ne!(
-		next_difficulty(repeat(1, 10, DIFFICULTY_ADJUST_WINDOW, None)).unwrap(),
+		next_difficulty(1, repeat(1, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)).difficulty,
		Difficulty::one()
	);

	// just enough data, right interval, should stay constant
	let just_enough = DIFFICULTY_ADJUST_WINDOW + MEDIAN_TIME_WINDOW;
+	hi.difficulty = Difficulty::from_num(1000);
	assert_eq!(
-		next_difficulty(repeat(60, 1000, just_enough, None)).unwrap(),
+		next_difficulty(1, repeat(60, hi.clone(), just_enough, None)).difficulty,
		Difficulty::from_num(1000)
	);

	// checking averaging works
+	hi.difficulty = Difficulty::from_num(500);
	let sec = DIFFICULTY_ADJUST_WINDOW / 2 + MEDIAN_TIME_WINDOW;
-	let mut s1 = repeat(60, 500, sec, Some(cur_time));
+	let mut s1 = repeat(60, hi.clone(), sec, Some(cur_time));
	let mut s2 = repeat_offs(
		cur_time + (sec * 60) as u64,
		60,
@@ -448,51 +460,56 @@ fn next_target_adjustment() {
		DIFFICULTY_ADJUST_WINDOW / 2,
	);
	s2.append(&mut s1);
-	assert_eq!(next_difficulty(s2).unwrap(), Difficulty::from_num(1000));
+	assert_eq!(
+		next_difficulty(1, s2).difficulty,
+		Difficulty::from_num(1000)
+	);

	// too slow, diff goes down
+	hi.difficulty = Difficulty::from_num(1000);
	assert_eq!(
-		next_difficulty(repeat(90, 1000, just_enough, None)).unwrap(),
+		next_difficulty(1, repeat(90, hi.clone(), just_enough, None)).difficulty,
		Difficulty::from_num(857)
	);
	assert_eq!(
-		next_difficulty(repeat(120, 1000, just_enough, None)).unwrap(),
+		next_difficulty(1, repeat(120, hi.clone(), just_enough, None)).difficulty,
		Difficulty::from_num(750)
	);

	// too fast, diff goes up
	assert_eq!(
-		next_difficulty(repeat(55, 1000, just_enough, None)).unwrap(),
+		next_difficulty(1, repeat(55, hi.clone(), just_enough, None)).difficulty,
		Difficulty::from_num(1028)
	);
	assert_eq!(
-		next_difficulty(repeat(45, 1000, just_enough, None)).unwrap(),
+		next_difficulty(1, repeat(45, hi.clone(), just_enough, None)).difficulty,
		Difficulty::from_num(1090)
	);

	// hitting lower time bound, should always get the same result below
	assert_eq!(
-		next_difficulty(repeat(0, 1000, just_enough, None)).unwrap(),
+		next_difficulty(1, repeat(0, hi.clone(), just_enough, None)).difficulty,
		Difficulty::from_num(1500)
	);
	assert_eq!(
-		next_difficulty(repeat(0, 1000, just_enough, None)).unwrap(),
+		next_difficulty(1, repeat(0, hi.clone(), just_enough, None)).difficulty,
		Difficulty::from_num(1500)
	);

	// hitting higher time bound, should always get the same result above
	assert_eq!(
-		next_difficulty(repeat(300, 1000, just_enough, None)).unwrap(),
+		next_difficulty(1, repeat(300, hi.clone(), just_enough, None)).difficulty,
		Difficulty::from_num(500)
	);
	assert_eq!(
-		next_difficulty(repeat(400, 1000, just_enough, None)).unwrap(),
+		next_difficulty(1, repeat(400, hi.clone(), just_enough, None)).difficulty,
		Difficulty::from_num(500)
	);

	// We should never drop below 1
+	hi.difficulty = Difficulty::zero();
	assert_eq!(
-		next_difficulty(repeat(90, 0, just_enough, None)).unwrap(),
+		next_difficulty(1, repeat(90, hi.clone(), just_enough, None)).difficulty,
		Difficulty::from_num(1)
	);
}
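The fixed points asserted above are all consistent with a damped window adjustment: sum the difficulties over the adjustment window, then divide by the damped and clamped observed window time. The sketch below reproduces every expected value, assuming DIFFICULTY_ADJUST_WINDOW = 60, BLOCK_TIME_SEC = 60 and DAMP_FACTOR = 3 (the constants `print_chain_sim` prints); it is a model of the behaviour, not the consensus implementation itself:

    fn sketch_next_diff(interval: u64, diff: u64) -> u64 {
        const WINDOW: u64 = 60; // DIFFICULTY_ADJUST_WINDOW (assumed)
        const BLOCK_TIME_SEC: u64 = 60;
        const DAMP_FACTOR: u64 = 3;
        let diff_sum = diff * WINDOW; // uniform-difficulty window
        let target = WINDOW * BLOCK_TIME_SEC; // 3600s of ideal block time
        let actual = WINDOW * interval; // observed window time
        // damp the observed time toward the target, then clamp it
        let damped = (actual + (DAMP_FACTOR - 1) * target) / DAMP_FACTOR;
        let adjusted = damped.max(target * 2 / 3).min(target * 2);
        std::cmp::max(1, diff_sum * BLOCK_TIME_SEC / adjusted)
    }

    // sketch_next_diff(90, 1000) == 857    too slow, diff goes down
    // sketch_next_diff(120, 1000) == 750
    // sketch_next_diff(55, 1000) == 1028   too fast, diff goes up
    // sketch_next_diff(45, 1000) == 1090
    // sketch_next_diff(0, 1000) == 1500    lower time bound
    // sketch_next_diff(300, 1000) == 500   upper time bound
    // sketch_next_diff(90, 0) == 1         never drop below 1

The secondary-scaling expectation (a uniform window of 100 adjusting to 93) follows the same damped pattern applied to the scaling factor, though the exact scaling formula is not visible in this diff.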
@@ -502,9 +519,9 @@ fn hard_forks() {
	assert!(valid_header_version(0, 1));
	assert!(valid_header_version(10, 1));
	assert!(!valid_header_version(10, 2));
-	assert!(valid_header_version(100_000, 2));
-	assert!(valid_header_version(249_999, 2));
-	assert!(valid_header_version(250_000, 3));
+	assert!(valid_header_version(249_999, 1));
+	// v2 not active yet
+	assert!(!valid_header_version(250_000, 2));
	assert!(!valid_header_version(250_000, 1));
	assert!(!valid_header_version(500_000, 1));
	assert!(!valid_header_version(250_001, 2));
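The reworked assertions pin the schedule down: version 1 is the only valid header version below height 250_000, and version 2 is not activated yet, so nothing validates at or above that height for now. A minimal model consistent with the test (the cutoff constant and the shape of the real function are assumptions):

    const HARD_FORK_INTERVAL: u64 = 250_000; // assumed from the test values

    pub fn valid_header_version(height: u64, version: u16) -> bool {
        if height < HARD_FORK_INTERVAL {
            version == 1
        } else {
            false // v2 not active yet
        }
    }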
@@ -459,8 +459,7 @@ fn simple_block() {
		&key_id,
	);

-	b.validate(&BlindingFactor::zero(), vc.clone())
-		.unwrap();
+	b.validate(&BlindingFactor::zero(), vc.clone()).unwrap();
}

#[test]
@@ -488,8 +487,7 @@ fn test_block_with_timelocked_tx() {
	let previous_header = BlockHeader::default();

	let b = new_block(vec![&tx1], &keychain, &previous_header, &key_id3.clone());
-	b.validate(&BlindingFactor::zero(), vc.clone())
-		.unwrap();
+	b.validate(&BlindingFactor::zero(), vc.clone()).unwrap();

	// now try adding a timelocked tx where lock height is greater than current
	// block height
@@ -268,7 +268,7 @@ impl MessageHandler for Protocol {
	let mut tmp_zip = BufWriter::new(File::create(file)?);
	let total_size = sm_arch.bytes as usize;
	let mut downloaded_size: usize = 0;
-	let mut request_size = 48_000;
+	let mut request_size = cmp::min(48_000, sm_arch.bytes) as usize;
	while request_size > 0 {
		downloaded_size += msg.copy_attachment(request_size, &mut tmp_zip)?;
		request_size = cmp::min(48_000, total_size - downloaded_size);
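The one-line fix matters when the state archive is smaller than one chunk: seeding `request_size` with a bare 48_000 would ask `copy_attachment` for more bytes than the attachment holds on the very first read. Reduced to its essentials, the loop looks like this (a sketch; `read_chunk` is a hypothetical stand-in for `msg.copy_attachment`):

    use std::cmp;

    fn download_attachment(total_size: usize, mut read_chunk: impl FnMut(usize) -> usize) {
        let mut downloaded_size = 0;
        // never request more than the attachment actually contains
        let mut request_size = cmp::min(48_000, total_size);
        while request_size > 0 {
            downloaded_size += read_chunk(request_size);
            request_size = cmp::min(48_000, total_size - downloaded_size);
        }
    }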
@@ -337,23 +337,23 @@ fn headers_header_size(conn: &mut TcpStream, msg_len: u64) -> Result<u64, Error> {

	// support size of Cuckoo: from Cuckoo 30 to Cuckoo 36, with version 2
	// having slightly larger headers
-	let minimum_size = core::serialized_size_of_header(1, global::min_sizeshift());
-	let maximum_size = core::serialized_size_of_header(2, global::min_sizeshift() + 6);
-	if average_header_size < minimum_size as u64 || average_header_size > maximum_size as u64 {
+	let min_size = core::serialized_size_of_header(1, global::min_sizeshift());
+	let max_size = min_size + 6;
+	if average_header_size < min_size as u64 || average_header_size > max_size as u64 {
		debug!(
			LOGGER,
			"headers_header_size - size of Vec: {}, average_header_size: {}, min: {}, max: {}",
			total_headers,
			average_header_size,
-			minimum_size,
-			maximum_size,
+			min_size,
+			max_size,
		);
		return Err(Error::Connection(io::Error::new(
			io::ErrorKind::InvalidData,
			"headers_header_size",
		)));
	}
-	return Ok(maximum_size as u64);
+	return Ok(max_size as u64);
}

/// Read the Headers streaming body from the underlying connection
@@ -166,8 +166,7 @@ impl ServerConfig {
	// check [server.p2p_config.capabilities] with 'archive_mode' in [server]
	if let Some(archive) = self.archive_mode {
		// note: slog not available before config loaded, only print here.
-		if archive
-			!= self
+		if archive != self
			.p2p_config
			.capabilities
			.contains(p2p::Capabilities::FULL_HIST)
@@ -30,7 +30,6 @@ use common::stats::{DiffBlock, DiffStats, PeerStats, ServerStateInfo, ServerStats};
use common::types::{Error, ServerConfig, StratumServerConfig, SyncState};
use core::core::hash::Hashed;
use core::core::verifier_cache::{LruVerifierCache, VerifierCache};
-use core::pow::Difficulty;
use core::{consensus, genesis, global, pow};
use grin::{dandelion_monitor, seed, sync};
use mining::stratumserver;
@@ -397,14 +396,14 @@ impl Server {
	// code clean. This may be handy for testing but not really needed
	// for release
	let diff_stats = {
-		let last_blocks: Vec<Result<(u64, Difficulty), consensus::TargetError>> =
+		let last_blocks: Vec<consensus::HeaderInfo> =
			global::difficulty_data_to_vector(self.chain.difficulty_iter())
				.into_iter()
				.skip(consensus::MEDIAN_TIME_WINDOW as usize)
				.take(consensus::DIFFICULTY_ADJUST_WINDOW as usize)
				.collect();

-		let mut last_time = last_blocks[0].clone().unwrap().0;
+		let mut last_time = last_blocks[0].timestamp;
		let tip_height = self.chain.head().unwrap().height as i64;
		let earliest_block_height = tip_height as i64 - last_blocks.len() as i64;
@@ -414,15 +413,14 @@ impl Server {
			.iter()
			.skip(1)
			.map(|n| {
-				let (time, diff) = n.clone().unwrap();
-				let dur = time - last_time;
+				let dur = n.timestamp - last_time;
				let height = earliest_block_height + i + 1;
				i += 1;
-				last_time = time;
+				last_time = n.timestamp;
				DiffBlock {
					block_number: height,
-					difficulty: diff.to_num(),
-					time: time,
+					difficulty: n.difficulty.to_num(),
+					time: n.timestamp,
					duration: dur,
				}
			}).collect();
@@ -185,7 +185,7 @@ fn needs_syncing(
	// sum the last 5 difficulties to give us the threshold
	let threshold = chain
		.difficulty_iter()
-		.filter_map(|x| x.map(|(_, x)| x).ok())
+		.map(|x| x.difficulty)
		.take(5)
		.fold(Difficulty::zero(), |sum, val| sum + val);

@@ -106,7 +106,7 @@ fn build_block(

	// Determine the difficulty our block should be at.
	// Note: do not keep the difficulty_iter in scope (it has an active batch).
-	let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
+	let difficulty = consensus::next_difficulty(1, chain.difficulty_iter());

	// extract current transaction from the pool
	// TODO - we have a lot of unwrap() going on in this fn...
@@ -126,13 +126,14 @@ fn build_block(
	};

	let (output, kernel, block_fees) = get_coinbase(wallet_listener_url, block_fees)?;
-	let mut b = core::Block::with_reward(&head, txs, output, kernel, difficulty.clone())?;
+	let mut b = core::Block::with_reward(&head, txs, output, kernel, difficulty.difficulty)?;

	// making sure we're not spending time mining a useless block
	b.validate(&head.total_kernel_offset, verifier_cache)?;

	b.header.pow.nonce = thread_rng().gen();
-	b.header.timestamp = DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(now_sec, 0), Utc);;
+	b.header.pow.scaling_difficulty = difficulty.secondary_scaling;
+	b.header.timestamp = DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(now_sec, 0), Utc);

	let b_difficulty = (b.header.total_difficulty() - head.total_difficulty()).to_num();
	debug!(
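This hunk is the "setting scaling on mined block" part of the commit: the miner copies the secondary scaling factor chosen by `next_difficulty` into the header's proof of work before mining (and, incidentally, drops a stray doubled semicolon on the timestamp line). Gathered in one place as a sketch, using only names that appear in this diff:

    // one next_difficulty call now feeds both the block difficulty and
    // the header's secondary scaling factor
    let difficulty = consensus::next_difficulty(1, chain.difficulty_iter());
    let mut b = core::Block::with_reward(&head, txs, output, kernel, difficulty.difficulty)?;
    b.header.pow.scaling_difficulty = difficulty.secondary_scaling;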
@@ -85,7 +85,7 @@ fn get_outputs_by_pmmr_index_local(
/// Adds a block with a given reward to the chain and mines it
pub fn add_block_with_reward(chain: &Chain, txs: Vec<&Transaction>, reward: CbData) {
	let prev = chain.head_header().unwrap();
-	let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
+	let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter());
	let out_bin = util::from_hex(reward.output).unwrap();
	let kern_bin = util::from_hex(reward.kernel).unwrap();
	let output = ser::deserialize(&mut &out_bin[..]).unwrap();
@@ -93,14 +93,14 @@ pub fn add_block_with_reward(chain: &Chain, txs: Vec<&Transaction>, reward: CbData) {
	let mut b = core::core::Block::new(
		&prev,
		txs.into_iter().cloned().collect(),
-		difficulty.clone(),
+		next_header_info.clone().difficulty,
		(output, kernel),
	).unwrap();
	b.header.timestamp = prev.timestamp + Duration::seconds(60);
	chain.set_txhashset_roots(&mut b, false).unwrap();
	pow::pow_size(
		&mut b.header,
-		difficulty,
+		next_header_info.difficulty,
		global::proofsize(),
		global::min_sizeshift(),
	).unwrap();
@@ -180,9 +180,9 @@ where
		libwallet::ErrorKind::ClientCallback("Error parsing TxWrapper"),
	)?;

-	let tx_bin = util::from_hex(wrapper.tx_hex).context(libwallet::ErrorKind::ClientCallback(
-		"Error parsing TxWrapper: tx_bin",
-	))?;
+	let tx_bin = util::from_hex(wrapper.tx_hex).context(
+		libwallet::ErrorKind::ClientCallback("Error parsing TxWrapper: tx_bin"),
+	)?;

	let tx: Transaction = ser::deserialize(&mut &tx_bin[..]).context(
		libwallet::ErrorKind::ClientCallback("Error parsing TxWrapper: tx"),