improve minimum difficulty handling (#1791)

* rename MIN_DIFFICULTY to UNIT_DIFFICULTY; define Difficulty::unit, fix INITIAL_DIFFICULTY
* improve minimum difficulty handling
* replace all Difficulty::one by ::min
* revert secondary scaling default in HeaderInfo; rename scaling_difficulty; refactor difficulty_data_to_vector
Ignotus Peverell 2018-10-27 10:37:44 -07:00 committed by GitHub
commit 46051ee174
25 changed files with 125 additions and 152 deletions

@@ -509,8 +509,8 @@ pub struct BlockHeaderPrintable {
     pub cuckoo_solution: Vec<u64>,
     /// Total accumulated difficulty since genesis block
     pub total_difficulty: u64,
-    /// Difficulty scaling factor between the different proofs of work
-    pub scaling_difficulty: u32,
+    /// Variable difficulty scaling factor for secondary proof of work
+    pub secondary_scaling: u32,
     /// Total kernel offset since genesis block
    pub total_kernel_offset: String,
 }
@@ -531,7 +531,7 @@ impl BlockHeaderPrintable {
             edge_bits: h.pow.edge_bits(),
             cuckoo_solution: h.pow.proof.nonces.clone(),
             total_difficulty: h.pow.total_difficulty.to_num(),
-            scaling_difficulty: h.pow.scaling_difficulty,
+            secondary_scaling: h.pow.secondary_scaling,
             total_kernel_offset: h.total_kernel_offset.to_hex(),
         }
     }

@@ -436,7 +436,12 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
             return Err(ErrorKind::WrongTotalDifficulty.into());
         }
         // check the secondary PoW scaling factor if applicable
-        if header.pow.scaling_difficulty != next_header_info.secondary_scaling {
+        if header.pow.secondary_scaling != next_header_info.secondary_scaling {
+            info!(
+                "validate_header: header secondary scaling {} != {}",
+                header.pow.secondary_scaling,
+                next_header_info.secondary_scaling
+            );
             return Err(ErrorKind::InvalidScaling.into());
         }
     }

@@ -651,7 +651,7 @@ impl<'a> Iterator for DifficultyIter<'a> {
             .clone()
             .map_or(Difficulty::zero(), |x| x.total_difficulty());
         let difficulty = header.total_difficulty() - prev_difficulty;
-        let scaling = header.pow.scaling_difficulty;
+        let scaling = header.pow.secondary_scaling;
         Some(HeaderInfo::new(
             header.timestamp.timestamp() as u64,

@@ -70,7 +70,7 @@ impl Tip {
             height: 0,
             last_block_h: gbh,
             prev_block_h: gbh,
-            total_difficulty: Difficulty::one(),
+            total_difficulty: Difficulty::min(),
         }
     }

@@ -90,7 +90,7 @@ fn data_files() {
             core::core::Block::new(&prev, vec![], next_header_info.clone().difficulty, reward)
                 .unwrap();
         b.header.timestamp = prev.timestamp + Duration::seconds(60);
-        b.header.pow.scaling_difficulty = next_header_info.secondary_scaling;
+        b.header.pow.secondary_scaling = next_header_info.secondary_scaling;
         chain.set_txhashset_roots(&mut b, false).unwrap();

@@ -72,7 +72,7 @@ fn mine_empty_chain() {
             core::core::Block::new(&prev, vec![], next_header_info.clone().difficulty, reward)
                 .unwrap();
         b.header.timestamp = prev.timestamp + Duration::seconds(60);
-        b.header.pow.scaling_difficulty = next_header_info.secondary_scaling;
+        b.header.pow.secondary_scaling = next_header_info.secondary_scaling;
         chain.set_txhashset_roots(&mut b, false).unwrap();
@@ -396,7 +396,7 @@ fn output_header_mappings() {
             core::core::Block::new(&prev, vec![], next_header_info.clone().difficulty, reward)
                 .unwrap();
         b.header.timestamp = prev.timestamp + Duration::seconds(60);
-        b.header.pow.scaling_difficulty = next_header_info.secondary_scaling;
+        b.header.pow.secondary_scaling = next_header_info.secondary_scaling;
         chain.set_txhashset_roots(&mut b, false).unwrap();

@@ -53,7 +53,7 @@ fn test_various_store_indices() {
     let genesis = pow::mine_genesis_block().unwrap();
     let reward = libtx::reward::output(&keychain, &key_id, 0, 1).unwrap();
-    let block = Block::new(&genesis.header, vec![], Difficulty::one(), reward).unwrap();
+    let block = Block::new(&genesis.header, vec![], Difficulty::min(), reward).unwrap();
     let block_hash = block.hash();
     {

@@ -72,9 +72,9 @@ fn test_coinbase_maturity() {
     let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter());
     let reward = libtx::reward::output(&keychain, &key_id1, 0, prev.height).unwrap();
-    let mut block = core::core::Block::new(&prev, vec![], Difficulty::one(), reward).unwrap();
+    let mut block = core::core::Block::new(&prev, vec![], Difficulty::min(), reward).unwrap();
     block.header.timestamp = prev.timestamp + Duration::seconds(60);
-    block.header.pow.scaling_difficulty = next_header_info.secondary_scaling;
+    block.header.pow.secondary_scaling = next_header_info.secondary_scaling;
     chain.set_txhashset_roots(&mut block, false).unwrap();
@@ -118,10 +118,10 @@ fn test_coinbase_maturity() {
         let txs = vec![coinbase_txn.clone()];
         let fees = txs.iter().map(|tx| tx.fee()).sum();
         let reward = libtx::reward::output(&keychain, &key_id3, fees, prev.height).unwrap();
-        let mut block = core::core::Block::new(&prev, txs, Difficulty::one(), reward).unwrap();
+        let mut block = core::core::Block::new(&prev, txs, Difficulty::min(), reward).unwrap();
         let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter());
         block.header.timestamp = prev.timestamp + Duration::seconds(60);
-        block.header.pow.scaling_difficulty = next_header_info.secondary_scaling;
+        block.header.pow.secondary_scaling = next_header_info.secondary_scaling;
         chain.set_txhashset_roots(&mut block, false).unwrap();
@@ -151,10 +151,10 @@ fn test_coinbase_maturity() {
         let pk = ExtKeychainPath::new(1, 1, 0, 0, 0).to_identifier();
         let reward = libtx::reward::output(&keychain, &pk, 0, prev.height).unwrap();
-        let mut block = core::core::Block::new(&prev, vec![], Difficulty::one(), reward).unwrap();
+        let mut block = core::core::Block::new(&prev, vec![], Difficulty::min(), reward).unwrap();
         let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter());
         block.header.timestamp = prev.timestamp + Duration::seconds(60);
-        block.header.pow.scaling_difficulty = next_header_info.secondary_scaling;
+        block.header.pow.secondary_scaling = next_header_info.secondary_scaling;
         chain.set_txhashset_roots(&mut block, false).unwrap();
@@ -178,10 +178,10 @@ fn test_coinbase_maturity() {
         let fees = txs.iter().map(|tx| tx.fee()).sum();
         let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter());
         let reward = libtx::reward::output(&keychain, &key_id4, fees, prev.height).unwrap();
-        let mut block = core::core::Block::new(&prev, txs, Difficulty::one(), reward).unwrap();
+        let mut block = core::core::Block::new(&prev, txs, Difficulty::min(), reward).unwrap();
         block.header.timestamp = prev.timestamp + Duration::seconds(60);
-        block.header.pow.scaling_difficulty = next_header_info.secondary_scaling;
+        block.header.pow.secondary_scaling = next_header_info.secondary_scaling;
         chain.set_txhashset_roots(&mut block, false).unwrap();

@@ -50,7 +50,7 @@ fn block() -> Block {
     let reward = reward::output(&keychain, &key_id, 0, header.height).unwrap();
-    Block::new(&header, txs, Difficulty::one(), reward).unwrap()
+    Block::new(&header, txs, Difficulty::min(), reward).unwrap()
 }

 fn compact_block() -> CompactBlock {

@@ -158,18 +158,18 @@ pub fn graph_weight(edge_bits: u8) -> u64 {
     (2 << (edge_bits - global::base_edge_bits()) as u64) * (edge_bits as u64)
 }

-/// minimum possible difficulty equal to graph_weight(SECOND_POW_EDGE_BITS)
-pub const MIN_DIFFICULTY: u64 =
+/// minimum difficulty to avoid getting stuck when trying to increase subject to dampening
+pub const MIN_DIFFICULTY: u64 = DAMP_FACTOR;
+
+/// unit difficulty, equal to graph_weight(SECOND_POW_EDGE_BITS)
+pub const UNIT_DIFFICULTY: u64 =
     ((2 as u64) << (SECOND_POW_EDGE_BITS - BASE_EDGE_BITS)) * (SECOND_POW_EDGE_BITS as u64);

 /// The initial difficulty at launch. This should be over-estimated
 /// and difficulty should come down at launch rather than up
 /// Currently grossly over-estimated at 10% of current
 /// ethereum GPUs (assuming 1GPU can solve a block at diff 1 in one block interval)
-/// FOR MAINNET, use
-/// pub const INITIAL_DIFFICULTY: u64 = 1_000_000 * MIN_DIFFICULTY;
-/// Pick MUCH more modest value for TESTNET4:
-pub const INITIAL_DIFFICULTY: u64 = 1_000 * MIN_DIFFICULTY;
+pub const INITIAL_DIFFICULTY: u64 = 1_000_000 * UNIT_DIFFICULTY;

 /// Consensus errors
 #[derive(Clone, Debug, Eq, PartialEq, Fail)]
@@ -237,12 +237,12 @@ impl HeaderInfo {
     }
 }

-/// TODO: Doc
+/// Move value linearly toward a goal
 pub fn damp(actual: u64, goal: u64, damp_factor: u64) -> u64 {
     (1 * actual + (damp_factor - 1) * goal) / damp_factor
 }

-/// TODO: Doc
+/// limit value to be within some factor from a goal
 pub fn clamp(actual: u64, goal: u64, clamp_factor: u64) -> u64 {
     max(goal / clamp_factor, min(actual, goal * clamp_factor))
 }
@@ -290,7 +290,8 @@ where
         BLOCK_TIME_WINDOW,
         CLAMP_FACTOR,
     );
-    let difficulty = max(1, diff_sum * BLOCK_TIME_SEC / adj_ts);
+    // minimum difficulty avoids getting stuck due to dampening
+    let difficulty = max(MIN_DIFFICULTY, diff_sum * BLOCK_TIME_SEC / adj_ts);

     HeaderInfo::from_diff_scaling(Difficulty::from_num(difficulty), sec_pow_scaling)
 }
@@ -319,7 +320,8 @@ pub fn secondary_pow_scaling(height: u64, diff_data: &[HeaderInfo]) -> u32 {
     );
     let scale = scale_sum * target_pct / adj_count;

-    max(1, min(scale, MAX_SECONDARY_SCALING)) as u32
+    // minimum difficulty avoids getting stuck due to dampening
+    max(MIN_DIFFICULTY, min(scale, MAX_SECONDARY_SCALING)) as u32
 }

 /// Consensus rule that collections of items are sorted lexicographically.
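
The interaction of `damp`, `clamp`, and the new `MIN_DIFFICULTY` floor is easiest to see with numbers. Below is a small self-contained sketch (not the chain code itself) that re-derives the retarget step under assumed parameter values matching consensus.rs at the time of this commit: DAMP_FACTOR = 3, CLAMP_FACTOR = 2, BLOCK_TIME_SEC = 60, and a 60-block adjustment window. It shows why the old `max(1, ...)` floor could pin the difficulty under integer division, while a floor of MIN_DIFFICULTY = DAMP_FACTOR leaves room to climb:

    use std::cmp::{max, min};

    // Assumed values; see consensus.rs for the authoritative constants.
    const DAMP_FACTOR: u64 = 3;
    const CLAMP_FACTOR: u64 = 2;
    const MIN_DIFFICULTY: u64 = DAMP_FACTOR;
    const BLOCK_TIME_SEC: u64 = 60;
    const DIFFICULTY_ADJUST_WINDOW: u64 = 60;

    // Move `actual` linearly toward `goal`, weighted (damp_factor - 1):1.
    fn damp(actual: u64, goal: u64, damp_factor: u64) -> u64 {
        (actual + (damp_factor - 1) * goal) / damp_factor
    }

    // Keep `actual` within a factor of `clamp_factor` of `goal`.
    fn clamp(actual: u64, goal: u64, clamp_factor: u64) -> u64 {
        max(goal / clamp_factor, min(actual, goal * clamp_factor))
    }

    // Simplified retarget: every block in the window has difficulty
    // `cur_diff` and the whole window took `window_sec` seconds.
    fn next_diff(cur_diff: u64, window_sec: u64) -> u64 {
        let goal_sec = BLOCK_TIME_SEC * DIFFICULTY_ADJUST_WINDOW; // 3600
        let diff_sum = cur_diff * DIFFICULTY_ADJUST_WINDOW;
        let adj_ts = clamp(damp(window_sec, goal_sec, DAMP_FACTOR), goal_sec, CLAMP_FACTOR);
        max(MIN_DIFFICULTY, diff_sum * BLOCK_TIME_SEC / adj_ts)
    }

    fn main() {
        // Blocks arriving 4x too fast: 900s instead of 3600s.
        // damp(900, 3600, 3) = 2700, so at difficulty 1 the quotient is
        // 60 * 60 / 2700 = 1 -- the old max(1, ..) floor never let it rise.
        assert_eq!(next_diff(1, 900), MIN_DIFFICULTY); // new floor lifts it to 3
        // From MIN_DIFFICULTY the quotient is 180 * 60 / 2700 = 4 > 3,
        // so difficulty escapes upward despite the dampening.
        assert_eq!(next_diff(MIN_DIFFICULTY, 900), 4);
    }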

@@ -156,7 +156,7 @@ fn fixed_size_of_serialized_header(_version: u16) -> usize {
     size += mem::size_of::<u64>(); // output_mmr_size
     size += mem::size_of::<u64>(); // kernel_mmr_size
     size += mem::size_of::<Difficulty>(); // total_difficulty
-    size += mem::size_of::<u32>(); // scaling_difficulty
+    size += mem::size_of::<u32>(); // secondary_scaling
     size += mem::size_of::<u64>(); // nonce
     size
 }

@@ -44,8 +44,8 @@ pub fn genesis_testnet1() -> core::Block {
         height: 0,
         timestamp: Utc.ymd(2017, 11, 16).and_hms(20, 0, 0),
         pow: ProofOfWork {
-            total_difficulty: Difficulty::one(),
-            scaling_difficulty: 1,
+            total_difficulty: Difficulty::min(),
+            secondary_scaling: 1,
             nonce: 28205,
             proof: Proof::new(vec![
                 0x21e, 0x7a2, 0xeae, 0x144e, 0x1b1c, 0x1fbd, 0x203a, 0x214b, 0x293b, 0x2b74,
@@ -67,7 +67,7 @@ pub fn genesis_testnet2() -> core::Block {
         timestamp: Utc.ymd(2018, 3, 26).and_hms(16, 0, 0),
         pow: ProofOfWork {
             total_difficulty: Difficulty::from_num(global::initial_block_difficulty()),
-            scaling_difficulty: 1,
+            secondary_scaling: 1,
             nonce: 1060,
             proof: Proof::new(vec![
                 0x1940730, 0x333b9d0, 0x4739d6f, 0x4c6cfb1, 0x6e3d6c3, 0x74408a3, 0x7ba2bd2,
@@ -90,7 +90,7 @@ pub fn genesis_testnet3() -> core::Block {
         timestamp: Utc.ymd(2018, 7, 8).and_hms(18, 0, 0),
         pow: ProofOfWork {
             total_difficulty: Difficulty::from_num(global::initial_block_difficulty()),
-            scaling_difficulty: 1,
+            secondary_scaling: 1,
             nonce: 4956988373127691,
             proof: Proof::new(vec![
                 0xa420dc, 0xc8ffee, 0x10e433e, 0x1de9428, 0x2ed4cea, 0x52d907b, 0x5af0e3f,
@@ -114,7 +114,7 @@ pub fn genesis_testnet4() -> core::Block {
         timestamp: Utc.ymd(2018, 10, 17).and_hms(20, 0, 0),
         pow: ProofOfWork {
             total_difficulty: Difficulty::from_num(global::initial_block_difficulty()),
-            scaling_difficulty: global::initial_graph_weight(),
+            secondary_scaling: global::initial_graph_weight(),
             nonce: 8612241555342799290,
             proof: Proof::new(vec![
                 0x46f3b4, 0x1135f8c, 0x1a1596f, 0x1e10f71, 0x41c03ea, 0x63fe8e7, 0x65af34f,
@@ -137,7 +137,7 @@ pub fn genesis_main() -> core::Block {
         timestamp: Utc.ymd(2018, 8, 14).and_hms(0, 0, 0),
         pow: ProofOfWork {
             total_difficulty: Difficulty::from_num(global::initial_block_difficulty()),
-            scaling_difficulty: 1,
+            secondary_scaling: 1,
             nonce: global::get_genesis_nonce(),
             proof: Proof::zero(consensus::PROOFSIZE),
         },

@@ -20,6 +20,7 @@ use consensus::HeaderInfo;
 use consensus::{
     graph_weight, BASE_EDGE_BITS, BLOCK_TIME_SEC, COINBASE_MATURITY, CUT_THROUGH_HORIZON,
     DAY_HEIGHT, DIFFICULTY_ADJUST_WINDOW, INITIAL_DIFFICULTY, PROOFSIZE, SECOND_POW_EDGE_BITS,
+    UNIT_DIFFICULTY,
 };
 use pow::{self, CuckatooContext, EdgeType, PoWContext};

 /// An enum collecting sets of parameters used throughout the
@@ -77,7 +78,7 @@ pub const PEER_EXPIRATION_REMOVE_TIME: i64 = PEER_EXPIRATION_DAYS * 24 * 3600;
 /// Testnet 4 initial block difficulty
 /// 1_000 times natural scale factor for cuckatoo29
-pub const TESTNET4_INITIAL_DIFFICULTY: u64 = 1_000 * (2 << (29 - 24)) * 29;
+pub const TESTNET4_INITIAL_DIFFICULTY: u64 = 1_000 * UNIT_DIFFICULTY;

 /// Trigger compaction check on average every day for all nodes.
 /// Randomized per node - roll the dice on every block to decide.
@@ -286,46 +287,25 @@
     let needed_block_count = DIFFICULTY_ADJUST_WINDOW as usize + 1;
     let mut last_n: Vec<HeaderInfo> = cursor.into_iter().take(needed_block_count).collect();

-    // Sort blocks from earliest to latest (to keep conceptually easier)
-    last_n.reverse();
     // Only needed just after blockchain launch... basically ensures there's
     // always enough data by simulating perfectly timed pre-genesis
     // blocks at the genesis difficulty as needed.
-    let block_count_difference = needed_block_count - last_n.len();
-    if block_count_difference > 0 {
-        // Collect any real data we have
-        let mut live_intervals: Vec<HeaderInfo> = last_n
-            .iter()
-            .map(|b| HeaderInfo::from_ts_diff(b.timestamp, b.difficulty))
-            .collect();
-        for i in (1..live_intervals.len()).rev() {
-            // prevents issues with very fast automated test chains
-            if live_intervals[i - 1].timestamp > live_intervals[i].timestamp {
-                live_intervals[i].timestamp = 0;
-            } else {
-                live_intervals[i].timestamp -= live_intervals[i - 1].timestamp;
-            }
-        }
-        // Remove genesis "interval"
-        if live_intervals.len() > 1 {
-            live_intervals.remove(0);
-        } else {
-            //if it's just genesis, adjust the interval
-            live_intervals[0].timestamp = BLOCK_TIME_SEC;
-        }
-        let mut interval_index = live_intervals.len() - 1;
-        let mut last_ts = last_n.first().unwrap().timestamp;
-        let last_diff = live_intervals[live_intervals.len() - 1].difficulty;
-        // fill in simulated blocks with values from the previous real block
-        for _ in 0..block_count_difference {
-            last_ts = last_ts.saturating_sub(live_intervals[live_intervals.len() - 1].timestamp);
-            last_n.insert(0, HeaderInfo::from_ts_diff(last_ts, last_diff));
-            interval_index = match interval_index {
-                0 => live_intervals.len() - 1,
-                _ => interval_index - 1,
-            };
+    let n = last_n.len();
+    if needed_block_count > n {
+        let last_ts_delta = if n > 1 {
+            last_n[0].timestamp - last_n[1].timestamp
+        } else {
+            BLOCK_TIME_SEC
+        };
+        let last_diff = last_n[0].difficulty;
+
+        // fill in simulated blocks with values from the previous real block
+        let mut last_ts = last_n.last().unwrap().timestamp;
+        for _ in n..needed_block_count {
+            last_ts = last_ts.saturating_sub(last_ts_delta);
+            last_n.push(HeaderInfo::from_ts_diff(last_ts, last_diff.clone()));
         }
     }
+    last_n.reverse();
     last_n
 }
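
The rewritten `difficulty_data_to_vector` padding above is compact enough to sanity-check in isolation. The sketch below is a stand-alone approximation: `HeaderInfo` here is a stand-in struct rather than the real type, BLOCK_TIME_SEC is assumed to be 60, and at least the genesis header is assumed present. Given a newest-first window that is short of `DIFFICULTY_ADJUST_WINDOW + 1` entries, it appends simulated pre-genesis blocks spaced by the last observed interval, then reverses to the oldest-first order the retarget code expects:

    // Stand-in for the real core::consensus::HeaderInfo.
    #[derive(Clone, Debug, PartialEq)]
    struct HeaderInfo {
        timestamp: u64,
        difficulty: u64,
    }

    const BLOCK_TIME_SEC: u64 = 60; // assumed block interval

    /// `last_n` is newest-first, as yielded by the chain's difficulty
    /// iterator, and must contain at least the genesis header. Returns
    /// exactly `needed` entries, oldest-first, padding with simulated
    /// pre-genesis blocks at the last real difficulty and interval.
    fn pad_window(mut last_n: Vec<HeaderInfo>, needed: usize) -> Vec<HeaderInfo> {
        let n = last_n.len();
        if needed > n {
            // Interval between the two most recent real blocks, or the
            // target block time if only genesis exists.
            let last_ts_delta = if n > 1 {
                last_n[0].timestamp - last_n[1].timestamp
            } else {
                BLOCK_TIME_SEC
            };
            let last_diff = last_n[0].difficulty;
            let mut last_ts = last_n.last().unwrap().timestamp;
            for _ in n..needed {
                last_ts = last_ts.saturating_sub(last_ts_delta);
                last_n.push(HeaderInfo { timestamp: last_ts, difficulty: last_diff });
            }
        }
        last_n.reverse(); // oldest first, as the retarget code expects
        last_n
    }

    fn main() {
        // Only genesis on chain: simulate two perfectly timed earlier blocks.
        let genesis = HeaderInfo { timestamp: 1_000_000, difficulty: 1000 };
        let padded = pad_window(vec![genesis], 3);
        assert_eq!(padded[0].timestamp, 1_000_000 - 2 * BLOCK_TIME_SEC);
        assert_eq!(padded.iter().filter(|h| h.difficulty == 1000).count(), 3);
    }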

@@ -141,12 +141,12 @@ mod test {
         b.header.pow.nonce = 485;
         pow_size(
             &mut b.header,
-            Difficulty::one(),
+            Difficulty::min(),
             global::proofsize(),
             global::min_edge_bits(),
         ).unwrap();
         assert!(b.header.pow.nonce != 310);
-        assert!(b.header.pow.to_difficulty() >= Difficulty::one());
+        assert!(b.header.pow.to_difficulty() >= Difficulty::min());
         assert!(verify_size(&b.header, global::min_edge_bits()).is_ok());
     }
 }

@@ -21,7 +21,7 @@ use std::{fmt, iter};
 use rand::{thread_rng, Rng};
 use serde::{de, Deserialize, Deserializer, Serialize, Serializer};

-use consensus::{graph_weight, SECOND_POW_EDGE_BITS};
+use consensus::{graph_weight, SECOND_POW_EDGE_BITS, MIN_DIFFICULTY};
 use core::hash::Hashed;
 use global;
 use ser::{self, Readable, Reader, Writeable, Writer};
@@ -64,10 +64,14 @@ impl Difficulty {
         Difficulty { num: 0 }
     }

-    /// Difficulty of one, which is the minimum difficulty
-    /// (when the hash equals the max target)
-    pub fn one() -> Difficulty {
-        Difficulty { num: 1 }
+    /// Difficulty of MIN_DIFFICULTY
+    pub fn min() -> Difficulty {
+        Difficulty { num: MIN_DIFFICULTY }
+    }
+
+    /// Difficulty unit, which is the graph weight of minimal graph
+    pub fn unit() -> Difficulty {
+        Difficulty { num: global::initial_graph_weight() as u64 }
     }

     /// Convert a `u32` into a `Difficulty`
@@ -209,8 +213,8 @@ impl<'de> de::Visitor<'de> for DiffVisitor {
 pub struct ProofOfWork {
     /// Total accumulated difficulty since genesis block
     pub total_difficulty: Difficulty,
-    /// Difficulty scaling factor between the different proofs of work
-    pub scaling_difficulty: u32,
+    /// Variable difficulty scaling factor for secondary proof of work
+    pub secondary_scaling: u32,
     /// Nonce increment used to mine this block.
     pub nonce: u64,
     /// Proof of work data.
@@ -221,8 +225,8 @@ impl Default for ProofOfWork {
     fn default() -> ProofOfWork {
         let proof_size = global::proofsize();
         ProofOfWork {
-            total_difficulty: Difficulty::one(),
-            scaling_difficulty: 1,
+            total_difficulty: Difficulty::min(),
+            secondary_scaling: 1,
             nonce: 0,
             proof: Proof::zero(proof_size),
         }
@@ -233,12 +237,12 @@ impl ProofOfWork {
     /// Read implementation, can't define as trait impl as we need a version
     pub fn read(_ver: u16, reader: &mut Reader) -> Result<ProofOfWork, ser::Error> {
         let total_difficulty = Difficulty::read(reader)?;
-        let scaling_difficulty = reader.read_u32()?;
+        let secondary_scaling = reader.read_u32()?;
         let nonce = reader.read_u64()?;
         let proof = Proof::read(reader)?;
         Ok(ProofOfWork {
             total_difficulty,
-            scaling_difficulty,
+            secondary_scaling,
             nonce,
             proof,
         })
@@ -260,7 +264,7 @@ impl ProofOfWork {
         ser_multiwrite!(
             writer,
             [write_u64, self.total_difficulty.to_num()],
-            [write_u32, self.scaling_difficulty]
+            [write_u32, self.secondary_scaling]
         );
         Ok(())
     }
@@ -270,7 +274,7 @@ impl ProofOfWork {
         // 2 proof of works, Cuckoo29 (for now) and Cuckoo30+, which are scaled
         // differently (scaling not controlled for now)
         if self.proof.edge_bits == SECOND_POW_EDGE_BITS {
-            Difficulty::from_proof_scaled(&self.proof, self.scaling_difficulty)
+            Difficulty::from_proof_scaled(&self.proof, self.secondary_scaling)
         } else {
             Difficulty::from_proof_adjusted(&self.proof)
         }
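
With both constructors in place, the split is: `Difficulty::min()` is the tiny retarget floor (MIN_DIFFICULTY = DAMP_FACTOR), while `Difficulty::unit()` is the graph weight of a minimal secondary graph, three orders of magnitude larger. A quick sketch re-deriving the two from their definitions, assuming SECOND_POW_EDGE_BITS = 29, BASE_EDGE_BITS = 24, and DAMP_FACTOR = 3 as in consensus.rs at the time:

    // Assumed values matching consensus.rs when this commit landed.
    const SECOND_POW_EDGE_BITS: u8 = 29;
    const BASE_EDGE_BITS: u8 = 24;
    const DAMP_FACTOR: u64 = 3;

    // The retarget floor: deliberately tiny.
    const MIN_DIFFICULTY: u64 = DAMP_FACTOR;
    // One "unit" of difficulty: the graph weight of a single cuckatoo29 graph.
    const UNIT_DIFFICULTY: u64 =
        ((2 as u64) << (SECOND_POW_EDGE_BITS - BASE_EDGE_BITS)) * (SECOND_POW_EDGE_BITS as u64);

    fn main() {
        assert_eq!(MIN_DIFFICULTY, 3);
        assert_eq!(UNIT_DIFFICULTY, 1_856); // (2 << 5) * 29 = 64 * 29
        // INITIAL_DIFFICULTY for mainnet is now expressed in whole graph
        // units, which is what the old MIN_DIFFICULTY actually measured:
        assert_eq!(1_000_000 * UNIT_DIFFICULTY, 1_856_000_000);
    }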

@@ -92,7 +92,7 @@
     Block::new(
         &previous_header,
         txs.into_iter().cloned().collect(),
-        Difficulty::one(),
+        Difficulty::min(),
         reward_output,
     ).unwrap()
 }

@@ -381,55 +381,38 @@ fn adjustment_scenarios() {
 fn next_target_adjustment() {
     global::set_mining_mode(global::ChainTypes::AutomatedTesting);
     let cur_time = Utc::now().timestamp() as u64;
+    let diff_min = Difficulty::min();

-    let diff_one = Difficulty::one();
-    assert_eq!(
-        next_difficulty(1, vec![HeaderInfo::from_ts_diff(cur_time, diff_one)]),
-        HeaderInfo::from_diff_scaling(Difficulty::one(), 1),
-    );
-    assert_eq!(
-        next_difficulty(1, vec![HeaderInfo::new(cur_time, diff_one, 10, true)]),
-        HeaderInfo::from_diff_scaling(Difficulty::one(), 1),
-    );
+    // Check we don't get stuck on difficulty <= MIN_DIFFICULTY (at 4x faster blocks at least)
+    let mut hi = HeaderInfo::from_diff_scaling(diff_min, MIN_DIFFICULTY as u32);
+    hi.is_secondary = false;
+    let hinext = next_difficulty(1, repeat(15, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None));
+    assert_ne!(hinext.difficulty, diff_min);

-    let mut hi = HeaderInfo::from_diff_scaling(diff_one, 1);
-    assert_eq!(
-        next_difficulty(1, repeat(60, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)),
-        HeaderInfo::from_diff_scaling(Difficulty::one(), 1),
-    );
-    hi.is_secondary = true;
-    assert_eq!(
-        next_difficulty(1, repeat(60, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)),
-        HeaderInfo::from_diff_scaling(Difficulty::one(), 1),
-    );
-    hi.secondary_scaling = 100;
-    assert_eq!(
-        next_difficulty(1, repeat(60, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)),
-        HeaderInfo::from_diff_scaling(Difficulty::one(), 96),
-    );
-
-    // Check we don't get stuck on difficulty 1
-    let mut hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), 1);
-    assert_ne!(
-        next_difficulty(1, repeat(1, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)).difficulty,
-        Difficulty::one()
-    );
+    // Check we don't get stuck on scale MIN_DIFFICULTY, when primary frequency is too high
+    assert_ne!(hinext.secondary_scaling, MIN_DIFFICULTY as u32);

     // just enough data, right interval, should stay constant
     let just_enough = DIFFICULTY_ADJUST_WINDOW + 1;
-    hi.difficulty = Difficulty::from_num(1000);
+    hi.difficulty = Difficulty::from_num(10000);
     assert_eq!(
-        next_difficulty(1, repeat(60, hi.clone(), just_enough, None)).difficulty,
-        Difficulty::from_num(1000)
-    );
-
-    // check pre difficulty_data_to_vector effect on retargetting
-    assert_eq!(
-        next_difficulty(1, vec![HeaderInfo::from_ts_diff(42, hi.difficulty)]).difficulty,
-        Difficulty::from_num(14913)
+        next_difficulty(1, repeat(BLOCK_TIME_SEC, hi.clone(), just_enough, None)).difficulty,
+        Difficulty::from_num(10000)
     );

     // checking averaging works
     hi.difficulty = Difficulty::from_num(500);
     let sec = DIFFICULTY_ADJUST_WINDOW / 2;
-    let mut s1 = repeat(60, hi.clone(), sec, Some(cur_time));
+    let mut s1 = repeat(BLOCK_TIME_SEC, hi.clone(), sec, Some(cur_time));
     let mut s2 = repeat_offs(
-        cur_time + (sec * 60) as u64,
-        60,
+        cur_time + (sec * BLOCK_TIME_SEC) as u64,
+        BLOCK_TIME_SEC,
         1500,
         DIFFICULTY_ADJUST_WINDOW / 2,
     );
@@ -459,16 +442,16 @@ fn next_target_adjustment() {
         next_difficulty(1, repeat(45, hi.clone(), just_enough, None)).difficulty,
         Difficulty::from_num(1090)
     );
+    assert_eq!(
+        next_difficulty(1, repeat(30, hi.clone(), just_enough, None)).difficulty,
+        Difficulty::from_num(1200)
+    );

     // hitting lower time bound, should always get the same result below
     assert_eq!(
         next_difficulty(1, repeat(0, hi.clone(), just_enough, None)).difficulty,
         Difficulty::from_num(1500)
     );
-    assert_eq!(
-        next_difficulty(1, repeat(0, hi.clone(), just_enough, None)).difficulty,
-        Difficulty::from_num(1500)
-    );

     // hitting higher time bound, should always get the same result above
     assert_eq!(
@@ -480,11 +463,11 @@ fn next_target_adjustment() {
         Difficulty::from_num(500)
     );

-    // We should never drop below 1
+    // We should never drop below minimum
     hi.difficulty = Difficulty::zero();
     assert_eq!(
         next_difficulty(1, repeat(90, hi.clone(), just_enough, None)).difficulty,
-        Difficulty::from_num(1)
+        Difficulty::min()
     );
 }
@@ -495,6 +478,7 @@ fn secondary_pow_scale() {
     // all primary, factor should increase so it becomes easier to find a high
     // difficulty block
+    hi.is_secondary = false;
     assert_eq!(
         secondary_pow_scaling(1, &(0..window).map(|_| hi.clone()).collect::<Vec<_>>()),
         147
@@ -514,17 +498,15 @@ fn secondary_pow_scale() {
         49
     );
     // same as above, testing lowest bound
-    let mut low_hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), 3);
+    let mut low_hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), MIN_DIFFICULTY as u32);
     low_hi.is_secondary = true;
     assert_eq!(
-        secondary_pow_scaling(
-            890_000,
-            &(0..window).map(|_| low_hi.clone()).collect::<Vec<_>>()
-        ),
-        1
+        secondary_pow_scaling(890_000, &(0..window).map(|_| low_hi.clone()).collect::<Vec<_>>()),
+        MIN_DIFFICULTY as u32
     );
     // just about the right ratio, also no longer playing with median
-    let primary_hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), 50);
+    let mut primary_hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), 50);
+    primary_hi.is_secondary = false;
     assert_eq!(
         secondary_pow_scaling(
             1,
@@ -535,7 +517,7 @@ fn secondary_pow_scale() {
         ),
         94
     );
-    // 95% secondary, should come down based on 100 median
+    // 95% secondary, should come down based on 97.5 average
     assert_eq!(
         secondary_pow_scaling(
             1,
@@ -546,7 +528,7 @@ fn secondary_pow_scale() {
         ),
         94
     );
-    // 40% secondary, should come up based on 50 median
+    // 40% secondary, should come up based on 70 average
     assert_eq!(
         secondary_pow_scaling(
             1,

@@ -82,7 +82,7 @@ Optionally return results as "compact blocks" by passing `?compact` query.
 | - edge_bits | number | Size of the cuckoo graph (2_log of number of edges) |
 | - cuckoo_solution | []number | The Cuckoo solution for this block |
 | - total_difficulty | number | Total accumulated difficulty since genesis block |
-| - scaling_difficulty | number | Difficulty scaling factor between the different proofs of work |
+| - secondary_scaling | number | Variable difficulty scaling factor for secondary proof of work |
 | - total_kernel_offset | string | Total kernel offset since genesis block |
 | inputs | []string | Input transactions |
 | outputs | []object | Outputs transactions |

@@ -195,7 +195,7 @@ pub struct DummyAdapter {}
 impl ChainAdapter for DummyAdapter {
     fn total_difficulty(&self) -> Difficulty {
-        Difficulty::one()
+        Difficulty::min()
     }
     fn total_height(&self) -> u64 {
         0

@@ -73,7 +73,7 @@ fn peer_handshake() {
     let mut peer = Peer::connect(
         &mut socket,
         p2p::Capabilities::UNKNOWN,
-        Difficulty::one(),
+        Difficulty::min(),
         my_addr,
         &p2p::handshake::Handshake::new(Hash::from_vec(&vec![]), p2p_config.clone()),
         net_adapter,
@@ -84,10 +84,10 @@ fn peer_handshake() {
     peer.start(socket);
     thread::sleep(time::Duration::from_secs(1));

-    peer.send_ping(Difficulty::one(), 0).unwrap();
+    peer.send_ping(Difficulty::min(), 0).unwrap();
     thread::sleep(time::Duration::from_secs(1));

     let server_peer = server.peers.get_connected_peer(&my_addr).unwrap();
-    assert_eq!(server_peer.info.total_difficulty(), Difficulty::one());
+    assert_eq!(server_peer.info.total_difficulty(), Difficulty::min());
     assert!(server.peers.peer_count() > 0);
 }

@@ -55,7 +55,7 @@ fn test_transaction_pool_block_building() {
         let key_id = ExtKeychain::derive_key_id(1, height as u32, 0, 0, 0);
         let fee = txs.iter().map(|x| x.fee()).sum();
         let reward = libtx::reward::output(&keychain, &key_id, fee, height).unwrap();
-        let block = Block::new(&prev_header, txs, Difficulty::one(), reward).unwrap();
+        let block = Block::new(&prev_header, txs, Difficulty::min(), reward).unwrap();

         chain.update_db_for_block(&block);
         block.header
@@ -116,7 +116,7 @@ fn test_transaction_pool_block_building() {
         let key_id = ExtKeychain::derive_key_id(1, 2, 0, 0, 0);
         let fees = txs.iter().map(|tx| tx.fee()).sum();
         let reward = libtx::reward::output(&keychain, &key_id, fees, 0).unwrap();
-        Block::new(&header, txs, Difficulty::one(), reward)
+        Block::new(&header, txs, Difficulty::min(), reward)
     }.unwrap();

     chain.update_db_for_block(&block);

@@ -53,7 +53,7 @@ fn test_transaction_pool_block_reconciliation() {
         let height = 1;
         let key_id = ExtKeychain::derive_key_id(1, height as u32, 0, 0, 0);
         let reward = libtx::reward::output(&keychain, &key_id, 0, height).unwrap();
-        let block = Block::new(&BlockHeader::default(), vec![], Difficulty::one(), reward).unwrap();
+        let block = Block::new(&BlockHeader::default(), vec![], Difficulty::min(), reward).unwrap();

         chain.update_db_for_block(&block);
@@ -68,7 +68,7 @@ fn test_transaction_pool_block_reconciliation() {
         let key_id = ExtKeychain::derive_key_id(1, 2, 0, 0, 0);
         let fees = initial_tx.fee();
         let reward = libtx::reward::output(&keychain, &key_id, fees, 0).unwrap();
-        let block = Block::new(&header, vec![initial_tx], Difficulty::one(), reward).unwrap();
+        let block = Block::new(&header, vec![initial_tx], Difficulty::min(), reward).unwrap();

         chain.update_db_for_block(&block);
@@ -158,7 +158,7 @@ fn test_transaction_pool_block_reconciliation() {
         let key_id = ExtKeychain::derive_key_id(1, 3, 0, 0, 0);
         let fees = block_txs.iter().map(|tx| tx.fee()).sum();
         let reward = libtx::reward::output(&keychain, &key_id, fees, 0).unwrap();
-        let block = Block::new(&header, block_txs, Difficulty::one(), reward).unwrap();
+        let block = Block::new(&header, block_txs, Difficulty::min(), reward).unwrap();
         chain.update_db_for_block(&block);
         block

@@ -54,7 +54,7 @@ fn test_the_transaction_pool() {
         let key_id = ExtKeychain::derive_key_id(1, height as u32, 0, 0, 0);
         let reward = libtx::reward::output(&keychain, &key_id, 0, height).unwrap();
         let mut block =
-            Block::new(&BlockHeader::default(), vec![], Difficulty::one(), reward).unwrap();
+            Block::new(&BlockHeader::default(), vec![], Difficulty::min(), reward).unwrap();

         chain.update_db_for_block(&block);

@@ -126,7 +126,7 @@ fn build_block(
     b.validate(&head.total_kernel_offset, verifier_cache)?;
     b.header.pow.nonce = thread_rng().gen();
-    b.header.pow.scaling_difficulty = difficulty.secondary_scaling;
+    b.header.pow.secondary_scaling = difficulty.secondary_scaling;
     b.header.timestamp = DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(now_sec, 0), Utc);

     let b_difficulty = (b.header.total_difficulty() - head.total_difficulty()).to_num();

@@ -98,7 +98,7 @@ pub fn add_block_with_reward(chain: &Chain, txs: Vec<&Transaction>, reward: CbDa
         (output, kernel),
     ).unwrap();
     b.header.timestamp = prev.timestamp + Duration::seconds(60);
-    b.header.pow.scaling_difficulty = next_header_info.secondary_scaling;
+    b.header.pow.secondary_scaling = next_header_info.secondary_scaling;
     chain.set_txhashset_roots(&mut b, false).unwrap();
     pow::pow_size(
         &mut b.header,