From e24e65399bb4e83d1728f856a69a0722d957e0ff Mon Sep 17 00:00:00 2001 From: John Tromp Date: Thu, 18 Oct 2018 14:20:00 +0200 Subject: [PATCH 1/9] rename MIN_DIFFICULTY to UNIT_DIFFICULTY; define Difficulty::unit, fix INITIAL_DIFFICULTY --- core/src/consensus.rs | 9 +++------ core/src/global.rs | 3 ++- core/src/pow/types.rs | 5 +++++ core/tests/consensus.rs | 13 +++++++------ 4 files changed, 17 insertions(+), 13 deletions(-) diff --git a/core/src/consensus.rs b/core/src/consensus.rs index aecfb1371..dbc68789c 100644 --- a/core/src/consensus.rs +++ b/core/src/consensus.rs @@ -158,18 +158,15 @@ pub fn graph_weight(edge_bits: u8) -> u64 { (2 << (edge_bits - global::base_edge_bits()) as u64) * (edge_bits as u64) } -/// minimum possible difficulty equal to graph_weight(SECOND_POW_EDGE_BITS) -pub const MIN_DIFFICULTY: u64 = +/// unit difficulty, equal to graph_weight(SECOND_POW_EDGE_BITS) +pub const UNIT_DIFFICULTY: u64 = ((2 as u64) << (SECOND_POW_EDGE_BITS - BASE_EDGE_BITS)) * (SECOND_POW_EDGE_BITS as u64); /// The initial difficulty at launch. This should be over-estimated /// and difficulty should come down at launch rather than up /// Currently grossly over-estimated at 10% of current /// ethereum GPUs (assuming 1GPU can solve a block at diff 1 in one block interval) -/// FOR MAINNET, use -/// pub const INITIAL_DIFFICULTY: u64 = 1_000_000 * MIN_DIFFICULTY; -/// Pick MUCH more modest value for TESTNET4: -pub const INITIAL_DIFFICULTY: u64 = 1_000 * MIN_DIFFICULTY; +pub const INITIAL_DIFFICULTY: u64 = 1_000_000 * UNIT_DIFFICULTY; /// Consensus errors #[derive(Clone, Debug, Eq, PartialEq, Fail)] diff --git a/core/src/global.rs b/core/src/global.rs index bb1e6baa7..0002d82b2 100644 --- a/core/src/global.rs +++ b/core/src/global.rs @@ -20,6 +20,7 @@ use consensus::HeaderInfo; use consensus::{ graph_weight, BASE_EDGE_BITS, BLOCK_TIME_SEC, COINBASE_MATURITY, CUT_THROUGH_HORIZON, DAY_HEIGHT, DIFFICULTY_ADJUST_WINDOW, INITIAL_DIFFICULTY, PROOFSIZE, SECOND_POW_EDGE_BITS, + UNIT_DIFFICULTY }; use pow::{self, CuckatooContext, EdgeType, PoWContext}; /// An enum collecting sets of parameters used throughout the @@ -71,7 +72,7 @@ pub const STUCK_PEER_KICK_TIME: i64 = 2 * 3600 * 1000; /// Testnet 4 initial block difficulty /// 1_000 times natural scale factor for cuckatoo29 -pub const TESTNET4_INITIAL_DIFFICULTY: u64 = 1_000 * (2 << (29 - 24)) * 29; +pub const TESTNET4_INITIAL_DIFFICULTY: u64 = 1_000 * UNIT_DIFFICULTY; /// Trigger compaction check on average every day for FAST_SYNC_NODE, /// roll the dice on every block to decide, diff --git a/core/src/pow/types.rs b/core/src/pow/types.rs index 505957c5e..96ba1d52a 100644 --- a/core/src/pow/types.rs +++ b/core/src/pow/types.rs @@ -70,6 +70,11 @@ impl Difficulty { Difficulty { num: 1 } } + /// Difficulty unit, which is the graph weight of minimal graph + pub fn unit() -> Difficulty { + Difficulty { num: global::initial_graph_weight() } + } + /// Convert a `u32` into a `Difficulty` pub fn from_num(num: u64) -> Difficulty { // can't have difficulty lower than 1 diff --git a/core/tests/consensus.rs b/core/tests/consensus.rs index 41f3d583f..039753a94 100644 --- a/core/tests/consensus.rs +++ b/core/tests/consensus.rs @@ -383,36 +383,37 @@ fn next_target_adjustment() { let cur_time = Utc::now().timestamp() as u64; let diff_one = Difficulty::one(); + let diff_unit = Difficulty::unit(); assert_eq!( next_difficulty(1, vec![HeaderInfo::from_ts_diff(cur_time, diff_one)]), - HeaderInfo::from_diff_scaling(Difficulty::one(), 1), + HeaderInfo::from_diff_scaling(diff_one, 
diff_unit), ); assert_eq!( next_difficulty(1, vec![HeaderInfo::new(cur_time, diff_one, 10, true)]), - HeaderInfo::from_diff_scaling(Difficulty::one(), 1), + HeaderInfo::from_diff_scaling(diff_one, 1), ); let mut hi = HeaderInfo::from_diff_scaling(diff_one, 1); assert_eq!( next_difficulty(1, repeat(60, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)), - HeaderInfo::from_diff_scaling(Difficulty::one(), 1), + HeaderInfo::from_diff_scaling(diff_one, 1), ); hi.is_secondary = true; assert_eq!( next_difficulty(1, repeat(60, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)), - HeaderInfo::from_diff_scaling(Difficulty::one(), 1), + HeaderInfo::from_diff_scaling(diff_one, 1), ); hi.secondary_scaling = 100; assert_eq!( next_difficulty(1, repeat(60, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)), - HeaderInfo::from_diff_scaling(Difficulty::one(), 96), + HeaderInfo::from_diff_scaling(diff_one, 96), ); // Check we don't get stuck on difficulty 1 let mut hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), 1); assert_ne!( next_difficulty(1, repeat(1, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)).difficulty, - Difficulty::one() + diff_one ); // just enough data, right interval, should stay constant From 41293e9ef73ec0e9b9be899f9c6a808c418786ed Mon Sep 17 00:00:00 2001 From: John Tromp Date: Thu, 18 Oct 2018 20:37:33 +0200 Subject: [PATCH 2/9] improve minimum difficulty handling --- chain/tests/test_coinbase_maturity.rs | 8 +-- core/src/consensus.rs | 17 +++--- core/src/pow/types.rs | 9 +++- core/tests/consensus.rs | 75 ++++++++++----------------- 4 files changed, 49 insertions(+), 60 deletions(-) diff --git a/chain/tests/test_coinbase_maturity.rs b/chain/tests/test_coinbase_maturity.rs index ef7fef954..c5005e251 100644 --- a/chain/tests/test_coinbase_maturity.rs +++ b/chain/tests/test_coinbase_maturity.rs @@ -70,7 +70,7 @@ fn test_coinbase_maturity() { let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter()); let reward = libtx::reward::output(&keychain, &key_id1, 0, prev.height).unwrap(); - let mut block = core::core::Block::new(&prev, vec![], Difficulty::one(), reward).unwrap(); + let mut block = core::core::Block::new(&prev, vec![], Difficulty::min(), reward).unwrap(); block.header.timestamp = prev.timestamp + Duration::seconds(60); block.header.pow.scaling_difficulty = next_header_info.secondary_scaling; @@ -116,7 +116,7 @@ fn test_coinbase_maturity() { let txs = vec![coinbase_txn.clone()]; let fees = txs.iter().map(|tx| tx.fee()).sum(); let reward = libtx::reward::output(&keychain, &key_id3, fees, prev.height).unwrap(); - let mut block = core::core::Block::new(&prev, txs, Difficulty::one(), reward).unwrap(); + let mut block = core::core::Block::new(&prev, txs, Difficulty::min(), reward).unwrap(); let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter()); block.header.timestamp = prev.timestamp + Duration::seconds(60); block.header.pow.scaling_difficulty = next_header_info.secondary_scaling; @@ -149,7 +149,7 @@ fn test_coinbase_maturity() { let pk = ExtKeychainPath::new(1, 1, 0, 0, 0).to_identifier(); let reward = libtx::reward::output(&keychain, &pk, 0, prev.height).unwrap(); - let mut block = core::core::Block::new(&prev, vec![], Difficulty::one(), reward).unwrap(); + let mut block = core::core::Block::new(&prev, vec![], Difficulty::min(), reward).unwrap(); let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter()); block.header.timestamp = prev.timestamp + Duration::seconds(60); block.header.pow.scaling_difficulty = 
next_header_info.secondary_scaling; @@ -176,7 +176,7 @@ fn test_coinbase_maturity() { let fees = txs.iter().map(|tx| tx.fee()).sum(); let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter()); let reward = libtx::reward::output(&keychain, &key_id4, fees, prev.height).unwrap(); - let mut block = core::core::Block::new(&prev, txs, Difficulty::one(), reward).unwrap(); + let mut block = core::core::Block::new(&prev, txs, Difficulty::min(), reward).unwrap(); block.header.timestamp = prev.timestamp + Duration::seconds(60); block.header.pow.scaling_difficulty = next_header_info.secondary_scaling; diff --git a/core/src/consensus.rs b/core/src/consensus.rs index dbc68789c..2ba2389ab 100644 --- a/core/src/consensus.rs +++ b/core/src/consensus.rs @@ -158,6 +158,9 @@ pub fn graph_weight(edge_bits: u8) -> u64 { (2 << (edge_bits - global::base_edge_bits()) as u64) * (edge_bits as u64) } +/// minimum difficulty to avoid getting stuck when trying to increase subject to dampening +pub const MIN_DIFFICULTY: u64 = DAMP_FACTOR; + /// unit difficulty, equal to graph_weight(SECOND_POW_EDGE_BITS) pub const UNIT_DIFFICULTY: u64 = ((2 as u64) << (SECOND_POW_EDGE_BITS - BASE_EDGE_BITS)) * (SECOND_POW_EDGE_BITS as u64); @@ -218,7 +221,7 @@ impl HeaderInfo { timestamp, difficulty, secondary_scaling: global::initial_graph_weight(), - is_secondary: false, + is_secondary: true, } } @@ -229,17 +232,17 @@ impl HeaderInfo { timestamp: 1, difficulty, secondary_scaling, - is_secondary: false, + is_secondary: true, } } } -/// TODO: Doc +/// Move value linearly toward a goal pub fn damp(actual: u64, goal: u64, damp_factor: u64) -> u64 { (1 * actual + (damp_factor - 1) * goal) / damp_factor } -/// TODO: Doc +/// limit value to be within some factor from a goal pub fn clamp(actual: u64, goal: u64, clamp_factor: u64) -> u64 { max(goal / clamp_factor, min(actual, goal * clamp_factor)) } @@ -287,7 +290,8 @@ where BLOCK_TIME_WINDOW, CLAMP_FACTOR, ); - let difficulty = max(1, diff_sum * BLOCK_TIME_SEC / adj_ts); + // minimum of 3 avoids getting stuck due to dampening + let difficulty = max(3, diff_sum * BLOCK_TIME_SEC / adj_ts); HeaderInfo::from_diff_scaling(Difficulty::from_num(difficulty), sec_pow_scaling) } @@ -316,7 +320,8 @@ pub fn secondary_pow_scaling(height: u64, diff_data: &Vec) -> u32 { ); let scale = scale_sum * target_pct / adj_count; - max(1, min(scale, MAX_SECONDARY_SCALING)) as u32 + // minimum of 3 avoids getting stuck due to dampening + max(3, min(scale, MAX_SECONDARY_SCALING)) as u32 } /// Consensus rule that collections of items are sorted lexicographically. 
diff --git a/core/src/pow/types.rs b/core/src/pow/types.rs index 96ba1d52a..6c4e143ef 100644 --- a/core/src/pow/types.rs +++ b/core/src/pow/types.rs @@ -21,7 +21,7 @@ use std::{fmt, iter}; use rand::{thread_rng, Rng}; use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; -use consensus::{graph_weight, SECOND_POW_EDGE_BITS}; +use consensus::{graph_weight, SECOND_POW_EDGE_BITS, MIN_DIFFICULTY}; use core::hash::Hashed; use global; use ser::{self, Readable, Reader, Writeable, Writer}; @@ -70,9 +70,14 @@ impl Difficulty { Difficulty { num: 1 } } + /// Difficulty of MIN_DIFFICULTY + pub fn min() -> Difficulty { + Difficulty { num: MIN_DIFFICULTY } + } + /// Difficulty unit, which is the graph weight of minimal graph pub fn unit() -> Difficulty { - Difficulty { num: global::initial_graph_weight() } + Difficulty { num: global::initial_graph_weight() as u64 } } /// Convert a `u32` into a `Difficulty` diff --git a/core/tests/consensus.rs b/core/tests/consensus.rs index 039753a94..478559975 100644 --- a/core/tests/consensus.rs +++ b/core/tests/consensus.rs @@ -381,56 +381,33 @@ fn adjustment_scenarios() { fn next_target_adjustment() { global::set_mining_mode(global::ChainTypes::AutomatedTesting); let cur_time = Utc::now().timestamp() as u64; - let diff_one = Difficulty::one(); - let diff_unit = Difficulty::unit(); - assert_eq!( - next_difficulty(1, vec![HeaderInfo::from_ts_diff(cur_time, diff_one)]), - HeaderInfo::from_diff_scaling(diff_one, diff_unit), - ); - assert_eq!( - next_difficulty(1, vec![HeaderInfo::new(cur_time, diff_one, 10, true)]), - HeaderInfo::from_diff_scaling(diff_one, 1), - ); + let diff_min = Difficulty::min(); - let mut hi = HeaderInfo::from_diff_scaling(diff_one, 1); - assert_eq!( - next_difficulty(1, repeat(60, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)), - HeaderInfo::from_diff_scaling(diff_one, 1), - ); - hi.is_secondary = true; - assert_eq!( - next_difficulty(1, repeat(60, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)), - HeaderInfo::from_diff_scaling(diff_one, 1), - ); - hi.secondary_scaling = 100; - assert_eq!( - next_difficulty(1, repeat(60, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)), - HeaderInfo::from_diff_scaling(diff_one, 96), - ); + // Check we don't get stuck on difficulty <= MIN_DIFFICULTY (at 4x faster blocks at least) + let mut hi = HeaderInfo::from_diff_scaling(diff_min, MIN_DIFFICULTY as u32); + hi.is_secondary = false; + let hinext = next_difficulty(1, repeat(15, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)); + assert_ne!(hinext.difficulty, diff_min); - // Check we don't get stuck on difficulty 1 - let mut hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), 1); - assert_ne!( - next_difficulty(1, repeat(1, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)).difficulty, - diff_one - ); + // Check we don't get stuck on scale MIN_DIFFICULTY, when primary frequency is too high + assert_ne!(hinext.secondary_scaling, MIN_DIFFICULTY as u32); // just enough data, right interval, should stay constant let just_enough = DIFFICULTY_ADJUST_WINDOW + 1; - hi.difficulty = Difficulty::from_num(1000); + hi.difficulty = Difficulty::from_num(10000); assert_eq!( - next_difficulty(1, repeat(60, hi.clone(), just_enough, None)).difficulty, - Difficulty::from_num(1000) + next_difficulty(1, repeat(BLOCK_TIME_SEC, hi.clone(), just_enough, None)).difficulty, + Difficulty::from_num(10000) ); // checking averaging works hi.difficulty = Difficulty::from_num(500); let sec = DIFFICULTY_ADJUST_WINDOW / 2; - let mut s1 = repeat(60, hi.clone(), sec, Some(cur_time)); + let mut s1 = 
repeat(BLOCK_TIME_SEC, hi.clone(), sec, Some(cur_time)); let mut s2 = repeat_offs( - cur_time + (sec * 60) as u64, - 60, + cur_time + (sec * BLOCK_TIME_SEC) as u64, + BLOCK_TIME_SEC, 1500, DIFFICULTY_ADJUST_WINDOW / 2, ); @@ -460,16 +437,16 @@ fn next_target_adjustment() { next_difficulty(1, repeat(45, hi.clone(), just_enough, None)).difficulty, Difficulty::from_num(1090) ); + assert_eq!( + next_difficulty(1, repeat(30, hi.clone(), just_enough, None)).difficulty, + Difficulty::from_num(1200) + ); // hitting lower time bound, should always get the same result below assert_eq!( next_difficulty(1, repeat(0, hi.clone(), just_enough, None)).difficulty, Difficulty::from_num(1500) ); - assert_eq!( - next_difficulty(1, repeat(0, hi.clone(), just_enough, None)).difficulty, - Difficulty::from_num(1500) - ); // hitting higher time bound, should always get the same result above assert_eq!( @@ -481,11 +458,11 @@ fn next_target_adjustment() { Difficulty::from_num(500) ); - // We should never drop below 1 + // We should never drop below minimum hi.difficulty = Difficulty::zero(); assert_eq!( next_difficulty(1, repeat(90, hi.clone(), just_enough, None)).difficulty, - Difficulty::from_num(1) + Difficulty::min() ); } @@ -496,6 +473,7 @@ fn secondary_pow_scale() { // all primary, factor should increase so it becomes easier to find a high // difficulty block + hi.is_secondary = false; assert_eq!( secondary_pow_scaling(1, &(0..window).map(|_| hi.clone()).collect()), 147 @@ -512,14 +490,15 @@ fn secondary_pow_scale() { 49 ); // same as above, testing lowest bound - let mut low_hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), 3); + let mut low_hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), MIN_DIFFICULTY as u32); low_hi.is_secondary = true; assert_eq!( secondary_pow_scaling(890_000, &(0..window).map(|_| low_hi.clone()).collect()), - 1 + MIN_DIFFICULTY as u32 ); // just about the right ratio, also no longer playing with median - let primary_hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), 50); + let mut primary_hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), 50); + primary_hi.is_secondary = false; assert_eq!( secondary_pow_scaling( 1, @@ -530,7 +509,7 @@ fn secondary_pow_scale() { ), 94 ); - // 95% secondary, should come down based on 100 median + // 95% secondary, should come down based on 97.5 average assert_eq!( secondary_pow_scaling( 1, @@ -541,7 +520,7 @@ fn secondary_pow_scale() { ), 94 ); - // 40% secondary, should come up based on 50 median + // 40% secondary, should come up based on 70 average assert_eq!( secondary_pow_scaling( 1, From 846b38308cccc8f3de2d5f985b2a5140dd64647e Mon Sep 17 00:00:00 2001 From: John Tromp Date: Thu, 18 Oct 2018 21:18:16 +0200 Subject: [PATCH 3/9] replace all Difficulty::one by ::min --- chain/src/types.rs | 2 +- chain/tests/store_indices.rs | 2 +- core/fuzz/src/main.rs | 2 +- core/src/consensus.rs | 8 ++++---- core/src/genesis.rs | 2 +- core/src/pow/types.rs | 2 +- core/tests/common/mod.rs | 2 +- core/tests/consensus.rs | 1 - p2p/src/serv.rs | 2 +- p2p/tests/peer_handshake.rs | 6 +++--- pool/tests/block_building.rs | 4 ++-- pool/tests/block_reconciliation.rs | 6 +++--- pool/tests/transaction_pool.rs | 2 +- 13 files changed, 20 insertions(+), 21 deletions(-) diff --git a/chain/src/types.rs b/chain/src/types.rs index 560458d01..a6a1c3fc9 100644 --- a/chain/src/types.rs +++ b/chain/src/types.rs @@ -70,7 +70,7 @@ impl Tip { height: 0, last_block_h: gbh, prev_block_h: gbh, - total_difficulty: Difficulty::one(), + total_difficulty: 
Difficulty::min(), } } diff --git a/chain/tests/store_indices.rs b/chain/tests/store_indices.rs index f4ae5a166..1d74b6077 100644 --- a/chain/tests/store_indices.rs +++ b/chain/tests/store_indices.rs @@ -53,7 +53,7 @@ fn test_various_store_indices() { let genesis = pow::mine_genesis_block().unwrap(); let reward = libtx::reward::output(&keychain, &key_id, 0, 1).unwrap(); - let block = Block::new(&genesis.header, vec![], Difficulty::one(), reward).unwrap(); + let block = Block::new(&genesis.header, vec![], Difficulty::min(), reward).unwrap(); let block_hash = block.hash(); { diff --git a/core/fuzz/src/main.rs b/core/fuzz/src/main.rs index 65c6adf08..2b6232706 100644 --- a/core/fuzz/src/main.rs +++ b/core/fuzz/src/main.rs @@ -50,7 +50,7 @@ fn block() -> Block { let reward = reward::output(&keychain, &key_id, 0, header.height).unwrap(); - Block::new(&header, txs, Difficulty::one(), reward).unwrap() + Block::new(&header, txs, Difficulty::min(), reward).unwrap() } fn compact_block() -> CompactBlock { diff --git a/core/src/consensus.rs b/core/src/consensus.rs index 2ba2389ab..d903d1a88 100644 --- a/core/src/consensus.rs +++ b/core/src/consensus.rs @@ -290,8 +290,8 @@ where BLOCK_TIME_WINDOW, CLAMP_FACTOR, ); - // minimum of 3 avoids getting stuck due to dampening - let difficulty = max(3, diff_sum * BLOCK_TIME_SEC / adj_ts); + // minimum difficulty avoids getting stuck due to dampening + let difficulty = max(MIN_DIFFICULTY, diff_sum * BLOCK_TIME_SEC / adj_ts); HeaderInfo::from_diff_scaling(Difficulty::from_num(difficulty), sec_pow_scaling) } @@ -320,8 +320,8 @@ pub fn secondary_pow_scaling(height: u64, diff_data: &Vec) -> u32 { ); let scale = scale_sum * target_pct / adj_count; - // minimum of 3 avoids getting stuck due to dampening - max(3, min(scale, MAX_SECONDARY_SCALING)) as u32 + // minimum difficulty avoids getting stuck due to dampening + max(MIN_DIFFICULTY, min(scale, MAX_SECONDARY_SCALING)) as u32 } /// Consensus rule that collections of items are sorted lexicographically. 
diff --git a/core/src/genesis.rs b/core/src/genesis.rs index d2994fcf5..cb4e88d3d 100644 --- a/core/src/genesis.rs +++ b/core/src/genesis.rs @@ -44,7 +44,7 @@ pub fn genesis_testnet1() -> core::Block { height: 0, timestamp: Utc.ymd(2017, 11, 16).and_hms(20, 0, 0), pow: ProofOfWork { - total_difficulty: Difficulty::one(), + total_difficulty: Difficulty::min(), scaling_difficulty: 1, nonce: 28205, proof: Proof::new(vec![ diff --git a/core/src/pow/types.rs b/core/src/pow/types.rs index 6c4e143ef..b4fff5b60 100644 --- a/core/src/pow/types.rs +++ b/core/src/pow/types.rs @@ -236,7 +236,7 @@ impl Default for ProofOfWork { fn default() -> ProofOfWork { let proof_size = global::proofsize(); ProofOfWork { - total_difficulty: Difficulty::one(), + total_difficulty: Difficulty::min(), scaling_difficulty: 1, nonce: 0, proof: Proof::zero(proof_size), diff --git a/core/tests/common/mod.rs b/core/tests/common/mod.rs index 66222a34d..b9604642a 100644 --- a/core/tests/common/mod.rs +++ b/core/tests/common/mod.rs @@ -92,7 +92,7 @@ where Block::new( &previous_header, txs.into_iter().cloned().collect(), - Difficulty::one(), + Difficulty::min(), reward_output, ).unwrap() } diff --git a/core/tests/consensus.rs b/core/tests/consensus.rs index 478559975..9dc5108cb 100644 --- a/core/tests/consensus.rs +++ b/core/tests/consensus.rs @@ -381,7 +381,6 @@ fn adjustment_scenarios() { fn next_target_adjustment() { global::set_mining_mode(global::ChainTypes::AutomatedTesting); let cur_time = Utc::now().timestamp() as u64; - let diff_one = Difficulty::one(); let diff_min = Difficulty::min(); // Check we don't get stuck on difficulty <= MIN_DIFFICULTY (at 4x faster blocks at least) diff --git a/p2p/src/serv.rs b/p2p/src/serv.rs index b53bf893a..a194ace2a 100644 --- a/p2p/src/serv.rs +++ b/p2p/src/serv.rs @@ -232,7 +232,7 @@ pub struct DummyAdapter {} impl ChainAdapter for DummyAdapter { fn total_difficulty(&self) -> Difficulty { - Difficulty::one() + Difficulty::min() } fn total_height(&self) -> u64 { 0 diff --git a/p2p/tests/peer_handshake.rs b/p2p/tests/peer_handshake.rs index 44ca7c76c..e1afc30e1 100644 --- a/p2p/tests/peer_handshake.rs +++ b/p2p/tests/peer_handshake.rs @@ -75,7 +75,7 @@ fn peer_handshake() { let mut peer = Peer::connect( &mut socket, p2p::Capabilities::UNKNOWN, - Difficulty::one(), + Difficulty::min(), my_addr, &p2p::handshake::Handshake::new(Hash::from_vec(&vec![]), p2p_config.clone()), net_adapter, @@ -86,10 +86,10 @@ fn peer_handshake() { peer.start(socket); thread::sleep(time::Duration::from_secs(1)); - peer.send_ping(Difficulty::one(), 0).unwrap(); + peer.send_ping(Difficulty::min(), 0).unwrap(); thread::sleep(time::Duration::from_secs(1)); let server_peer = server.peers.get_connected_peer(&my_addr).unwrap(); - assert_eq!(server_peer.info.total_difficulty(), Difficulty::one()); + assert_eq!(server_peer.info.total_difficulty(), Difficulty::min()); assert!(server.peers.peer_count() > 0); } diff --git a/pool/tests/block_building.rs b/pool/tests/block_building.rs index 4ffc54cd1..960651f19 100644 --- a/pool/tests/block_building.rs +++ b/pool/tests/block_building.rs @@ -54,7 +54,7 @@ fn test_transaction_pool_block_building() { let key_id = ExtKeychain::derive_key_id(1, height as u32, 0, 0, 0); let fee = txs.iter().map(|x| x.fee()).sum(); let reward = libtx::reward::output(&keychain, &key_id, fee, height).unwrap(); - let block = Block::new(&prev_header, txs, Difficulty::one(), reward).unwrap(); + let block = Block::new(&prev_header, txs, Difficulty::min(), reward).unwrap(); 
chain.update_db_for_block(&block); block.header @@ -115,7 +115,7 @@ fn test_transaction_pool_block_building() { let key_id = ExtKeychain::derive_key_id(1, 2, 0, 0, 0); let fees = txs.iter().map(|tx| tx.fee()).sum(); let reward = libtx::reward::output(&keychain, &key_id, fees, 0).unwrap(); - Block::new(&header, txs, Difficulty::one(), reward) + Block::new(&header, txs, Difficulty::min(), reward) }.unwrap(); chain.update_db_for_block(&block); diff --git a/pool/tests/block_reconciliation.rs b/pool/tests/block_reconciliation.rs index e204efa55..f626f0d0c 100644 --- a/pool/tests/block_reconciliation.rs +++ b/pool/tests/block_reconciliation.rs @@ -52,7 +52,7 @@ fn test_transaction_pool_block_reconciliation() { let height = 1; let key_id = ExtKeychain::derive_key_id(1, height as u32, 0, 0, 0); let reward = libtx::reward::output(&keychain, &key_id, 0, height).unwrap(); - let block = Block::new(&BlockHeader::default(), vec![], Difficulty::one(), reward).unwrap(); + let block = Block::new(&BlockHeader::default(), vec![], Difficulty::min(), reward).unwrap(); chain.update_db_for_block(&block); @@ -67,7 +67,7 @@ fn test_transaction_pool_block_reconciliation() { let key_id = ExtKeychain::derive_key_id(1, 2, 0, 0, 0); let fees = initial_tx.fee(); let reward = libtx::reward::output(&keychain, &key_id, fees, 0).unwrap(); - let block = Block::new(&header, vec![initial_tx], Difficulty::one(), reward).unwrap(); + let block = Block::new(&header, vec![initial_tx], Difficulty::min(), reward).unwrap(); chain.update_db_for_block(&block); @@ -157,7 +157,7 @@ fn test_transaction_pool_block_reconciliation() { let key_id = ExtKeychain::derive_key_id(1, 3, 0, 0, 0); let fees = block_txs.iter().map(|tx| tx.fee()).sum(); let reward = libtx::reward::output(&keychain, &key_id, fees, 0).unwrap(); - let block = Block::new(&header, block_txs, Difficulty::one(), reward).unwrap(); + let block = Block::new(&header, block_txs, Difficulty::min(), reward).unwrap(); chain.update_db_for_block(&block); block diff --git a/pool/tests/transaction_pool.rs b/pool/tests/transaction_pool.rs index eced1590c..d8faf8926 100644 --- a/pool/tests/transaction_pool.rs +++ b/pool/tests/transaction_pool.rs @@ -53,7 +53,7 @@ fn test_the_transaction_pool() { let key_id = ExtKeychain::derive_key_id(1, height as u32, 0, 0, 0); let reward = libtx::reward::output(&keychain, &key_id, 0, height).unwrap(); let mut block = - Block::new(&BlockHeader::default(), vec![], Difficulty::one(), reward).unwrap(); + Block::new(&BlockHeader::default(), vec![], Difficulty::min(), reward).unwrap(); chain.update_db_for_block(&block); From b1ebfe1c73d5ef5850b9ad1d0ef245d7627abc5a Mon Sep 17 00:00:00 2001 From: John Tromp Date: Thu, 18 Oct 2018 23:29:32 +0200 Subject: [PATCH 4/9] found last few instrances of Difficulty::one --- core/src/pow/mod.rs | 4 ++-- core/src/pow/types.rs | 6 ------ 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/core/src/pow/mod.rs b/core/src/pow/mod.rs index 97f8f3ad6..596dfdaa1 100644 --- a/core/src/pow/mod.rs +++ b/core/src/pow/mod.rs @@ -141,12 +141,12 @@ mod test { b.header.pow.nonce = 485; pow_size( &mut b.header, - Difficulty::one(), + Difficulty::min(), global::proofsize(), global::min_edge_bits(), ).unwrap(); assert!(b.header.pow.nonce != 310); - assert!(b.header.pow.to_difficulty() >= Difficulty::one()); + assert!(b.header.pow.to_difficulty() >= Difficulty::min()); assert!(verify_size(&b.header, global::min_edge_bits()).is_ok()); } } diff --git a/core/src/pow/types.rs b/core/src/pow/types.rs index b4fff5b60..57d4b220b 100644 --- 
a/core/src/pow/types.rs +++ b/core/src/pow/types.rs @@ -64,12 +64,6 @@ impl Difficulty { Difficulty { num: 0 } } - /// Difficulty of one, which is the minimum difficulty - /// (when the hash equals the max target) - pub fn one() -> Difficulty { - Difficulty { num: 1 } - } - /// Difficulty of MIN_DIFFICULTY pub fn min() -> Difficulty { Difficulty { num: MIN_DIFFICULTY } From e1b5d7f3c22b417fc560558dd188326a29074768 Mon Sep 17 00:00:00 2001 From: John Tromp Date: Fri, 19 Oct 2018 21:39:54 +0200 Subject: [PATCH 5/9] revert secondary scaling default in HeaderInfo; rename scaling_difficulty; refactor difficulty_data_to_vector --- api/src/types.rs | 6 ++-- chain/src/pipe.rs | 8 ++++- chain/src/store.rs | 2 +- chain/tests/data_file_integrity.rs | 2 +- chain/tests/mine_simple_chain.rs | 4 +-- chain/tests/test_coinbase_maturity.rs | 8 ++--- core/src/consensus.rs | 4 +-- core/src/core/block.rs | 2 +- core/src/genesis.rs | 10 +++--- core/src/global.rs | 49 ++++++++------------------- core/src/pow/types.rs | 14 ++++---- core/tests/consensus.rs | 6 ++++ doc/api/node_api.md | 2 +- servers/src/mining/mine_block.rs | 2 +- wallet/tests/common/mod.rs | 2 +- 15 files changed, 56 insertions(+), 65 deletions(-) diff --git a/api/src/types.rs b/api/src/types.rs index 74a6db7b7..82a2a5ca9 100644 --- a/api/src/types.rs +++ b/api/src/types.rs @@ -509,8 +509,8 @@ pub struct BlockHeaderPrintable { pub cuckoo_solution: Vec, /// Total accumulated difficulty since genesis block pub total_difficulty: u64, - /// Difficulty scaling factor between the different proofs of work - pub scaling_difficulty: u32, + /// Variable difficulty scaling factor for secondary proof of work + pub secondary_scaling: u32, /// Total kernel offset since genesis block pub total_kernel_offset: String, } @@ -531,7 +531,7 @@ impl BlockHeaderPrintable { edge_bits: h.pow.edge_bits(), cuckoo_solution: h.pow.proof.nonces.clone(), total_difficulty: h.pow.total_difficulty.to_num(), - scaling_difficulty: h.pow.scaling_difficulty, + secondary_scaling: h.pow.secondary_scaling, total_kernel_offset: h.total_kernel_offset.to_hex(), } } diff --git a/chain/src/pipe.rs b/chain/src/pipe.rs index 060a0bb58..c2f4c8527 100644 --- a/chain/src/pipe.rs +++ b/chain/src/pipe.rs @@ -441,7 +441,13 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E return Err(ErrorKind::WrongTotalDifficulty.into()); } // check the secondary PoW scaling factor if applicable - if header.pow.scaling_difficulty != next_header_info.secondary_scaling { + if header.pow.secondary_scaling != next_header_info.secondary_scaling { + info!( + LOGGER, + "validate_header: header secondary scaling {} != {}", + header.pow.secondary_scaling, + next_header_info.secondary_scaling + ); return Err(ErrorKind::InvalidScaling.into()); } } diff --git a/chain/src/store.rs b/chain/src/store.rs index 423833812..3933b264c 100644 --- a/chain/src/store.rs +++ b/chain/src/store.rs @@ -650,7 +650,7 @@ impl<'a> Iterator for DifficultyIter<'a> { .clone() .map_or(Difficulty::zero(), |x| x.total_difficulty()); let difficulty = header.total_difficulty() - prev_difficulty; - let scaling = header.pow.scaling_difficulty; + let scaling = header.pow.secondary_scaling; Some(HeaderInfo::new( header.timestamp.timestamp() as u64, diff --git a/chain/tests/data_file_integrity.rs b/chain/tests/data_file_integrity.rs index f80a87386..f802f9b8c 100644 --- a/chain/tests/data_file_integrity.rs +++ b/chain/tests/data_file_integrity.rs @@ -89,7 +89,7 @@ fn data_files() { core::core::Block::new(&prev, vec![], 
next_header_info.clone().difficulty, reward) .unwrap(); b.header.timestamp = prev.timestamp + Duration::seconds(60); - b.header.pow.scaling_difficulty = next_header_info.secondary_scaling; + b.header.pow.secondary_scaling = next_header_info.secondary_scaling; chain.set_txhashset_roots(&mut b, false).unwrap(); diff --git a/chain/tests/mine_simple_chain.rs b/chain/tests/mine_simple_chain.rs index 095d89dbd..faa67108b 100644 --- a/chain/tests/mine_simple_chain.rs +++ b/chain/tests/mine_simple_chain.rs @@ -71,7 +71,7 @@ fn mine_empty_chain() { core::core::Block::new(&prev, vec![], next_header_info.clone().difficulty, reward) .unwrap(); b.header.timestamp = prev.timestamp + Duration::seconds(60); - b.header.pow.scaling_difficulty = next_header_info.secondary_scaling; + b.header.pow.secondary_scaling = next_header_info.secondary_scaling; chain.set_txhashset_roots(&mut b, false).unwrap(); @@ -395,7 +395,7 @@ fn output_header_mappings() { core::core::Block::new(&prev, vec![], next_header_info.clone().difficulty, reward) .unwrap(); b.header.timestamp = prev.timestamp + Duration::seconds(60); - b.header.pow.scaling_difficulty = next_header_info.secondary_scaling; + b.header.pow.secondary_scaling = next_header_info.secondary_scaling; chain.set_txhashset_roots(&mut b, false).unwrap(); diff --git a/chain/tests/test_coinbase_maturity.rs b/chain/tests/test_coinbase_maturity.rs index c5005e251..79270ad26 100644 --- a/chain/tests/test_coinbase_maturity.rs +++ b/chain/tests/test_coinbase_maturity.rs @@ -72,7 +72,7 @@ fn test_coinbase_maturity() { let reward = libtx::reward::output(&keychain, &key_id1, 0, prev.height).unwrap(); let mut block = core::core::Block::new(&prev, vec![], Difficulty::min(), reward).unwrap(); block.header.timestamp = prev.timestamp + Duration::seconds(60); - block.header.pow.scaling_difficulty = next_header_info.secondary_scaling; + block.header.pow.secondary_scaling = next_header_info.secondary_scaling; chain.set_txhashset_roots(&mut block, false).unwrap(); @@ -119,7 +119,7 @@ fn test_coinbase_maturity() { let mut block = core::core::Block::new(&prev, txs, Difficulty::min(), reward).unwrap(); let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter()); block.header.timestamp = prev.timestamp + Duration::seconds(60); - block.header.pow.scaling_difficulty = next_header_info.secondary_scaling; + block.header.pow.secondary_scaling = next_header_info.secondary_scaling; chain.set_txhashset_roots(&mut block, false).unwrap(); @@ -152,7 +152,7 @@ fn test_coinbase_maturity() { let mut block = core::core::Block::new(&prev, vec![], Difficulty::min(), reward).unwrap(); let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter()); block.header.timestamp = prev.timestamp + Duration::seconds(60); - block.header.pow.scaling_difficulty = next_header_info.secondary_scaling; + block.header.pow.secondary_scaling = next_header_info.secondary_scaling; chain.set_txhashset_roots(&mut block, false).unwrap(); @@ -179,7 +179,7 @@ fn test_coinbase_maturity() { let mut block = core::core::Block::new(&prev, txs, Difficulty::min(), reward).unwrap(); block.header.timestamp = prev.timestamp + Duration::seconds(60); - block.header.pow.scaling_difficulty = next_header_info.secondary_scaling; + block.header.pow.secondary_scaling = next_header_info.secondary_scaling; chain.set_txhashset_roots(&mut block, false).unwrap(); diff --git a/core/src/consensus.rs b/core/src/consensus.rs index d903d1a88..8c6a4b9a8 100644 --- a/core/src/consensus.rs +++ b/core/src/consensus.rs @@ -221,7 +221,7 
@@ impl HeaderInfo { timestamp, difficulty, secondary_scaling: global::initial_graph_weight(), - is_secondary: true, + is_secondary: false, } } @@ -232,7 +232,7 @@ impl HeaderInfo { timestamp: 1, difficulty, secondary_scaling, - is_secondary: true, + is_secondary: false, } } } diff --git a/core/src/core/block.rs b/core/src/core/block.rs index 4089235d0..5405c3896 100644 --- a/core/src/core/block.rs +++ b/core/src/core/block.rs @@ -155,7 +155,7 @@ fn fixed_size_of_serialized_header(_version: u16) -> usize { size += mem::size_of::(); // output_mmr_size size += mem::size_of::(); // kernel_mmr_size size += mem::size_of::(); // total_difficulty - size += mem::size_of::(); // scaling_difficulty + size += mem::size_of::(); // secondary_scaling size += mem::size_of::(); // nonce size } diff --git a/core/src/genesis.rs b/core/src/genesis.rs index cb4e88d3d..ac408e04d 100644 --- a/core/src/genesis.rs +++ b/core/src/genesis.rs @@ -45,7 +45,7 @@ pub fn genesis_testnet1() -> core::Block { timestamp: Utc.ymd(2017, 11, 16).and_hms(20, 0, 0), pow: ProofOfWork { total_difficulty: Difficulty::min(), - scaling_difficulty: 1, + secondary_scaling: 1, nonce: 28205, proof: Proof::new(vec![ 0x21e, 0x7a2, 0xeae, 0x144e, 0x1b1c, 0x1fbd, 0x203a, 0x214b, 0x293b, 0x2b74, @@ -67,7 +67,7 @@ pub fn genesis_testnet2() -> core::Block { timestamp: Utc.ymd(2018, 3, 26).and_hms(16, 0, 0), pow: ProofOfWork { total_difficulty: Difficulty::from_num(global::initial_block_difficulty()), - scaling_difficulty: 1, + secondary_scaling: 1, nonce: 1060, proof: Proof::new(vec![ 0x1940730, 0x333b9d0, 0x4739d6f, 0x4c6cfb1, 0x6e3d6c3, 0x74408a3, 0x7ba2bd2, @@ -90,7 +90,7 @@ pub fn genesis_testnet3() -> core::Block { timestamp: Utc.ymd(2018, 7, 8).and_hms(18, 0, 0), pow: ProofOfWork { total_difficulty: Difficulty::from_num(global::initial_block_difficulty()), - scaling_difficulty: 1, + secondary_scaling: 1, nonce: 4956988373127691, proof: Proof::new(vec![ 0xa420dc, 0xc8ffee, 0x10e433e, 0x1de9428, 0x2ed4cea, 0x52d907b, 0x5af0e3f, @@ -114,7 +114,7 @@ pub fn genesis_testnet4() -> core::Block { timestamp: Utc.ymd(2018, 10, 17).and_hms(20, 0, 0), pow: ProofOfWork { total_difficulty: Difficulty::from_num(global::initial_block_difficulty()), - scaling_difficulty: global::initial_graph_weight(), + secondary_scaling: global::initial_graph_weight(), nonce: 8612241555342799290, proof: Proof::new(vec![ 0x46f3b4, 0x1135f8c, 0x1a1596f, 0x1e10f71, 0x41c03ea, 0x63fe8e7, 0x65af34f, @@ -137,7 +137,7 @@ pub fn genesis_main() -> core::Block { timestamp: Utc.ymd(2018, 8, 14).and_hms(0, 0, 0), pow: ProofOfWork { total_difficulty: Difficulty::from_num(global::initial_block_difficulty()), - scaling_difficulty: 1, + secondary_scaling: 1, nonce: global::get_genesis_nonce(), proof: Proof::zero(consensus::PROOFSIZE), }, diff --git a/core/src/global.rs b/core/src/global.rs index 0002d82b2..db9967dc0 100644 --- a/core/src/global.rs +++ b/core/src/global.rs @@ -279,47 +279,26 @@ where let needed_block_count = DIFFICULTY_ADJUST_WINDOW as usize + 1; let mut last_n: Vec = cursor.into_iter().take(needed_block_count).collect(); - // Sort blocks from earliest to latest (to keep conceptually easier) - last_n.reverse(); // Only needed just after blockchain launch... basically ensures there's // always enough data by simulating perfectly timed pre-genesis // blocks at the genesis difficulty as needed. 
- let block_count_difference = needed_block_count - last_n.len(); - if block_count_difference > 0 { - // Collect any real data we have - let mut live_intervals: Vec = last_n - .iter() - .map(|b| HeaderInfo::from_ts_diff(b.timestamp, b.difficulty)) - .collect(); - for i in (1..live_intervals.len()).rev() { - // prevents issues with very fast automated test chains - if live_intervals[i - 1].timestamp > live_intervals[i].timestamp { - live_intervals[i].timestamp = 0; - } else { - live_intervals[i].timestamp = - live_intervals[i].timestamp - live_intervals[i - 1].timestamp; - } - } - // Remove genesis "interval" - if live_intervals.len() > 1 { - live_intervals.remove(0); + let n = last_n.len(); + if needed_block_count > n { + let last_ts_delta = if n > 1 { + last_n[0].timestamp - last_n[1].timestamp } else { - //if it's just genesis, adjust the interval - live_intervals[0].timestamp = BLOCK_TIME_SEC; - } - let mut interval_index = live_intervals.len() - 1; - let mut last_ts = last_n.first().unwrap().timestamp; - let last_diff = live_intervals[live_intervals.len() - 1].difficulty; - // fill in simulated blocks with values from the previous real block + BLOCK_TIME_SEC + }; + let last_diff = last_n[0].difficulty; - for _ in 0..block_count_difference { - last_ts = last_ts.saturating_sub(live_intervals[live_intervals.len() - 1].timestamp); - last_n.insert(0, HeaderInfo::from_ts_diff(last_ts, last_diff.clone())); - interval_index = match interval_index { - 0 => live_intervals.len() - 1, - _ => interval_index - 1, - }; + // fill in simulated blocks with values from the previous real block + let mut last_ts = last_n.last().unwrap().timestamp; + for _ in n..needed_block_count { + last_ts = last_ts.saturating_sub(last_ts_delta); + last_n.push(HeaderInfo::from_ts_diff(last_ts, last_diff.clone())); } } + last_n.reverse(); + assert_eq!(last_n.len(), needed_block_count); last_n } diff --git a/core/src/pow/types.rs b/core/src/pow/types.rs index 57d4b220b..10caf6ce9 100644 --- a/core/src/pow/types.rs +++ b/core/src/pow/types.rs @@ -218,8 +218,8 @@ impl<'de> de::Visitor<'de> for DiffVisitor { pub struct ProofOfWork { /// Total accumulated difficulty since genesis block pub total_difficulty: Difficulty, - /// Difficulty scaling factor between the different proofs of work - pub scaling_difficulty: u32, + /// Variable difficulty scaling factor fo secondary proof of work + pub secondary_scaling: u32, /// Nonce increment used to mine this block. pub nonce: u64, /// Proof of work data. 
@@ -231,7 +231,7 @@ impl Default for ProofOfWork { let proof_size = global::proofsize(); ProofOfWork { total_difficulty: Difficulty::min(), - scaling_difficulty: 1, + secondary_scaling: 1, nonce: 0, proof: Proof::zero(proof_size), } @@ -242,12 +242,12 @@ impl ProofOfWork { /// Read implementation, can't define as trait impl as we need a version pub fn read(_ver: u16, reader: &mut Reader) -> Result<ProofOfWork, ser::Error> { let total_difficulty = Difficulty::read(reader)?; - let scaling_difficulty = reader.read_u32()?; + let secondary_scaling = reader.read_u32()?; let nonce = reader.read_u64()?; let proof = Proof::read(reader)?; Ok(ProofOfWork { total_difficulty, - scaling_difficulty, + secondary_scaling, nonce, proof, }) @@ -269,7 +269,7 @@ impl ProofOfWork { ser_multiwrite!( writer, [write_u64, self.total_difficulty.to_num()], - [write_u32, self.scaling_difficulty] + [write_u32, self.secondary_scaling] ); Ok(()) } @@ -279,7 +279,7 @@ // 2 proof of works, Cuckoo29 (for now) and Cuckoo30+, which are scaled // differently (scaling not controlled for now) if self.proof.edge_bits == SECOND_POW_EDGE_BITS { - Difficulty::from_proof_scaled(&self.proof, self.scaling_difficulty) + Difficulty::from_proof_scaled(&self.proof, self.secondary_scaling) } else { Difficulty::from_proof_adjusted(&self.proof) } diff --git a/core/tests/consensus.rs b/core/tests/consensus.rs index 9dc5108cb..daae4e307 100644 --- a/core/tests/consensus.rs +++ b/core/tests/consensus.rs @@ -400,6 +400,12 @@ fn next_target_adjustment() { Difficulty::from_num(10000) ); + // check pre difficulty_data_to_vector effect on retargetting + assert_eq!( + next_difficulty(1, vec![HeaderInfo::from_ts_diff(42, hi.difficulty)]).difficulty, + Difficulty::from_num(14913) + ); + // checking averaging works hi.difficulty = Difficulty::from_num(500); let sec = DIFFICULTY_ADJUST_WINDOW / 2; diff --git a/doc/api/node_api.md b/doc/api/node_api.md index 65d14fee6..f158fda4f 100644 --- a/doc/api/node_api.md +++ b/doc/api/node_api.md @@ -82,7 +82,7 @@ Optionally return results as "compact blocks" by passing `?compact` query. 
| - edge_bits | number | Size of the cuckoo graph (2_log of number of edges) | | - cuckoo_solution | []number | The Cuckoo solution for this block | | - total_difficulty | number | Total accumulated difficulty since genesis block | - | - scaling_difficulty | number | Difficulty scaling factor between the different proofs of work | + | - secondary_scaling | number | Variable difficulty scaling factor for secondary proof of work | | - total_kernel_offset | string | Total kernel offset since genesis block | | inputs | []string | Input transactions | | outputs | []object | Outputs transactions | diff --git a/servers/src/mining/mine_block.rs b/servers/src/mining/mine_block.rs index b98234517..b8c3ce4ec 100644 --- a/servers/src/mining/mine_block.rs +++ b/servers/src/mining/mine_block.rs @@ -132,7 +132,7 @@ fn build_block( b.validate(&head.total_kernel_offset, verifier_cache)?; b.header.pow.nonce = thread_rng().gen(); - b.header.pow.scaling_difficulty = difficulty.secondary_scaling; + b.header.pow.secondary_scaling = difficulty.secondary_scaling; b.header.timestamp = DateTime::::from_utc(NaiveDateTime::from_timestamp(now_sec, 0), Utc); let b_difficulty = (b.header.total_difficulty() - head.total_difficulty()).to_num(); diff --git a/wallet/tests/common/mod.rs b/wallet/tests/common/mod.rs index 07cf75ff8..3cd019f86 100644 --- a/wallet/tests/common/mod.rs +++ b/wallet/tests/common/mod.rs @@ -97,7 +97,7 @@ pub fn add_block_with_reward(chain: &Chain, txs: Vec<&Transaction>, reward: CbDa (output, kernel), ).unwrap(); b.header.timestamp = prev.timestamp + Duration::seconds(60); - b.header.pow.scaling_difficulty = next_header_info.secondary_scaling; + b.header.pow.secondary_scaling = next_header_info.secondary_scaling; chain.set_txhashset_roots(&mut b, false).unwrap(); pow::pow_size( &mut b.header, From 1cfc474817265ddbe147ca49aaa673181fdda89f Mon Sep 17 00:00:00 2001 From: John Tromp Date: Fri, 19 Oct 2018 22:21:13 +0200 Subject: [PATCH 6/9] forgot to take out assert --- core/src/global.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/core/src/global.rs b/core/src/global.rs index db9967dc0..57de79b69 100644 --- a/core/src/global.rs +++ b/core/src/global.rs @@ -299,6 +299,5 @@ where } } last_n.reverse(); - assert_eq!(last_n.len(), needed_block_count); last_n } From 1121a38bd5037b67667eefc7069bd9e15fef7234 Mon Sep 17 00:00:00 2001 From: Ignotus Peverell Date: Fri, 26 Oct 2018 15:35:21 -0700 Subject: [PATCH 7/9] Logger fix --- chain/src/pipe.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/chain/src/pipe.rs b/chain/src/pipe.rs index 0e2218144..95d98d3ff 100644 --- a/chain/src/pipe.rs +++ b/chain/src/pipe.rs @@ -438,7 +438,6 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E // check the secondary PoW scaling factor if applicable if header.pow.secondary_scaling != next_header_info.secondary_scaling { info!( - LOGGER, "validate_header: header secondary scaling {} != {}", header.pow.secondary_scaling, next_header_info.secondary_scaling From 6f3ae4563a3c193be7108b0324ae336ffbc3284f Mon Sep 17 00:00:00 2001 From: Ignotus Peverell Date: Fri, 26 Oct 2018 16:07:12 -0700 Subject: [PATCH 8/9] Small test fix --- core/tests/consensus.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/tests/consensus.rs b/core/tests/consensus.rs index e81e12941..14ede57bb 100644 --- a/core/tests/consensus.rs +++ b/core/tests/consensus.rs @@ -501,7 +501,7 @@ fn secondary_pow_scale() { let mut low_hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), MIN_DIFFICULTY as 
u32); low_hi.is_secondary = true; assert_eq!( - secondary_pow_scaling(890_000, &(0..window).map(|_| low_hi.clone()).collect()), + secondary_pow_scaling(890_000, &(0..window).map(|_| low_hi.clone()).collect()::<Vec<_>>), MIN_DIFFICULTY as u32 ); // just about the right ratio, also no longer playing with median From bb5392548fdd2be993d0b4ff972c9aeafbf71e79 Mon Sep 17 00:00:00 2001 From: Ignotus Peverell Date: Fri, 26 Oct 2018 16:28:15 -0700 Subject: [PATCH 9/9] Take 2 --- core/tests/consensus.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/tests/consensus.rs b/core/tests/consensus.rs index 14ede57bb..b252d9b00 100644 --- a/core/tests/consensus.rs +++ b/core/tests/consensus.rs @@ -501,7 +501,7 @@ fn secondary_pow_scale() { let mut low_hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), MIN_DIFFICULTY as u32); low_hi.is_secondary = true; assert_eq!( - secondary_pow_scaling(890_000, &(0..window).map(|_| low_hi.clone()).collect()::<Vec<_>>), + secondary_pow_scaling(890_000, &(0..window).map(|_| low_hi.clone()).collect::<Vec<_>>()), MIN_DIFFICULTY as u32 ); // just about the right ratio, also no longer playing with median