diff --git a/api/src/types.rs b/api/src/types.rs index 74a6db7b7..82a2a5ca9 100644 --- a/api/src/types.rs +++ b/api/src/types.rs @@ -509,8 +509,8 @@ pub struct BlockHeaderPrintable { pub cuckoo_solution: Vec, /// Total accumulated difficulty since genesis block pub total_difficulty: u64, - /// Difficulty scaling factor between the different proofs of work - pub scaling_difficulty: u32, + /// Variable difficulty scaling factor for secondary proof of work + pub secondary_scaling: u32, /// Total kernel offset since genesis block pub total_kernel_offset: String, } @@ -531,7 +531,7 @@ impl BlockHeaderPrintable { edge_bits: h.pow.edge_bits(), cuckoo_solution: h.pow.proof.nonces.clone(), total_difficulty: h.pow.total_difficulty.to_num(), - scaling_difficulty: h.pow.scaling_difficulty, + secondary_scaling: h.pow.secondary_scaling, total_kernel_offset: h.total_kernel_offset.to_hex(), } } diff --git a/chain/src/pipe.rs b/chain/src/pipe.rs index 1eae38b53..95d98d3ff 100644 --- a/chain/src/pipe.rs +++ b/chain/src/pipe.rs @@ -436,7 +436,12 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E return Err(ErrorKind::WrongTotalDifficulty.into()); } // check the secondary PoW scaling factor if applicable - if header.pow.scaling_difficulty != next_header_info.secondary_scaling { + if header.pow.secondary_scaling != next_header_info.secondary_scaling { + info!( + "validate_header: header secondary scaling {} != {}", + header.pow.secondary_scaling, + next_header_info.secondary_scaling + ); return Err(ErrorKind::InvalidScaling.into()); } } diff --git a/chain/src/store.rs b/chain/src/store.rs index 0b91eb2f6..3f2ea0c05 100644 --- a/chain/src/store.rs +++ b/chain/src/store.rs @@ -651,7 +651,7 @@ impl<'a> Iterator for DifficultyIter<'a> { .clone() .map_or(Difficulty::zero(), |x| x.total_difficulty()); let difficulty = header.total_difficulty() - prev_difficulty; - let scaling = header.pow.scaling_difficulty; + let scaling = header.pow.secondary_scaling; 
Some(HeaderInfo::new( header.timestamp.timestamp() as u64, diff --git a/chain/src/types.rs b/chain/src/types.rs index 560458d01..a6a1c3fc9 100644 --- a/chain/src/types.rs +++ b/chain/src/types.rs @@ -70,7 +70,7 @@ impl Tip { height: 0, last_block_h: gbh, prev_block_h: gbh, - total_difficulty: Difficulty::one(), + total_difficulty: Difficulty::min(), } } diff --git a/chain/tests/data_file_integrity.rs b/chain/tests/data_file_integrity.rs index 88088cdb5..d69904b7a 100644 --- a/chain/tests/data_file_integrity.rs +++ b/chain/tests/data_file_integrity.rs @@ -90,7 +90,7 @@ fn data_files() { core::core::Block::new(&prev, vec![], next_header_info.clone().difficulty, reward) .unwrap(); b.header.timestamp = prev.timestamp + Duration::seconds(60); - b.header.pow.scaling_difficulty = next_header_info.secondary_scaling; + b.header.pow.secondary_scaling = next_header_info.secondary_scaling; chain.set_txhashset_roots(&mut b, false).unwrap(); diff --git a/chain/tests/mine_simple_chain.rs b/chain/tests/mine_simple_chain.rs index 3c3ad1218..b893dc8b8 100644 --- a/chain/tests/mine_simple_chain.rs +++ b/chain/tests/mine_simple_chain.rs @@ -72,7 +72,7 @@ fn mine_empty_chain() { core::core::Block::new(&prev, vec![], next_header_info.clone().difficulty, reward) .unwrap(); b.header.timestamp = prev.timestamp + Duration::seconds(60); - b.header.pow.scaling_difficulty = next_header_info.secondary_scaling; + b.header.pow.secondary_scaling = next_header_info.secondary_scaling; chain.set_txhashset_roots(&mut b, false).unwrap(); @@ -396,7 +396,7 @@ fn output_header_mappings() { core::core::Block::new(&prev, vec![], next_header_info.clone().difficulty, reward) .unwrap(); b.header.timestamp = prev.timestamp + Duration::seconds(60); - b.header.pow.scaling_difficulty = next_header_info.secondary_scaling; + b.header.pow.secondary_scaling = next_header_info.secondary_scaling; chain.set_txhashset_roots(&mut b, false).unwrap(); diff --git a/chain/tests/store_indices.rs b/chain/tests/store_indices.rs 
index f4ae5a166..1d74b6077 100644 --- a/chain/tests/store_indices.rs +++ b/chain/tests/store_indices.rs @@ -53,7 +53,7 @@ fn test_various_store_indices() { let genesis = pow::mine_genesis_block().unwrap(); let reward = libtx::reward::output(&keychain, &key_id, 0, 1).unwrap(); - let block = Block::new(&genesis.header, vec![], Difficulty::one(), reward).unwrap(); + let block = Block::new(&genesis.header, vec![], Difficulty::min(), reward).unwrap(); let block_hash = block.hash(); { diff --git a/chain/tests/test_coinbase_maturity.rs b/chain/tests/test_coinbase_maturity.rs index ceeab4dff..5422f326a 100644 --- a/chain/tests/test_coinbase_maturity.rs +++ b/chain/tests/test_coinbase_maturity.rs @@ -72,9 +72,9 @@ fn test_coinbase_maturity() { let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter()); let reward = libtx::reward::output(&keychain, &key_id1, 0, prev.height).unwrap(); - let mut block = core::core::Block::new(&prev, vec![], Difficulty::one(), reward).unwrap(); + let mut block = core::core::Block::new(&prev, vec![], Difficulty::min(), reward).unwrap(); block.header.timestamp = prev.timestamp + Duration::seconds(60); - block.header.pow.scaling_difficulty = next_header_info.secondary_scaling; + block.header.pow.secondary_scaling = next_header_info.secondary_scaling; chain.set_txhashset_roots(&mut block, false).unwrap(); @@ -118,10 +118,10 @@ fn test_coinbase_maturity() { let txs = vec![coinbase_txn.clone()]; let fees = txs.iter().map(|tx| tx.fee()).sum(); let reward = libtx::reward::output(&keychain, &key_id3, fees, prev.height).unwrap(); - let mut block = core::core::Block::new(&prev, txs, Difficulty::one(), reward).unwrap(); + let mut block = core::core::Block::new(&prev, txs, Difficulty::min(), reward).unwrap(); let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter()); block.header.timestamp = prev.timestamp + Duration::seconds(60); - block.header.pow.scaling_difficulty = next_header_info.secondary_scaling; + 
block.header.pow.secondary_scaling = next_header_info.secondary_scaling; chain.set_txhashset_roots(&mut block, false).unwrap(); @@ -151,10 +151,10 @@ fn test_coinbase_maturity() { let pk = ExtKeychainPath::new(1, 1, 0, 0, 0).to_identifier(); let reward = libtx::reward::output(&keychain, &pk, 0, prev.height).unwrap(); - let mut block = core::core::Block::new(&prev, vec![], Difficulty::one(), reward).unwrap(); + let mut block = core::core::Block::new(&prev, vec![], Difficulty::min(), reward).unwrap(); let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter()); block.header.timestamp = prev.timestamp + Duration::seconds(60); - block.header.pow.scaling_difficulty = next_header_info.secondary_scaling; + block.header.pow.secondary_scaling = next_header_info.secondary_scaling; chain.set_txhashset_roots(&mut block, false).unwrap(); @@ -178,10 +178,10 @@ fn test_coinbase_maturity() { let fees = txs.iter().map(|tx| tx.fee()).sum(); let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter()); let reward = libtx::reward::output(&keychain, &key_id4, fees, prev.height).unwrap(); - let mut block = core::core::Block::new(&prev, txs, Difficulty::one(), reward).unwrap(); + let mut block = core::core::Block::new(&prev, txs, Difficulty::min(), reward).unwrap(); block.header.timestamp = prev.timestamp + Duration::seconds(60); - block.header.pow.scaling_difficulty = next_header_info.secondary_scaling; + block.header.pow.secondary_scaling = next_header_info.secondary_scaling; chain.set_txhashset_roots(&mut block, false).unwrap(); diff --git a/core/fuzz/src/main.rs b/core/fuzz/src/main.rs index 65c6adf08..2b6232706 100644 --- a/core/fuzz/src/main.rs +++ b/core/fuzz/src/main.rs @@ -50,7 +50,7 @@ fn block() -> Block { let reward = reward::output(&keychain, &key_id, 0, header.height).unwrap(); - Block::new(&header, txs, Difficulty::one(), reward).unwrap() + Block::new(&header, txs, Difficulty::min(), reward).unwrap() } fn compact_block() -> CompactBlock { 
diff --git a/core/src/consensus.rs b/core/src/consensus.rs index 49d2d6651..4e1c53137 100644 --- a/core/src/consensus.rs +++ b/core/src/consensus.rs @@ -158,18 +158,18 @@ pub fn graph_weight(edge_bits: u8) -> u64 { (2 << (edge_bits - global::base_edge_bits()) as u64) * (edge_bits as u64) } -/// minimum possible difficulty equal to graph_weight(SECOND_POW_EDGE_BITS) -pub const MIN_DIFFICULTY: u64 = +/// minimum difficulty to avoid getting stuck when trying to increase subject to dampening +pub const MIN_DIFFICULTY: u64 = DAMP_FACTOR; + +/// unit difficulty, equal to graph_weight(SECOND_POW_EDGE_BITS) +pub const UNIT_DIFFICULTY: u64 = ((2 as u64) << (SECOND_POW_EDGE_BITS - BASE_EDGE_BITS)) * (SECOND_POW_EDGE_BITS as u64); /// The initial difficulty at launch. This should be over-estimated /// and difficulty should come down at launch rather than up /// Currently grossly over-estimated at 10% of current /// ethereum GPUs (assuming 1GPU can solve a block at diff 1 in one block interval) -/// FOR MAINNET, use -/// pub const INITIAL_DIFFICULTY: u64 = 1_000_000 * MIN_DIFFICULTY; -/// Pick MUCH more modest value for TESTNET4: -pub const INITIAL_DIFFICULTY: u64 = 1_000 * MIN_DIFFICULTY; +pub const INITIAL_DIFFICULTY: u64 = 1_000_000 * UNIT_DIFFICULTY; /// Consensus errors #[derive(Clone, Debug, Eq, PartialEq, Fail)] @@ -237,12 +237,12 @@ impl HeaderInfo { } } -/// TODO: Doc +/// Move value linearly toward a goal pub fn damp(actual: u64, goal: u64, damp_factor: u64) -> u64 { (1 * actual + (damp_factor - 1) * goal) / damp_factor } -/// TODO: Doc +/// limit value to be within some factor from a goal pub fn clamp(actual: u64, goal: u64, clamp_factor: u64) -> u64 { max(goal / clamp_factor, min(actual, goal * clamp_factor)) } @@ -290,7 +290,8 @@ where BLOCK_TIME_WINDOW, CLAMP_FACTOR, ); - let difficulty = max(1, diff_sum * BLOCK_TIME_SEC / adj_ts); + // minimum difficulty avoids getting stuck due to dampening + let difficulty = max(MIN_DIFFICULTY, diff_sum * BLOCK_TIME_SEC / 
adj_ts); HeaderInfo::from_diff_scaling(Difficulty::from_num(difficulty), sec_pow_scaling) } @@ -319,7 +320,8 @@ pub fn secondary_pow_scaling(height: u64, diff_data: &[HeaderInfo]) -> u32 { ); let scale = scale_sum * target_pct / adj_count; - max(1, min(scale, MAX_SECONDARY_SCALING)) as u32 + // minimum difficulty avoids getting stuck due to dampening + max(MIN_DIFFICULTY, min(scale, MAX_SECONDARY_SCALING)) as u32 } /// Consensus rule that collections of items are sorted lexicographically. diff --git a/core/src/core/block.rs b/core/src/core/block.rs index 2336bda6b..9d956b938 100644 --- a/core/src/core/block.rs +++ b/core/src/core/block.rs @@ -156,7 +156,7 @@ fn fixed_size_of_serialized_header(_version: u16) -> usize { size += mem::size_of::(); // output_mmr_size size += mem::size_of::(); // kernel_mmr_size size += mem::size_of::(); // total_difficulty - size += mem::size_of::(); // scaling_difficulty + size += mem::size_of::(); // secondary_scaling size += mem::size_of::(); // nonce size } diff --git a/core/src/genesis.rs b/core/src/genesis.rs index d2994fcf5..ac408e04d 100644 --- a/core/src/genesis.rs +++ b/core/src/genesis.rs @@ -44,8 +44,8 @@ pub fn genesis_testnet1() -> core::Block { height: 0, timestamp: Utc.ymd(2017, 11, 16).and_hms(20, 0, 0), pow: ProofOfWork { - total_difficulty: Difficulty::one(), - scaling_difficulty: 1, + total_difficulty: Difficulty::min(), + secondary_scaling: 1, nonce: 28205, proof: Proof::new(vec![ 0x21e, 0x7a2, 0xeae, 0x144e, 0x1b1c, 0x1fbd, 0x203a, 0x214b, 0x293b, 0x2b74, @@ -67,7 +67,7 @@ pub fn genesis_testnet2() -> core::Block { timestamp: Utc.ymd(2018, 3, 26).and_hms(16, 0, 0), pow: ProofOfWork { total_difficulty: Difficulty::from_num(global::initial_block_difficulty()), - scaling_difficulty: 1, + secondary_scaling: 1, nonce: 1060, proof: Proof::new(vec![ 0x1940730, 0x333b9d0, 0x4739d6f, 0x4c6cfb1, 0x6e3d6c3, 0x74408a3, 0x7ba2bd2, @@ -90,7 +90,7 @@ pub fn genesis_testnet3() -> core::Block { timestamp: Utc.ymd(2018, 7, 
8).and_hms(18, 0, 0), pow: ProofOfWork { total_difficulty: Difficulty::from_num(global::initial_block_difficulty()), - scaling_difficulty: 1, + secondary_scaling: 1, nonce: 4956988373127691, proof: Proof::new(vec![ 0xa420dc, 0xc8ffee, 0x10e433e, 0x1de9428, 0x2ed4cea, 0x52d907b, 0x5af0e3f, @@ -114,7 +114,7 @@ pub fn genesis_testnet4() -> core::Block { timestamp: Utc.ymd(2018, 10, 17).and_hms(20, 0, 0), pow: ProofOfWork { total_difficulty: Difficulty::from_num(global::initial_block_difficulty()), - scaling_difficulty: global::initial_graph_weight(), + secondary_scaling: global::initial_graph_weight(), nonce: 8612241555342799290, proof: Proof::new(vec![ 0x46f3b4, 0x1135f8c, 0x1a1596f, 0x1e10f71, 0x41c03ea, 0x63fe8e7, 0x65af34f, @@ -137,7 +137,7 @@ pub fn genesis_main() -> core::Block { timestamp: Utc.ymd(2018, 8, 14).and_hms(0, 0, 0), pow: ProofOfWork { total_difficulty: Difficulty::from_num(global::initial_block_difficulty()), - scaling_difficulty: 1, + secondary_scaling: 1, nonce: global::get_genesis_nonce(), proof: Proof::zero(consensus::PROOFSIZE), }, diff --git a/core/src/global.rs b/core/src/global.rs index 84c4d294e..e3a2c6315 100644 --- a/core/src/global.rs +++ b/core/src/global.rs @@ -20,6 +20,7 @@ use consensus::HeaderInfo; use consensus::{ graph_weight, BASE_EDGE_BITS, BLOCK_TIME_SEC, COINBASE_MATURITY, CUT_THROUGH_HORIZON, DAY_HEIGHT, DIFFICULTY_ADJUST_WINDOW, INITIAL_DIFFICULTY, PROOFSIZE, SECOND_POW_EDGE_BITS, + UNIT_DIFFICULTY }; use pow::{self, CuckatooContext, EdgeType, PoWContext}; /// An enum collecting sets of parameters used throughout the @@ -77,7 +78,7 @@ pub const PEER_EXPIRATION_REMOVE_TIME: i64 = PEER_EXPIRATION_DAYS * 24 * 3600; /// Testnet 4 initial block difficulty /// 1_000 times natural scale factor for cuckatoo29 -pub const TESTNET4_INITIAL_DIFFICULTY: u64 = 1_000 * (2 << (29 - 24)) * 29; +pub const TESTNET4_INITIAL_DIFFICULTY: u64 = 1_000 * UNIT_DIFFICULTY; /// Trigger compaction check on average every day for all nodes. 
/// Randomized per node - roll the dice on every block to decide. @@ -286,46 +287,25 @@ where let needed_block_count = DIFFICULTY_ADJUST_WINDOW as usize + 1; let mut last_n: Vec = cursor.into_iter().take(needed_block_count).collect(); - // Sort blocks from earliest to latest (to keep conceptually easier) - last_n.reverse(); // Only needed just after blockchain launch... basically ensures there's // always enough data by simulating perfectly timed pre-genesis // blocks at the genesis difficulty as needed. - let block_count_difference = needed_block_count - last_n.len(); - if block_count_difference > 0 { - // Collect any real data we have - let mut live_intervals: Vec = last_n - .iter() - .map(|b| HeaderInfo::from_ts_diff(b.timestamp, b.difficulty)) - .collect(); - for i in (1..live_intervals.len()).rev() { - // prevents issues with very fast automated test chains - if live_intervals[i - 1].timestamp > live_intervals[i].timestamp { - live_intervals[i].timestamp = 0; - } else { - live_intervals[i].timestamp -= live_intervals[i - 1].timestamp; - } - } - // Remove genesis "interval" - if live_intervals.len() > 1 { - live_intervals.remove(0); + let n = last_n.len(); + if needed_block_count > n { + let last_ts_delta = if n > 1 { + last_n[0].timestamp - last_n[1].timestamp } else { - //if it's just genesis, adjust the interval - live_intervals[0].timestamp = BLOCK_TIME_SEC; - } - let mut interval_index = live_intervals.len() - 1; - let mut last_ts = last_n.first().unwrap().timestamp; - let last_diff = live_intervals[live_intervals.len() - 1].difficulty; - // fill in simulated blocks with values from the previous real block + BLOCK_TIME_SEC + }; + let last_diff = last_n[0].difficulty; - for _ in 0..block_count_difference { - last_ts = last_ts.saturating_sub(live_intervals[live_intervals.len() - 1].timestamp); - last_n.insert(0, HeaderInfo::from_ts_diff(last_ts, last_diff)); - interval_index = match interval_index { - 0 => live_intervals.len() - 1, - _ => interval_index - 1, 
- }; + // fill in simulated blocks with values from the previous real block + let mut last_ts = last_n.last().unwrap().timestamp; + for _ in n..needed_block_count { + last_ts = last_ts.saturating_sub(last_ts_delta); + last_n.push(HeaderInfo::from_ts_diff(last_ts, last_diff.clone())); } } + last_n.reverse(); last_n } diff --git a/core/src/pow/mod.rs b/core/src/pow/mod.rs index fc231f553..1c1a1d4b2 100644 --- a/core/src/pow/mod.rs +++ b/core/src/pow/mod.rs @@ -141,12 +141,12 @@ mod test { b.header.pow.nonce = 485; pow_size( &mut b.header, - Difficulty::one(), + Difficulty::min(), global::proofsize(), global::min_edge_bits(), ).unwrap(); assert!(b.header.pow.nonce != 310); - assert!(b.header.pow.to_difficulty() >= Difficulty::one()); + assert!(b.header.pow.to_difficulty() >= Difficulty::min()); assert!(verify_size(&b.header, global::min_edge_bits()).is_ok()); } } diff --git a/core/src/pow/types.rs b/core/src/pow/types.rs index 6e287da7f..23a161ebe 100644 --- a/core/src/pow/types.rs +++ b/core/src/pow/types.rs @@ -21,7 +21,7 @@ use std::{fmt, iter}; use rand::{thread_rng, Rng}; use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; -use consensus::{graph_weight, SECOND_POW_EDGE_BITS}; +use consensus::{graph_weight, SECOND_POW_EDGE_BITS, MIN_DIFFICULTY}; use core::hash::Hashed; use global; use ser::{self, Readable, Reader, Writeable, Writer}; @@ -64,10 +64,14 @@ impl Difficulty { Difficulty { num: 0 } } - /// Difficulty of one, which is the minimum difficulty - /// (when the hash equals the max target) - pub fn one() -> Difficulty { - Difficulty { num: 1 } + /// Difficulty of MIN_DIFFICULTY + pub fn min() -> Difficulty { + Difficulty { num: MIN_DIFFICULTY } + } + + /// Difficulty unit, which is the graph weight of minimal graph + pub fn unit() -> Difficulty { + Difficulty { num: global::initial_graph_weight() as u64 } } /// Convert a `u32` into a `Difficulty` @@ -209,8 +213,8 @@ impl<'de> de::Visitor<'de> for DiffVisitor { pub struct ProofOfWork { /// Total 
accumulated difficulty since genesis block pub total_difficulty: Difficulty, - /// Difficulty scaling factor between the different proofs of work - pub scaling_difficulty: u32, + /// Variable difficulty scaling factor for secondary proof of work + pub secondary_scaling: u32, /// Nonce increment used to mine this block. pub nonce: u64, /// Proof of work data. @@ -221,8 +225,8 @@ impl Default for ProofOfWork { fn default() -> ProofOfWork { let proof_size = global::proofsize(); ProofOfWork { - total_difficulty: Difficulty::one(), - scaling_difficulty: 1, + total_difficulty: Difficulty::min(), + secondary_scaling: 1, nonce: 0, proof: Proof::zero(proof_size), } @@ -233,12 +237,12 @@ impl ProofOfWork { /// Read implementation, can't define as trait impl as we need a version pub fn read(_ver: u16, reader: &mut Reader) -> Result { let total_difficulty = Difficulty::read(reader)?; - let scaling_difficulty = reader.read_u32()?; + let secondary_scaling = reader.read_u32()?; let nonce = reader.read_u64()?; let proof = Proof::read(reader)?; Ok(ProofOfWork { total_difficulty, - scaling_difficulty, + secondary_scaling, nonce, proof, }) @@ -260,7 +264,7 @@ impl ProofOfWork { ser_multiwrite!( writer, [write_u64, self.total_difficulty.to_num()], - [write_u32, self.scaling_difficulty] + [write_u32, self.secondary_scaling] ); Ok(()) } @@ -270,7 +274,7 @@ impl ProofOfWork { // 2 proof of works, Cuckoo29 (for now) and Cuckoo30+, which are scaled // differently (scaling not controlled for now) if self.proof.edge_bits == SECOND_POW_EDGE_BITS { - Difficulty::from_proof_scaled(&self.proof, self.scaling_difficulty) + Difficulty::from_proof_scaled(&self.proof, self.secondary_scaling) } else { Difficulty::from_proof_adjusted(&self.proof) } diff --git a/core/tests/common/mod.rs b/core/tests/common/mod.rs index 66222a34d..b9604642a 100644 --- a/core/tests/common/mod.rs +++ b/core/tests/common/mod.rs @@ -92,7 +92,7 @@ where Block::new( &previous_header, txs.into_iter().cloned().collect(), - 
Difficulty::one(), + Difficulty::min(), reward_output, ).unwrap() } diff --git a/core/tests/consensus.rs b/core/tests/consensus.rs index b87ca7129..b252d9b00 100644 --- a/core/tests/consensus.rs +++ b/core/tests/consensus.rs @@ -381,55 +381,38 @@ fn adjustment_scenarios() { fn next_target_adjustment() { global::set_mining_mode(global::ChainTypes::AutomatedTesting); let cur_time = Utc::now().timestamp() as u64; + let diff_min = Difficulty::min(); - let diff_one = Difficulty::one(); - assert_eq!( - next_difficulty(1, vec![HeaderInfo::from_ts_diff(cur_time, diff_one)]), - HeaderInfo::from_diff_scaling(Difficulty::one(), 1), - ); - assert_eq!( - next_difficulty(1, vec![HeaderInfo::new(cur_time, diff_one, 10, true)]), - HeaderInfo::from_diff_scaling(Difficulty::one(), 1), - ); + // Check we don't get stuck on difficulty <= MIN_DIFFICULTY (at 4x faster blocks at least) + let mut hi = HeaderInfo::from_diff_scaling(diff_min, MIN_DIFFICULTY as u32); + hi.is_secondary = false; + let hinext = next_difficulty(1, repeat(15, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)); + assert_ne!(hinext.difficulty, diff_min); - let mut hi = HeaderInfo::from_diff_scaling(diff_one, 1); - assert_eq!( - next_difficulty(1, repeat(60, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)), - HeaderInfo::from_diff_scaling(Difficulty::one(), 1), - ); - hi.is_secondary = true; - assert_eq!( - next_difficulty(1, repeat(60, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)), - HeaderInfo::from_diff_scaling(Difficulty::one(), 1), - ); - hi.secondary_scaling = 100; - assert_eq!( - next_difficulty(1, repeat(60, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)), - HeaderInfo::from_diff_scaling(Difficulty::one(), 96), - ); - - // Check we don't get stuck on difficulty 1 - let mut hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), 1); - assert_ne!( - next_difficulty(1, repeat(1, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)).difficulty, - Difficulty::one() - ); + // Check we don't get stuck on scale MIN_DIFFICULTY, when 
primary frequency is too high + assert_ne!(hinext.secondary_scaling, MIN_DIFFICULTY as u32); // just enough data, right interval, should stay constant let just_enough = DIFFICULTY_ADJUST_WINDOW + 1; - hi.difficulty = Difficulty::from_num(1000); + hi.difficulty = Difficulty::from_num(10000); assert_eq!( - next_difficulty(1, repeat(60, hi.clone(), just_enough, None)).difficulty, - Difficulty::from_num(1000) + next_difficulty(1, repeat(BLOCK_TIME_SEC, hi.clone(), just_enough, None)).difficulty, + Difficulty::from_num(10000) + ); + + // check pre difficulty_data_to_vector effect on retargetting + assert_eq!( + next_difficulty(1, vec![HeaderInfo::from_ts_diff(42, hi.difficulty)]).difficulty, + Difficulty::from_num(14913) ); // checking averaging works hi.difficulty = Difficulty::from_num(500); let sec = DIFFICULTY_ADJUST_WINDOW / 2; - let mut s1 = repeat(60, hi.clone(), sec, Some(cur_time)); + let mut s1 = repeat(BLOCK_TIME_SEC, hi.clone(), sec, Some(cur_time)); let mut s2 = repeat_offs( - cur_time + (sec * 60) as u64, - 60, + cur_time + (sec * BLOCK_TIME_SEC) as u64, + BLOCK_TIME_SEC, 1500, DIFFICULTY_ADJUST_WINDOW / 2, ); @@ -459,16 +442,16 @@ fn next_target_adjustment() { next_difficulty(1, repeat(45, hi.clone(), just_enough, None)).difficulty, Difficulty::from_num(1090) ); + assert_eq!( + next_difficulty(1, repeat(30, hi.clone(), just_enough, None)).difficulty, + Difficulty::from_num(1200) + ); // hitting lower time bound, should always get the same result below assert_eq!( next_difficulty(1, repeat(0, hi.clone(), just_enough, None)).difficulty, Difficulty::from_num(1500) ); - assert_eq!( - next_difficulty(1, repeat(0, hi.clone(), just_enough, None)).difficulty, - Difficulty::from_num(1500) - ); // hitting higher time bound, should always get the same result above assert_eq!( @@ -480,11 +463,11 @@ fn next_target_adjustment() { Difficulty::from_num(500) ); - // We should never drop below 1 + // We should never drop below minimum hi.difficulty = Difficulty::zero(); 
assert_eq!( next_difficulty(1, repeat(90, hi.clone(), just_enough, None)).difficulty, - Difficulty::from_num(1) + Difficulty::min() ); } @@ -495,6 +478,7 @@ fn secondary_pow_scale() { // all primary, factor should increase so it becomes easier to find a high // difficulty block + hi.is_secondary = false; assert_eq!( secondary_pow_scaling(1, &(0..window).map(|_| hi.clone()).collect::>()), 147 @@ -514,17 +498,15 @@ fn secondary_pow_scale() { 49 ); // same as above, testing lowest bound - let mut low_hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), 3); + let mut low_hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), MIN_DIFFICULTY as u32); low_hi.is_secondary = true; assert_eq!( - secondary_pow_scaling( - 890_000, - &(0..window).map(|_| low_hi.clone()).collect::>() - ), - 1 + secondary_pow_scaling(890_000, &(0..window).map(|_| low_hi.clone()).collect::>()), + MIN_DIFFICULTY as u32 ); // just about the right ratio, also no longer playing with median - let primary_hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), 50); + let mut primary_hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), 50); + primary_hi.is_secondary = false; assert_eq!( secondary_pow_scaling( 1, @@ -535,7 +517,7 @@ fn secondary_pow_scale() { ), 94 ); - // 95% secondary, should come down based on 100 median + // 95% secondary, should come down based on 97.5 average assert_eq!( secondary_pow_scaling( 1, @@ -546,7 +528,7 @@ fn secondary_pow_scale() { ), 94 ); - // 40% secondary, should come up based on 50 median + // 40% secondary, should come up based on 70 average assert_eq!( secondary_pow_scaling( 1, diff --git a/doc/api/node_api.md b/doc/api/node_api.md index 65d14fee6..f158fda4f 100644 --- a/doc/api/node_api.md +++ b/doc/api/node_api.md @@ -82,7 +82,7 @@ Optionally return results as "compact blocks" by passing `?compact` query. 
| - edge_bits | number | Size of the cuckoo graph (2_log of number of edges) | | - cuckoo_solution | []number | The Cuckoo solution for this block | | - total_difficulty | number | Total accumulated difficulty since genesis block | - | - scaling_difficulty | number | Difficulty scaling factor between the different proofs of work | + | - secondary_scaling | number | Variable difficulty scaling factor for secondary proof of work | | - total_kernel_offset | string | Total kernel offset since genesis block | | inputs | []string | Input transactions | | outputs | []object | Outputs transactions | diff --git a/p2p/src/serv.rs b/p2p/src/serv.rs index a83aeb506..4ad75fde5 100644 --- a/p2p/src/serv.rs +++ b/p2p/src/serv.rs @@ -195,7 +195,7 @@ pub struct DummyAdapter {} impl ChainAdapter for DummyAdapter { fn total_difficulty(&self) -> Difficulty { - Difficulty::one() + Difficulty::min() } fn total_height(&self) -> u64 { 0 diff --git a/p2p/tests/peer_handshake.rs b/p2p/tests/peer_handshake.rs index bb2b50d56..b71725aeb 100644 --- a/p2p/tests/peer_handshake.rs +++ b/p2p/tests/peer_handshake.rs @@ -73,7 +73,7 @@ fn peer_handshake() { let mut peer = Peer::connect( &mut socket, p2p::Capabilities::UNKNOWN, - Difficulty::one(), + Difficulty::min(), my_addr, &p2p::handshake::Handshake::new(Hash::from_vec(&vec![]), p2p_config.clone()), net_adapter, @@ -84,10 +84,10 @@ fn peer_handshake() { peer.start(socket); thread::sleep(time::Duration::from_secs(1)); - peer.send_ping(Difficulty::one(), 0).unwrap(); + peer.send_ping(Difficulty::min(), 0).unwrap(); thread::sleep(time::Duration::from_secs(1)); let server_peer = server.peers.get_connected_peer(&my_addr).unwrap(); - assert_eq!(server_peer.info.total_difficulty(), Difficulty::one()); + assert_eq!(server_peer.info.total_difficulty(), Difficulty::min()); assert!(server.peers.peer_count() > 0); } diff --git a/pool/tests/block_building.rs b/pool/tests/block_building.rs index 5f464c723..288865030 100644 --- a/pool/tests/block_building.rs 
+++ b/pool/tests/block_building.rs @@ -55,7 +55,7 @@ fn test_transaction_pool_block_building() { let key_id = ExtKeychain::derive_key_id(1, height as u32, 0, 0, 0); let fee = txs.iter().map(|x| x.fee()).sum(); let reward = libtx::reward::output(&keychain, &key_id, fee, height).unwrap(); - let block = Block::new(&prev_header, txs, Difficulty::one(), reward).unwrap(); + let block = Block::new(&prev_header, txs, Difficulty::min(), reward).unwrap(); chain.update_db_for_block(&block); block.header @@ -116,7 +116,7 @@ fn test_transaction_pool_block_building() { let key_id = ExtKeychain::derive_key_id(1, 2, 0, 0, 0); let fees = txs.iter().map(|tx| tx.fee()).sum(); let reward = libtx::reward::output(&keychain, &key_id, fees, 0).unwrap(); - Block::new(&header, txs, Difficulty::one(), reward) + Block::new(&header, txs, Difficulty::min(), reward) }.unwrap(); chain.update_db_for_block(&block); diff --git a/pool/tests/block_reconciliation.rs b/pool/tests/block_reconciliation.rs index 6d85ffeb6..dc6ecac33 100644 --- a/pool/tests/block_reconciliation.rs +++ b/pool/tests/block_reconciliation.rs @@ -53,7 +53,7 @@ fn test_transaction_pool_block_reconciliation() { let height = 1; let key_id = ExtKeychain::derive_key_id(1, height as u32, 0, 0, 0); let reward = libtx::reward::output(&keychain, &key_id, 0, height).unwrap(); - let block = Block::new(&BlockHeader::default(), vec![], Difficulty::one(), reward).unwrap(); + let block = Block::new(&BlockHeader::default(), vec![], Difficulty::min(), reward).unwrap(); chain.update_db_for_block(&block); @@ -68,7 +68,7 @@ fn test_transaction_pool_block_reconciliation() { let key_id = ExtKeychain::derive_key_id(1, 2, 0, 0, 0); let fees = initial_tx.fee(); let reward = libtx::reward::output(&keychain, &key_id, fees, 0).unwrap(); - let block = Block::new(&header, vec![initial_tx], Difficulty::one(), reward).unwrap(); + let block = Block::new(&header, vec![initial_tx], Difficulty::min(), reward).unwrap(); chain.update_db_for_block(&block); @@ -158,7 
+158,7 @@ fn test_transaction_pool_block_reconciliation() { let key_id = ExtKeychain::derive_key_id(1, 3, 0, 0, 0); let fees = block_txs.iter().map(|tx| tx.fee()).sum(); let reward = libtx::reward::output(&keychain, &key_id, fees, 0).unwrap(); - let block = Block::new(&header, block_txs, Difficulty::one(), reward).unwrap(); + let block = Block::new(&header, block_txs, Difficulty::min(), reward).unwrap(); chain.update_db_for_block(&block); block diff --git a/pool/tests/transaction_pool.rs b/pool/tests/transaction_pool.rs index e8a803c6c..b9526328b 100644 --- a/pool/tests/transaction_pool.rs +++ b/pool/tests/transaction_pool.rs @@ -54,7 +54,7 @@ fn test_the_transaction_pool() { let key_id = ExtKeychain::derive_key_id(1, height as u32, 0, 0, 0); let reward = libtx::reward::output(&keychain, &key_id, 0, height).unwrap(); let mut block = - Block::new(&BlockHeader::default(), vec![], Difficulty::one(), reward).unwrap(); + Block::new(&BlockHeader::default(), vec![], Difficulty::min(), reward).unwrap(); chain.update_db_for_block(&block); diff --git a/servers/src/mining/mine_block.rs b/servers/src/mining/mine_block.rs index d61f71088..c527d5e7b 100644 --- a/servers/src/mining/mine_block.rs +++ b/servers/src/mining/mine_block.rs @@ -126,7 +126,7 @@ fn build_block( b.validate(&head.total_kernel_offset, verifier_cache)?; b.header.pow.nonce = thread_rng().gen(); - b.header.pow.scaling_difficulty = difficulty.secondary_scaling; + b.header.pow.secondary_scaling = difficulty.secondary_scaling; b.header.timestamp = DateTime::::from_utc(NaiveDateTime::from_timestamp(now_sec, 0), Utc); let b_difficulty = (b.header.total_difficulty() - head.total_difficulty()).to_num(); diff --git a/wallet/tests/common/mod.rs b/wallet/tests/common/mod.rs index 8b39a898d..f81f58536 100644 --- a/wallet/tests/common/mod.rs +++ b/wallet/tests/common/mod.rs @@ -98,7 +98,7 @@ pub fn add_block_with_reward(chain: &Chain, txs: Vec<&Transaction>, reward: CbDa (output, kernel), ).unwrap(); b.header.timestamp = 
prev.timestamp + Duration::seconds(60); - b.header.pow.scaling_difficulty = next_header_info.secondary_scaling; + b.header.pow.secondary_scaling = next_header_info.secondary_scaling; chain.set_txhashset_roots(&mut b, false).unwrap(); pow::pow_size( &mut b.header,