improve minimum difficulty handling

This commit is contained in:
John Tromp 2018-10-18 20:37:33 +02:00
parent b56fae6f30
commit 41293e9ef7
4 changed files with 49 additions and 60 deletions

View file

@@ -70,7 +70,7 @@ fn test_coinbase_maturity() {
let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter());
let reward = libtx::reward::output(&keychain, &key_id1, 0, prev.height).unwrap();
let mut block = core::core::Block::new(&prev, vec![], Difficulty::one(), reward).unwrap();
let mut block = core::core::Block::new(&prev, vec![], Difficulty::min(), reward).unwrap();
block.header.timestamp = prev.timestamp + Duration::seconds(60);
block.header.pow.scaling_difficulty = next_header_info.secondary_scaling;
@@ -116,7 +116,7 @@ fn test_coinbase_maturity() {
let txs = vec![coinbase_txn.clone()];
let fees = txs.iter().map(|tx| tx.fee()).sum();
let reward = libtx::reward::output(&keychain, &key_id3, fees, prev.height).unwrap();
let mut block = core::core::Block::new(&prev, txs, Difficulty::one(), reward).unwrap();
let mut block = core::core::Block::new(&prev, txs, Difficulty::min(), reward).unwrap();
let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter());
block.header.timestamp = prev.timestamp + Duration::seconds(60);
block.header.pow.scaling_difficulty = next_header_info.secondary_scaling;
@@ -149,7 +149,7 @@ fn test_coinbase_maturity() {
let pk = ExtKeychainPath::new(1, 1, 0, 0, 0).to_identifier();
let reward = libtx::reward::output(&keychain, &pk, 0, prev.height).unwrap();
let mut block = core::core::Block::new(&prev, vec![], Difficulty::one(), reward).unwrap();
let mut block = core::core::Block::new(&prev, vec![], Difficulty::min(), reward).unwrap();
let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter());
block.header.timestamp = prev.timestamp + Duration::seconds(60);
block.header.pow.scaling_difficulty = next_header_info.secondary_scaling;
@@ -176,7 +176,7 @@ fn test_coinbase_maturity() {
let fees = txs.iter().map(|tx| tx.fee()).sum();
let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter());
let reward = libtx::reward::output(&keychain, &key_id4, fees, prev.height).unwrap();
let mut block = core::core::Block::new(&prev, txs, Difficulty::one(), reward).unwrap();
let mut block = core::core::Block::new(&prev, txs, Difficulty::min(), reward).unwrap();
block.header.timestamp = prev.timestamp + Duration::seconds(60);
block.header.pow.scaling_difficulty = next_header_info.secondary_scaling;

View file

@@ -158,6 +158,9 @@ pub fn graph_weight(edge_bits: u8) -> u64 {
(2 << (edge_bits - global::base_edge_bits()) as u64) * (edge_bits as u64)
}
/// minimum difficulty to avoid getting stuck when trying to increase subject to dampening
pub const MIN_DIFFICULTY: u64 = DAMP_FACTOR;
/// unit difficulty, equal to graph_weight(SECOND_POW_EDGE_BITS)
pub const UNIT_DIFFICULTY: u64 =
((2 as u64) << (SECOND_POW_EDGE_BITS - BASE_EDGE_BITS)) * (SECOND_POW_EDGE_BITS as u64);
@@ -218,7 +221,7 @@ impl HeaderInfo {
timestamp,
difficulty,
secondary_scaling: global::initial_graph_weight(),
is_secondary: false,
is_secondary: true,
}
}
@@ -229,17 +232,17 @@ impl HeaderInfo {
timestamp: 1,
difficulty,
secondary_scaling,
is_secondary: false,
is_secondary: true,
}
}
}
/// TODO: Doc
/// Move value linearly toward a goal
pub fn damp(actual: u64, goal: u64, damp_factor: u64) -> u64 {
(1 * actual + (damp_factor - 1) * goal) / damp_factor
}
/// TODO: Doc
/// limit value to be within some factor from a goal
pub fn clamp(actual: u64, goal: u64, clamp_factor: u64) -> u64 {
max(goal / clamp_factor, min(actual, goal * clamp_factor))
}
@@ -287,7 +290,8 @@ where
BLOCK_TIME_WINDOW,
CLAMP_FACTOR,
);
let difficulty = max(1, diff_sum * BLOCK_TIME_SEC / adj_ts);
// minimum of 3 avoids getting stuck due to dampening
let difficulty = max(3, diff_sum * BLOCK_TIME_SEC / adj_ts);
HeaderInfo::from_diff_scaling(Difficulty::from_num(difficulty), sec_pow_scaling)
}
@@ -316,7 +320,8 @@ pub fn secondary_pow_scaling(height: u64, diff_data: &Vec<HeaderInfo>) -> u32 {
);
let scale = scale_sum * target_pct / adj_count;
max(1, min(scale, MAX_SECONDARY_SCALING)) as u32
// minimum of 3 avoids getting stuck due to dampening
max(3, min(scale, MAX_SECONDARY_SCALING)) as u32
}
/// Consensus rule that collections of items are sorted lexicographically.

View file

@@ -21,7 +21,7 @@ use std::{fmt, iter};
use rand::{thread_rng, Rng};
use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
use consensus::{graph_weight, SECOND_POW_EDGE_BITS};
use consensus::{graph_weight, SECOND_POW_EDGE_BITS, MIN_DIFFICULTY};
use core::hash::Hashed;
use global;
use ser::{self, Readable, Reader, Writeable, Writer};
@@ -70,9 +70,14 @@ impl Difficulty {
Difficulty { num: 1 }
}
/// Difficulty of MIN_DIFFICULTY
pub fn min() -> Difficulty {
Difficulty { num: MIN_DIFFICULTY }
}
/// Difficulty unit, which is the graph weight of minimal graph
pub fn unit() -> Difficulty {
Difficulty { num: global::initial_graph_weight() }
Difficulty { num: global::initial_graph_weight() as u64 }
}
/// Convert a `u32` into a `Difficulty`

View file

@@ -381,56 +381,33 @@ fn adjustment_scenarios() {
fn next_target_adjustment() {
global::set_mining_mode(global::ChainTypes::AutomatedTesting);
let cur_time = Utc::now().timestamp() as u64;
let diff_one = Difficulty::one();
let diff_unit = Difficulty::unit();
assert_eq!(
next_difficulty(1, vec![HeaderInfo::from_ts_diff(cur_time, diff_one)]),
HeaderInfo::from_diff_scaling(diff_one, diff_unit),
);
assert_eq!(
next_difficulty(1, vec![HeaderInfo::new(cur_time, diff_one, 10, true)]),
HeaderInfo::from_diff_scaling(diff_one, 1),
);
let diff_min = Difficulty::min();
let mut hi = HeaderInfo::from_diff_scaling(diff_one, 1);
assert_eq!(
next_difficulty(1, repeat(60, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)),
HeaderInfo::from_diff_scaling(diff_one, 1),
);
hi.is_secondary = true;
assert_eq!(
next_difficulty(1, repeat(60, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)),
HeaderInfo::from_diff_scaling(diff_one, 1),
);
hi.secondary_scaling = 100;
assert_eq!(
next_difficulty(1, repeat(60, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)),
HeaderInfo::from_diff_scaling(diff_one, 96),
);
// Check we don't get stuck on difficulty <= MIN_DIFFICULTY (at 4x faster blocks at least)
let mut hi = HeaderInfo::from_diff_scaling(diff_min, MIN_DIFFICULTY as u32);
hi.is_secondary = false;
let hinext = next_difficulty(1, repeat(15, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None));
assert_ne!(hinext.difficulty, diff_min);
// Check we don't get stuck on difficulty 1
let mut hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), 1);
assert_ne!(
next_difficulty(1, repeat(1, hi.clone(), DIFFICULTY_ADJUST_WINDOW, None)).difficulty,
diff_one
);
// Check we don't get stuck on scale MIN_DIFFICULTY, when primary frequency is too high
assert_ne!(hinext.secondary_scaling, MIN_DIFFICULTY as u32);
// just enough data, right interval, should stay constant
let just_enough = DIFFICULTY_ADJUST_WINDOW + 1;
hi.difficulty = Difficulty::from_num(1000);
hi.difficulty = Difficulty::from_num(10000);
assert_eq!(
next_difficulty(1, repeat(60, hi.clone(), just_enough, None)).difficulty,
Difficulty::from_num(1000)
next_difficulty(1, repeat(BLOCK_TIME_SEC, hi.clone(), just_enough, None)).difficulty,
Difficulty::from_num(10000)
);
// checking averaging works
hi.difficulty = Difficulty::from_num(500);
let sec = DIFFICULTY_ADJUST_WINDOW / 2;
let mut s1 = repeat(60, hi.clone(), sec, Some(cur_time));
let mut s1 = repeat(BLOCK_TIME_SEC, hi.clone(), sec, Some(cur_time));
let mut s2 = repeat_offs(
cur_time + (sec * 60) as u64,
60,
cur_time + (sec * BLOCK_TIME_SEC) as u64,
BLOCK_TIME_SEC,
1500,
DIFFICULTY_ADJUST_WINDOW / 2,
);
@@ -460,16 +437,16 @@ fn next_target_adjustment() {
next_difficulty(1, repeat(45, hi.clone(), just_enough, None)).difficulty,
Difficulty::from_num(1090)
);
assert_eq!(
next_difficulty(1, repeat(30, hi.clone(), just_enough, None)).difficulty,
Difficulty::from_num(1200)
);
// hitting lower time bound, should always get the same result below
assert_eq!(
next_difficulty(1, repeat(0, hi.clone(), just_enough, None)).difficulty,
Difficulty::from_num(1500)
);
assert_eq!(
next_difficulty(1, repeat(0, hi.clone(), just_enough, None)).difficulty,
Difficulty::from_num(1500)
);
// hitting higher time bound, should always get the same result above
assert_eq!(
@@ -481,11 +458,11 @@ fn next_target_adjustment() {
Difficulty::from_num(500)
);
// We should never drop below 1
// We should never drop below minimum
hi.difficulty = Difficulty::zero();
assert_eq!(
next_difficulty(1, repeat(90, hi.clone(), just_enough, None)).difficulty,
Difficulty::from_num(1)
Difficulty::min()
);
}
@@ -496,6 +473,7 @@ fn secondary_pow_scale() {
// all primary, factor should increase so it becomes easier to find a high
// difficulty block
hi.is_secondary = false;
assert_eq!(
secondary_pow_scaling(1, &(0..window).map(|_| hi.clone()).collect()),
147
@@ -512,14 +490,15 @@ fn secondary_pow_scale() {
49
);
// same as above, testing lowest bound
let mut low_hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), 3);
let mut low_hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), MIN_DIFFICULTY as u32);
low_hi.is_secondary = true;
assert_eq!(
secondary_pow_scaling(890_000, &(0..window).map(|_| low_hi.clone()).collect()),
1
MIN_DIFFICULTY as u32
);
// just about the right ratio, also no longer playing with median
let primary_hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), 50);
let mut primary_hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), 50);
primary_hi.is_secondary = false;
assert_eq!(
secondary_pow_scaling(
1,
@@ -530,7 +509,7 @@ fn secondary_pow_scale() {
),
94
);
// 95% secondary, should come down based on 100 median
// 95% secondary, should come down based on 97.5 average
assert_eq!(
secondary_pow_scaling(
1,
@@ -541,7 +520,7 @@ fn secondary_pow_scale() {
),
94
);
// 40% secondary, should come up based on 50 median
// 40% secondary, should come up based on 70 average
assert_eq!(
secondary_pow_scaling(
1,