Mirror of https://github.com/mimblewimble/grin.git, synced 2025-01-20 19:11:08 +03:00

[WIP] Difficulty Adjustment Updates (#651)

* large updates for mining, fix async mode, will list changes in PR
* reset config and build defaults
* change to difficulty calculations
* tweaking mining params and tests
* tweaking to tests
* including pre-genesis data for difficulty adjustments, adding adjustment scenario tests
* further clarifying next_difficulty function
* moving tests out of consensus.rs
* pow test fix
* changing pre-genesis generation

This commit is contained in:
parent 84128964fa
commit 783e4c250d

14 changed files with 441 additions and 233 deletions
@@ -210,7 +210,7 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
 	// explicit check to ensure we are not below the minimum difficulty
 	// we will also check difficulty based on next_difficulty later on
-	if header.difficulty < Difficulty::minimum() {
+	if header.difficulty < Difficulty::one() {
 		return Err(Error::DifficultyTooLow);
 	}
@@ -231,11 +231,8 @@ impl Iterator for DifficultyIter {
 	fn next(&mut self) -> Option<Self::Item> {
 		let bhe = self.store.get_block_header(&self.next);
 		match bhe {
-			Err(e) => Some(Err(TargetError(e.to_string()))),
+			Err(_) => None,
 			Ok(bh) => {
-				if bh.height == 0 {
-					return None;
-				}
 				self.next = bh.previous;
 				Some(Ok((bh.timestamp.to_timespec().sec as u64, bh.difficulty)))
 			}
@@ -53,7 +53,7 @@ fn test_various_store_indices() {
 		vec![],
 		&keychain,
 		&key_id,
-		Difficulty::minimum()
+		Difficulty::one()
 	).unwrap();
 	let block_hash = block.hash();
@@ -80,7 +80,7 @@ fn test_coinbase_maturity() {
 		vec![],
 		&keychain,
 		&key_id1,
-		Difficulty::minimum()
+		Difficulty::one()
 	).unwrap();
 	block.header.timestamp = prev.timestamp + time::Duration::seconds(60);
@@ -131,7 +131,7 @@ fn test_coinbase_maturity() {
 		vec![&coinbase_txn],
 		&keychain,
 		&key_id3,
-		Difficulty::minimum(),
+		Difficulty::one(),
 	).unwrap();
 	block.header.timestamp = prev.timestamp + time::Duration::seconds(60);
@@ -163,7 +163,7 @@ fn test_coinbase_maturity() {
 		vec![],
 		&keychain,
 		&pk,
-		Difficulty::minimum()
+		Difficulty::one()
 	).unwrap();
 	block.header.timestamp = prev.timestamp + time::Duration::seconds(60);
@@ -197,7 +197,7 @@ fn test_coinbase_maturity() {
 		vec![&coinbase_txn],
 		&keychain,
 		&key_id4,
-		Difficulty::minimum(),
+		Difficulty::one(),
 	).unwrap();

 	block.header.timestamp = prev.timestamp + time::Duration::seconds(60);
@@ -1,5 +1,4 @@
-// Copyright 2016 The Grin Developers
-//
+// Copyright 2018 The Grin Developers
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
 // You may obtain a copy of the License at
@@ -23,6 +22,7 @@ use std::fmt;
 use std::cmp::max;

 use core::target::Difficulty;
+use global;

 /// A grin is divisible to 10^9, following the SI prefixes
 pub const GRIN_BASE: u64 = 1_000_000_000;
@@ -128,23 +128,33 @@ pub fn valid_header_version(height: u64, version: u16) -> bool {
 	}
 }

-/// The minimum mining difficulty we'll allow
-pub const MINIMUM_DIFFICULTY: u64 = 1;
-
 /// Time window in blocks to calculate block time median
 pub const MEDIAN_TIME_WINDOW: u64 = 11;

+/// Index at half the desired median
+pub const MEDIAN_TIME_INDEX: u64 = MEDIAN_TIME_WINDOW / 2;
+
 /// Number of blocks used to calculate difficulty adjustments
-pub const DIFFICULTY_ADJUST_WINDOW: u64 = 23;
+pub const DIFFICULTY_ADJUST_WINDOW: u64 = 60;

 /// Average time span of the difficulty adjustment window
 pub const BLOCK_TIME_WINDOW: u64 = DIFFICULTY_ADJUST_WINDOW * BLOCK_TIME_SEC;

 /// Maximum size time window used for difficulty adjustments
-pub const UPPER_TIME_BOUND: u64 = BLOCK_TIME_WINDOW * 4 / 3;
+pub const UPPER_TIME_BOUND: u64 = BLOCK_TIME_WINDOW * 2;

 /// Minimum size time window used for difficulty adjustments
-pub const LOWER_TIME_BOUND: u64 = BLOCK_TIME_WINDOW * 5 / 6;
+pub const LOWER_TIME_BOUND: u64 = BLOCK_TIME_WINDOW / 2;

+/// Dampening factor to use for difficulty adjustment
+pub const DAMP_FACTOR: u64 = 3;
+
+/// The initial difficulty at launch. This should be over-estimated
+/// and difficulty should come down at launch rather than up
+/// Currently grossly over-estimated at 10% of current
+/// ethereum GPUs (assuming 1GPU can solve a block at diff 1
+/// in one block interval)
+pub const INITIAL_DIFFICULTY: u64 = 1_000_000;
+
 /// Consensus errors
 #[derive(Clone, Debug, PartialEq)]
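The retuned constants above interact: with the existing BLOCK_TIME_SEC of 60 seconds, widening DIFFICULTY_ADJUST_WINDOW to 60 makes BLOCK_TIME_WINDOW a full hour, and the new bounds mean a single adjustment can at most double or halve the reference timespan. A standalone Rust sketch (not part of the diff) that just checks those relations:

// Illustrative only: how the adjusted consensus constants relate,
// assuming the existing BLOCK_TIME_SEC of 60 seconds.
const BLOCK_TIME_SEC: u64 = 60;
const DIFFICULTY_ADJUST_WINDOW: u64 = 60;
const BLOCK_TIME_WINDOW: u64 = DIFFICULTY_ADJUST_WINDOW * BLOCK_TIME_SEC;
const UPPER_TIME_BOUND: u64 = BLOCK_TIME_WINDOW * 2;
const LOWER_TIME_BOUND: u64 = BLOCK_TIME_WINDOW / 2;

fn main() {
    // The observed window timespan is clamped into [LOWER_TIME_BOUND, UPPER_TIME_BOUND],
    // so one adjustment can move difficulty by at most a factor of two either way.
    assert_eq!(BLOCK_TIME_WINDOW, 3_600);
    assert_eq!(UPPER_TIME_BOUND, 7_200);
    assert_eq!(LOWER_TIME_BOUND, 1_800);
}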
@@ -171,57 +181,58 @@ impl fmt::Display for TargetError {
 /// The difficulty calculation is based on both Digishield and GravityWave
 /// family of difficulty computation, coming to something very close to Zcash.
 /// The refence difficulty is an average of the difficulty over a window of
-/// 23 blocks. The corresponding timespan is calculated by using the
-/// difference between the median timestamps at the beginning and the end
-/// of the window.
+/// DIFFICULTY_ADJUST_WINDOW blocks. The corresponding timespan is calculated
+/// by using the difference between the median timestamps at the beginning
+/// and the end of the window.
 pub fn next_difficulty<T>(cursor: T) -> Result<Difficulty, TargetError>
 where
 	T: IntoIterator<Item = Result<(u64, Difficulty), TargetError>>,
 {
-	// Block times at the begining and end of the adjustment window, used to
-	// calculate medians later.
-	let mut window_begin = vec![];
-	let mut window_end = vec![];
+	// Create vector of difficulty data running from earliest
+	// to latest, and pad with simulated pre-genesis data to allow earlier
+	// adjustment if there isn't enough window data
+	// length will be DIFFICULTY_ADJUST_WINDOW+MEDIAN_TIME_WINDOW
+	let diff_data = global::difficulty_data_to_vector(cursor);

-	// Sum of difficulties in the window, used to calculate the average later.
-	let mut diff_sum = Difficulty::zero();
+	// Get the difficulty sum for averaging later
+	// Which in this case is the sum of the last
+	// DIFFICULTY_ADJUST_WINDOW elements
+	let diff_sum = diff_data.iter()
+		.skip(MEDIAN_TIME_WINDOW as usize)
+		.take(DIFFICULTY_ADJUST_WINDOW as usize)
+		.fold(Difficulty::zero(), |sum, d| sum + d.clone().unwrap().1);

-	// Enumerating backward over blocks
-	for (n, head_info) in cursor.into_iter().enumerate() {
-		let m = n as u64;
-		let (ts, diff) = head_info?;
+	// Obtain the median window for the earlier time period
+	// which is just the first MEDIAN_TIME_WINDOW elements
+	let mut window_earliest: Vec<u64> = diff_data.iter()
+		.take(MEDIAN_TIME_WINDOW as usize)
+		.map(|n| n.clone().unwrap().0)
+		.collect();

-		// Sum each element in the adjustment window. In addition, retain
-		// timestamps within median windows (at ]start;start-11] and ]end;end-11]
-		// to later calculate medians.
-		if m < DIFFICULTY_ADJUST_WINDOW {
-			diff_sum = diff_sum + diff;
+	// Obtain the median window for the latest time period
+	// i.e. the last MEDIAN_TIME_WINDOW elements
+	let mut window_latest: Vec<u64> = diff_data.iter()
+		.skip(DIFFICULTY_ADJUST_WINDOW as usize)
+		.map(|n| n.clone().unwrap().0)
+		.collect();

-			if m < MEDIAN_TIME_WINDOW {
-				window_begin.push(ts);
-			}
-		} else if m < DIFFICULTY_ADJUST_WINDOW + MEDIAN_TIME_WINDOW {
-			window_end.push(ts);
-		} else {
-			break;
-		}
-	}
+	// And obtain our median values
+	window_earliest.sort();
+	window_latest.sort();
+	let latest_ts = window_latest[MEDIAN_TIME_INDEX as usize];
+	let earliest_ts = window_earliest[MEDIAN_TIME_INDEX as usize];

-	// Check we have enough blocks
-	if window_end.len() < (MEDIAN_TIME_WINDOW as usize) {
-		return Ok(Difficulty::minimum());
-	}
+	// Calculate the average difficulty
+	let diff_avg = diff_sum.into_num() /
+		Difficulty::from_num(DIFFICULTY_ADJUST_WINDOW).into_num();

-	// Calculating time medians at the beginning and end of the window.
-	window_begin.sort();
-	window_end.sort();
-	let begin_ts = window_begin[window_begin.len() / 2];
-	let end_ts = window_end[window_end.len() / 2];
+	// Actual undampened time delta
+	let ts_delta = latest_ts - earliest_ts;

-	// Average difficulty and dampened average time
-	let diff_avg = diff_sum.into_num() as f64 /
-		Difficulty::from_num(DIFFICULTY_ADJUST_WINDOW).into_num() as f64;
-	let ts_damp = (3 * BLOCK_TIME_WINDOW + (begin_ts - end_ts)) / 4;
+	// Apply dampening
+	let ts_damp = match diff_avg {
+		n if n >= DAMP_FACTOR => ((DAMP_FACTOR-1) * BLOCK_TIME_WINDOW + ts_delta) / DAMP_FACTOR,
+		_ => ts_delta,
+	};

 	// Apply time bounds
 	let adj_ts = if ts_damp < LOWER_TIME_BOUND {
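The median handling introduced above sorts the first and last MEDIAN_TIME_WINDOW timestamps of the window and reads the element at MEDIAN_TIME_INDEX, which damps the effect of any single badly skewed timestamp. A tiny standalone illustration (not part of the diff; the helper name and timestamps are made up):

// Minimal sketch of the sort-and-index median used at each end of the window,
// with MEDIAN_TIME_WINDOW = 11 and MEDIAN_TIME_INDEX = 5.
fn median_ts(mut window: Vec<u64>) -> u64 {
    const MEDIAN_TIME_INDEX: usize = 5; // MEDIAN_TIME_WINDOW / 2
    window.sort();
    window[MEDIAN_TIME_INDEX]
}

fn main() {
    // An out-of-order 11-timestamp window: the lone outlier barely moves the median.
    let ts = vec![60, 120, 240, 180, 300, 9_999, 360, 420, 480, 540, 600];
    assert_eq!(median_ts(ts), 360);
}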
@@ -233,11 +244,10 @@ where
 	};

 	let difficulty =
-		diff_avg * Difficulty::from_num(BLOCK_TIME_WINDOW).into_num() as f64
-		/ Difficulty::from_num(adj_ts).into_num() as f64;
-	// All this ceil and f64 business is so that difficulty can always adjust
-	// for smaller numbers < 10
-	Ok(max(Difficulty::from_num(difficulty.ceil() as u64), Difficulty::minimum()))
+		diff_avg * Difficulty::from_num(BLOCK_TIME_WINDOW).into_num()
+		/ Difficulty::from_num(adj_ts).into_num();
+
+	Ok(max(Difficulty::from_num(difficulty), Difficulty::one()))
 }

 /// Consensus rule that collections of items are sorted lexicographically.
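Taken together, the new calculation averages difficulty over the window, dampens the observed timespan towards the ideal window timespan, clamps it to the new bounds, and rescales. The following standalone sketch (not part of the diff; plain u64 arithmetic and an illustrative function name stand in for the Difficulty type) reproduces those steps under the constants from this commit, and its assertions agree with the 60s and 90s cases exercised in the new core/tests/consensus.rs:

// Constants assume the figures from this commit: 60-block window, 60s target spacing.
const BLOCK_TIME_WINDOW: u64 = 60 * 60; // DIFFICULTY_ADJUST_WINDOW * BLOCK_TIME_SEC
const UPPER_TIME_BOUND: u64 = BLOCK_TIME_WINDOW * 2;
const LOWER_TIME_BOUND: u64 = BLOCK_TIME_WINDOW / 2;
const DAMP_FACTOR: u64 = 3;

fn sketch_next_difficulty(diff_avg: u64, ts_delta: u64) -> u64 {
    // Dampen the measured timespan towards the ideal window, except at tiny difficulties.
    let ts_damp = if diff_avg >= DAMP_FACTOR {
        ((DAMP_FACTOR - 1) * BLOCK_TIME_WINDOW + ts_delta) / DAMP_FACTOR
    } else {
        ts_delta
    };
    // Clamp so one adjustment can at most halve or double the difficulty.
    let adj_ts = ts_damp.max(LOWER_TIME_BOUND).min(UPPER_TIME_BOUND);
    // Scale the average difficulty by ideal/actual time, never dropping below 1.
    (diff_avg * BLOCK_TIME_WINDOW / adj_ts).max(1)
}

fn main() {
    // Blocks arriving at exactly 60s keep difficulty constant.
    assert_eq!(sketch_next_difficulty(1000, BLOCK_TIME_WINDOW), 1000);
    // Blocks at 90s: damped timespan = (2*3600 + 5400) / 3 = 4200, so 1000*3600/4200 = 857.
    assert_eq!(sketch_next_difficulty(1000, 90 * 60), 857);
}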
@@ -245,143 +255,3 @@ pub trait VerifySortOrder<T> {
 	/// Verify a collection of items is sorted as required.
 	fn verify_sort_order(&self) -> Result<(), Error>;
 }
-
-#[cfg(test)]
-use std;
-
-#[cfg(test)]
-mod test {
-	use core::target::Difficulty;
-
-	use super::*;
-
-	// Builds an iterator for next difficulty calculation with the provided
-	// constant time interval, difficulty and total length.
-	fn repeat(interval: u64, diff: u64, len: u64) -> Vec<Result<(u64, Difficulty), TargetError>> {
-		// watch overflow here, length shouldn't be ridiculous anyhow
-		assert!(len < std::usize::MAX as u64);
-		let diffs = vec![Difficulty::from_num(diff); len as usize];
-		let times = (0..(len as usize)).map(|n| n * interval as usize).rev();
-		let pairs = times.zip(diffs.iter());
-		pairs
-			.map(|(t, d)| Ok((t as u64, d.clone())))
-			.collect::<Vec<_>>()
-	}
-
-	fn repeat_offs(
-		from: u64,
-		interval: u64,
-		diff: u64,
-		len: u64,
-	) -> Vec<Result<(u64, Difficulty), TargetError>> {
-		map_vec!(repeat(interval, diff, len), |e| match e.clone() {
-			Err(e) => Err(e),
-			Ok((t, d)) => Ok((t + from, d)),
-		})
-	}
-
-	/// Checks different next_target adjustments and difficulty boundaries
-	#[test]
-	fn next_target_adjustment() {
-		// not enough data
-		assert_eq!(
-			next_difficulty(vec![]).unwrap(),
-			Difficulty::from_num(MINIMUM_DIFFICULTY)
-		);
-
-		assert_eq!(
-			next_difficulty(vec![Ok((60, Difficulty::one()))]).unwrap(),
-			Difficulty::from_num(MINIMUM_DIFFICULTY)
-		);
-
-		assert_eq!(
-			next_difficulty(repeat(60, 10, DIFFICULTY_ADJUST_WINDOW)).unwrap(),
-			Difficulty::from_num(MINIMUM_DIFFICULTY)
-		);
-
-		// just enough data, right interval, should stay constant
-
-		let just_enough = DIFFICULTY_ADJUST_WINDOW + MEDIAN_TIME_WINDOW;
-		assert_eq!(
-			next_difficulty(repeat(60, 1000, just_enough)).unwrap(),
-			Difficulty::from_num(1000)
-		);
-
-		// checking averaging works, window length is odd so need to compensate a little
-		let sec = DIFFICULTY_ADJUST_WINDOW / 2 + 1 + MEDIAN_TIME_WINDOW;
-		let mut s1 = repeat(60, 500, sec);
-		let mut s2 = repeat_offs((sec * 60) as u64, 60, 1545, DIFFICULTY_ADJUST_WINDOW / 2);
-		s2.append(&mut s1);
-		assert_eq!(next_difficulty(s2).unwrap(), Difficulty::from_num(1000));
-
-		// too slow, diff goes down
-		assert_eq!(
-			next_difficulty(repeat(90, 1000, just_enough)).unwrap(),
-			Difficulty::from_num(890)
-		);
-		assert_eq!(
-			next_difficulty(repeat(120, 1000, just_enough)).unwrap(),
-			Difficulty::from_num(800)
-		);
-
-		// too fast, diff goes up
-		assert_eq!(
-			next_difficulty(repeat(55, 1000, just_enough)).unwrap(),
-			Difficulty::from_num(1022)
-		);
-		assert_eq!(
-			next_difficulty(repeat(45, 1000, just_enough)).unwrap(),
-			Difficulty::from_num(1068)
-		);
-
-		// hitting lower time bound, should always get the same result below
-		assert_eq!(
-			next_difficulty(repeat(20, 1000, just_enough)).unwrap(),
-			Difficulty::from_num(1200)
-		);
-		assert_eq!(
-			next_difficulty(repeat(10, 1000, just_enough)).unwrap(),
-			Difficulty::from_num(1200)
-		);
-
-		// hitting higher time bound, should always get the same result above
-		assert_eq!(
-			next_difficulty(repeat(160, 1000, just_enough)).unwrap(),
-			Difficulty::from_num(750)
-		);
-		assert_eq!(
-			next_difficulty(repeat(200, 1000, just_enough)).unwrap(),
-			Difficulty::from_num(750)
-		);
-
-		// We should never drop below MINIMUM_DIFFICULTY (1)
-		assert_eq!(
-			next_difficulty(repeat(90, 0, just_enough)).unwrap(),
-			Difficulty::from_num(1)
-		);
-	}
-
-	#[test]
-	fn hard_fork_1() {
-		assert!(valid_header_version(0, 1));
-		assert!(valid_header_version(10, 1));
-		assert!(!valid_header_version(10, 2));
-		assert!(valid_header_version(250_000, 1));
-		assert!(!valid_header_version(250_001, 1));
-		assert!(!valid_header_version(500_000, 1));
-		assert!(!valid_header_version(250_001, 2));
-	}
-
-	// #[test]
-	// fn hard_fork_2() {
-	// 	assert!(valid_header_version(0, 1));
-	// 	assert!(valid_header_version(10, 1));
-	// 	assert!(valid_header_version(10, 2));
-	// 	assert!(valid_header_version(250_000, 1));
-	// 	assert!(!valid_header_version(250_001, 1));
-	// 	assert!(!valid_header_version(500_000, 1));
-	// 	assert!(valid_header_version(250_001, 2));
-	// 	assert!(valid_header_version(500_000, 2));
-	// 	assert!(!valid_header_version(500_001, 2));
-	// }
-}
@@ -33,7 +33,7 @@ use core::{
 	COINBASE_OUTPUT
 };
 use consensus;
-use consensus::{exceeds_weight, reward, MINIMUM_DIFFICULTY, REWARD, VerifySortOrder};
+use consensus::{exceeds_weight, reward, REWARD, VerifySortOrder};
 use core::hash::{Hash, Hashed, ZERO_HASH};
 use core::id::ShortIdentifiable;
 use core::target::Difficulty;
@@ -140,8 +140,8 @@ impl Default for BlockHeader {
 			height: 0,
 			previous: ZERO_HASH,
 			timestamp: time::at_utc(time::Timespec { sec: 0, nsec: 0 }),
-			difficulty: Difficulty::from_num(MINIMUM_DIFFICULTY),
-			total_difficulty: Difficulty::from_num(MINIMUM_DIFFICULTY),
+			difficulty: Difficulty::one(),
+			total_difficulty: Difficulty::one(),
 			utxo_root: ZERO_HASH,
 			range_proof_root: ZERO_HASH,
 			kernel_root: ZERO_HASH,
@@ -851,7 +851,7 @@ mod test {
 			txs,
 			keychain,
 			&key_id,
-			Difficulty::minimum()
+			Difficulty::one()
 		).unwrap()
 	}
@@ -396,7 +396,7 @@ mod test {
 			vec![],
 			&keychain,
 			&key_id,
-			Difficulty::minimum(),
+			Difficulty::one(),
 		).unwrap();
 		b.cut_through().validate().unwrap();
 	}
@@ -414,7 +414,7 @@ mod test {
 			vec![&mut tx1],
 			&keychain,
 			&key_id,
-			Difficulty::minimum(),
+			Difficulty::one(),
 		).unwrap();
 		b.cut_through().validate().unwrap();
 	}
@@ -432,7 +432,7 @@ mod test {
 			vec![&mut tx1, &mut tx2],
 			&keychain,
 			&key_id,
-			Difficulty::minimum(),
+			Difficulty::one(),
 		).unwrap();
 		b.validate().unwrap();
 	}
@@ -463,7 +463,7 @@ mod test {
 			vec![&tx1],
 			&keychain,
 			&key_id3.clone(),
-			Difficulty::minimum(),
+			Difficulty::one(),
 		).unwrap();
 		b.validate().unwrap();
@@ -484,7 +484,7 @@ mod test {
 			vec![&tx1],
 			&keychain,
 			&key_id3.clone(),
-			Difficulty::minimum(),
+			Difficulty::one(),
 		).unwrap();
 		match b.validate() {
 			Err(KernelLockHeight(height)) => {
@@ -25,10 +25,8 @@ use std::ops::{Add, Div, Mul, Sub};
 use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
 use byteorder::{BigEndian, ByteOrder};

-use consensus;
 use core::hash::Hash;
 use ser::{self, Readable, Reader, Writeable, Writer};
-use util::logger::LOGGER;
 use core::global;
@@ -52,11 +50,6 @@ impl Difficulty {
 		Difficulty { num: 1 }
 	}

-	/// Minimum difficulty according to our consensus rules.
-	pub fn minimum() -> Difficulty {
-		Difficulty { num: consensus::MINIMUM_DIFFICULTY }
-	}
-
 	/// Convert a `u32` into a `Difficulty`
 	pub fn from_num(num: u64) -> Difficulty {
 		Difficulty { num: num }
@@ -70,7 +63,6 @@ impl Difficulty {
 		let mut in_vec = h.to_vec();
 		in_vec.truncate(8);
 		let num = BigEndian::read_u64(&in_vec);
-		trace!(LOGGER, "Calculated difficulty: {}", max_target as f64 / num as f64);
 		Difficulty { num: max_target / num }
 	}
@@ -89,6 +89,9 @@ pub fn genesis_testnet2() -> core::Block {
 			tm_hour: 20,
 			..time::empty_tm()
 		},
+		//TODO: Check this is over-estimated at T2 launch
+		difficulty: Difficulty::from_num(global::initial_block_difficulty()),
+		total_difficulty: Difficulty::from_num(global::initial_block_difficulty()),
 		nonce: 70081,
 		pow: core::Proof::new(vec![0x43ee48, 0x18d5a49, 0x2b76803, 0x3181a29, 0x39d6a8a, 0x39ef8d8,
 			0x478a0fb, 0x69c1f9e, 0x6da4bca, 0x6f8782c, 0x9d842d7, 0xa051397,
@@ -118,8 +121,8 @@ pub fn genesis_main() -> core::Block {
 			tm_mday: 14,
 			..time::empty_tm()
 		},
-		difficulty: Difficulty::from_num(1000),
-		total_difficulty: Difficulty::from_num(1000),
+		difficulty: Difficulty::from_num(global::initial_block_difficulty()),
+		total_difficulty: Difficulty::from_num(global::initial_block_difficulty()),
 		nonce: global::get_genesis_nonce(),
 		pow: core::Proof::zero(consensus::PROOFSIZE),
 		..Default::default()
@@ -1,4 +1,4 @@
-// Copyright 2017 The Grin Developers
+// Copyright 2018 The Grin Developers
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -25,6 +25,10 @@ use std::sync::RwLock;
 use consensus::PROOFSIZE;
 use consensus::DEFAULT_SIZESHIFT;
 use consensus::COINBASE_MATURITY;
+use consensus::{MEDIAN_TIME_WINDOW, INITIAL_DIFFICULTY,
+	BLOCK_TIME_SEC, DIFFICULTY_ADJUST_WINDOW};
+use core::target::Difficulty;
+use consensus::TargetError;

 /// Define these here, as they should be developer-set, not really tweakable
 /// by users
@@ -47,6 +51,14 @@ pub const AUTOMATED_TESTING_COINBASE_MATURITY: u64 = 3;
 /// User testing coinbase maturity
 pub const USER_TESTING_COINBASE_MATURITY: u64 = 3;

+/// Testing initial block difficulty
+pub const TESTING_INITIAL_DIFFICULTY: u64 = 1;
+
+/// Testing initial block difficulty, testnet 2
+/// we want to overestimate here as well
+/// Setting to 1 for development, but should be 1000 at T2 launch
+pub const TESTNET2_INITIAL_DIFFICULTY: u64 = 1;
+
 /// The target is the 32-bytes hash block hashes must be lower than.
 pub const MAX_PROOF_TARGET: [u8; 8] = [0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff];
@@ -139,6 +151,18 @@ pub fn max_proof_target() -> [u8; 8] {
 	}
 }

+/// Initial mining difficulty
+pub fn initial_block_difficulty() -> u64 {
+	let param_ref = CHAIN_TYPE.read().unwrap();
+	match *param_ref {
+		ChainTypes::AutomatedTesting => TESTING_INITIAL_DIFFICULTY,
+		ChainTypes::UserTesting => TESTING_INITIAL_DIFFICULTY,
+		ChainTypes::Testnet1 => TESTING_INITIAL_DIFFICULTY,
+		ChainTypes::Testnet2 => TESTNET2_INITIAL_DIFFICULTY,
+		ChainTypes::Mainnet => INITIAL_DIFFICULTY,
+	}
+}
+
 /// Are we in automated testing mode?
 pub fn is_automated_testing_mode() -> bool {
 	let param_ref = CHAIN_TYPE.read().unwrap();
@@ -162,8 +186,7 @@ pub fn is_production_mode() -> bool {
 /// Helper function to get a nonce known to create a valid POW on
 /// the genesis block, to prevent it taking ages. Should be fine for now
 /// as the genesis block POW solution turns out to be the same for every new
-/// block chain
-/// at the moment
+/// block chain at the moment
 pub fn get_genesis_nonce() -> u64 {
 	let param_ref = CHAIN_TYPE.read().unwrap();
 	match *param_ref {
@@ -176,3 +199,57 @@ pub fn get_genesis_nonce() -> u64 {
 		_ => panic!("Pre-set"),
 	}
 }
+
+/// Converts an iterator of block difficulty data to more a more mangeable vector and pads
+/// if needed (which will) only be needed for the first few blocks after genesis
+
+pub fn difficulty_data_to_vector<T>(cursor: T) -> Vec<Result<(u64, Difficulty), TargetError>>
+where
+	T: IntoIterator<Item = Result<(u64, Difficulty), TargetError>> {
+	// Convert iterator to vector, so we can append to it if necessary
+	let needed_block_count = (MEDIAN_TIME_WINDOW + DIFFICULTY_ADJUST_WINDOW) as usize;
+	let mut last_n: Vec<Result<(u64, Difficulty), TargetError>> = cursor.into_iter()
+		.take(needed_block_count)
+		.collect();
+
+	// Sort blocks from earliest to latest (to keep conceptually easier)
+	last_n.reverse();
+	// Only needed just after blockchain launch... basically ensures there's
+	// always enough data by simulating perfectly timed pre-genesis
+	// blocks at the genesis difficulty as needed.
+	let block_count_difference = needed_block_count - last_n.len();
+	if block_count_difference > 0 {
+		// Collect any real data we have
+		let mut live_intervals:Vec<(u64, Difficulty)> = last_n.iter()
+			.map(|b| (b.clone().unwrap().0, b.clone().unwrap().1))
+			.collect();
+		for i in (1..live_intervals.len()).rev() {
+			live_intervals[i].0=live_intervals[i].0-live_intervals[i-1].0;
+		}
+		//
+		// Remove genesis "interval"
+		if live_intervals.len() > 1 {
+			live_intervals.remove(0);
+		} else {
+			//if it's just genesis, adjust the interval
+			live_intervals[0].0 = BLOCK_TIME_SEC;
+		}
+		let mut interval_index = live_intervals.len() - 1;
+		let mut last_ts = last_n.first().as_ref().unwrap().as_ref().unwrap().0;
+		// fill in simulated blocks, repeating whatever pattern we've obtained from
+		// real data
+		// if we have, say, 15 blocks so far with intervals of I1..I15, then
+		// the 71-15=56 pre genesis blocks will have
+		// intervals/difficulties I1..I15 I1..I15 I1..I15 I1..I11
+		for _ in 0..block_count_difference {
+			last_ts = last_ts - live_intervals[interval_index].0;
+			let last_diff = &live_intervals[interval_index].1;
+			last_n.insert(0, Ok((last_ts, last_diff.clone())));
+			interval_index = match interval_index {
+				0 => live_intervals.len()-1,
+				_ => interval_index - 1,
+			};
+		}
+	}
+	last_n
+}
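The padding above walks backwards from the earliest real block, reusing the observed intervals in a cycle until enough simulated pre-genesis history exists. A simplified standalone sketch of that idea (not part of the diff; it operates on plain (timestamp, difficulty) tuples and the helper name is illustrative):

// Minimal sketch of the pre-genesis padding: when fewer than needed_block_count real
// blocks exist, synthesize earlier blocks by cycling backwards through observed intervals.
fn pad_with_pre_genesis(
    mut last_n: Vec<(u64, u64)>, // (timestamp, difficulty), earliest first
    needed_block_count: usize,
    block_time_sec: u64,
) -> Vec<(u64, u64)> {
    let missing = needed_block_count.saturating_sub(last_n.len());
    if missing == 0 || last_n.is_empty() {
        return last_n;
    }
    // Intervals between consecutive real blocks; a lone genesis gets the target spacing.
    let mut intervals: Vec<(u64, u64)> = last_n
        .windows(2)
        .map(|w| (w[1].0 - w[0].0, w[1].1))
        .collect();
    if intervals.is_empty() {
        intervals.push((block_time_sec, last_n[0].1));
    }
    // Walk backwards from the earliest real block, repeating the interval pattern.
    let mut idx = intervals.len() - 1;
    let mut ts = last_n[0].0;
    for _ in 0..missing {
        ts -= intervals[idx].0;
        last_n.insert(0, (ts, intervals[idx].1));
        idx = if idx == 0 { intervals.len() - 1 } else { idx - 1 };
    }
    last_n
}

fn main() {
    // Two real blocks 40s apart; pad to five entries total.
    let padded = pad_with_pre_genesis(vec![(1000, 7), (1040, 7)], 5, 60);
    assert_eq!(padded.len(), 5);
    assert_eq!(padded[0].0, 1000 - 3 * 40); // simulated blocks reuse the 40s interval
}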
core/tests/consensus.rs (new file, 270 lines)
@@ -0,0 +1,270 @@
+// Copyright 2018 The Grin Developers
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//! core consensus.rs tests (separated to de-clutter consensus.rs)
+#[macro_use]
+extern crate grin_core as core;
+extern crate time;
+
+use core::core::target::Difficulty;
+use core::global;
+use core::consensus::*;
+
+// Builds an iterator for next difficulty calculation with the provided
+// constant time interval, difficulty and total length.
+fn repeat(interval: u64, diff: u64, len: u64, cur_time:Option<u64>) -> Vec<Result<(u64, Difficulty), TargetError>> {
+	let cur_time = match cur_time {
+		Some(t) => t,
+		None => time::get_time().sec as u64,
+	};
+	// watch overflow here, length shouldn't be ridiculous anyhow
+	assert!(len < std::usize::MAX as u64);
+	let diffs = vec![Difficulty::from_num(diff); len as usize];
+	let times = (0..(len as usize)).map(|n| n * interval as usize).rev();
+	let pairs = times.zip(diffs.iter());
+	pairs
+		.map(|(t, d)| Ok((cur_time + t as u64, d.clone())))
+		.collect::<Vec<_>>()
+}
+
+// Creates a new chain with a genesis at a simulated difficulty
+fn create_chain_sim(diff: u64) -> Vec<Result<(u64, Difficulty), TargetError>> {
+	vec![Ok((time::get_time().sec as u64, Difficulty::from_num(diff)))]
+}
+
+// Adds another 'block' to the iterator, so to speak, with difficulty calculated
+// from the difficulty adjustment at interval seconds from the previous block
+fn add_block(interval: u64, chain_sim: Vec<Result<(u64, Difficulty), TargetError>>)
+	-> Vec<Result<(u64, Difficulty), TargetError>> {
+	let mut return_chain = chain_sim.clone();
+	// get last interval
+	let last_elem = chain_sim.first().as_ref().unwrap().as_ref().unwrap();
+	return_chain.insert(0, Ok((last_elem.0+interval, last_elem.clone().1)));
+	let diff = next_difficulty(return_chain.clone()).unwrap();
+	return_chain[0]=Ok((last_elem.0+interval, diff));
+	return_chain
+}
+
+// Adds another n 'blocks' to the iterator, with difficulty calculated
+fn add_block_repeated(interval: u64, chain_sim: Vec<Result<(u64, Difficulty), TargetError>>, iterations: usize)
+	-> Vec<Result<(u64, Difficulty), TargetError>> {
+	let mut return_chain = chain_sim.clone();
+	for _ in 0..iterations {
+		return_chain = add_block(interval, return_chain.clone());
+	}
+	return_chain
+}
+
+// Prints the contents of the iterator and its difficulties.. useful for tweaking
+fn print_chain_sim(chain_sim: &Vec<Result<(u64, Difficulty), TargetError>>) {
+	let mut chain_sim=chain_sim.clone();
+	chain_sim.reverse();
+	let mut last_time=0;
+	chain_sim.iter()
+		.enumerate()
+		.for_each(|(i, b)| {
+			let block = b.as_ref().unwrap();
+			println!("Height: {}, Time: {}, Interval: {}, Next network difficulty:{}",
+				i, block.0, block.0-last_time, block.1);
+			last_time=block.0;
+		});
+}
+
+fn repeat_offs(
+	from: u64,
+	interval: u64,
+	diff: u64,
+	len: u64,
+) -> Vec<Result<(u64, Difficulty), TargetError>> {
+	map_vec!(repeat(interval, diff, len, Some(from)), |e| match e.clone() {
+		Err(e) => Err(e),
+		Ok((t, d)) => Ok((t, d)),
+	})
+}
+
+/// Checks different next_target adjustments and difficulty boundaries
+#[test]
+fn adjustment_scenarios() {
+	// Use production parameters for genesis diff
+	global::set_mining_mode(global::ChainTypes::Mainnet);
+
+	// Genesis block with initial diff
+	let chain_sim = create_chain_sim(global::initial_block_difficulty());
+	// Scenario 1) Hash power is massively over estimated, first block takes an hour
+	let chain_sim = add_block_repeated(3600, chain_sim, 2);
+	let chain_sim = add_block_repeated(1800, chain_sim, 2);
+	let chain_sim = add_block_repeated(900, chain_sim, 10);
+
+	println!("*********************************************************");
+	println!("Scenario 1) Grossly over-estimated genesis difficulty ");
+	println!("*********************************************************");
+	print_chain_sim(&chain_sim);
+	println!("*********************************************************");
+
+	// Under-estimated difficulty
+	let chain_sim = create_chain_sim(global::initial_block_difficulty());
+	let chain_sim = add_block_repeated(1, chain_sim, 5);
+	let chain_sim = add_block_repeated(20, chain_sim, 5);
+
+	println!("*********************************************************");
+	println!("Scenario 2) Grossly under-estimated genesis difficulty ");
+	println!("*********************************************************");
+	print_chain_sim(&chain_sim);
+	println!("*********************************************************");
+	let just_enough = (DIFFICULTY_ADJUST_WINDOW + MEDIAN_TIME_WINDOW) as usize;
+
+	// Steady difficulty for a good while, then a sudden drop
+	let chain_sim = create_chain_sim(global::initial_block_difficulty());
+	let chain_sim = add_block_repeated(10, chain_sim, just_enough as usize);
+	let chain_sim = add_block_repeated(600, chain_sim, 10);
+
+	println!("");
+	println!("*********************************************************");
+	println!("Scenario 3) Sudden drop in hashpower");
+	println!("*********************************************************");
+	print_chain_sim(&chain_sim);
+	println!("*********************************************************");
+
+	// Sudden increase
+	let chain_sim = create_chain_sim(global::initial_block_difficulty());
+	let chain_sim = add_block_repeated(60, chain_sim, just_enough as usize);
+	let chain_sim = add_block_repeated(10, chain_sim, 10);
+
+	println!("");
+	println!("*********************************************************");
+	println!("Scenario 4) Sudden increase in hashpower");
+	println!("*********************************************************");
+	print_chain_sim(&chain_sim);
+	println!("*********************************************************");
+
+	// Oscillations
+	let chain_sim = create_chain_sim(global::initial_block_difficulty());
+	let chain_sim = add_block_repeated(60, chain_sim, just_enough as usize);
+	let chain_sim = add_block_repeated(10, chain_sim, 10);
+	let chain_sim = add_block_repeated(60, chain_sim, 20);
+	let chain_sim = add_block_repeated(10, chain_sim, 10);
+
+	println!("");
+	println!("*********************************************************");
+	println!("Scenario 5) Oscillations in hashpower");
+	println!("*********************************************************");
+	print_chain_sim(&chain_sim);
+	println!("*********************************************************");
+}
+
+/// Checks different next_target adjustments and difficulty boundaries
+#[test]
+fn next_target_adjustment() {
+	global::set_mining_mode(global::ChainTypes::AutomatedTesting);
+	let cur_time = time::get_time().sec as u64;
+
+	assert_eq!(
+		next_difficulty(vec![Ok((cur_time, Difficulty::one()))]).unwrap(),
+		Difficulty::one()
+	);
+
+	assert_eq!(
+		next_difficulty(repeat(60, 1, DIFFICULTY_ADJUST_WINDOW, None)).unwrap(),
+		Difficulty::one()
+	);
+
+	// Check we don't get stuck on difficulty 1
+	assert_ne!(
+		next_difficulty(repeat(1, 10, DIFFICULTY_ADJUST_WINDOW, None)).unwrap(),
+		Difficulty::one()
+	);
+
+	// just enough data, right interval, should stay constant
+	let just_enough = DIFFICULTY_ADJUST_WINDOW + MEDIAN_TIME_WINDOW;
+	assert_eq!(
+		next_difficulty(repeat(60, 1000, just_enough, None)).unwrap(),
+		Difficulty::from_num(1000)
+	);
+
+	// checking averaging works
+	let sec = DIFFICULTY_ADJUST_WINDOW / 2 + MEDIAN_TIME_WINDOW;
+	let mut s1 = repeat(60, 500, sec, Some(cur_time));
+	let mut s2 = repeat_offs(cur_time+(sec * 60) as u64, 60, 1500, DIFFICULTY_ADJUST_WINDOW / 2);
+	s2.append(&mut s1);
+	assert_eq!(next_difficulty(s2).unwrap(), Difficulty::from_num(1000));
+
+	// too slow, diff goes down
+	assert_eq!(
+		next_difficulty(repeat(90, 1000, just_enough, None)).unwrap(),
+		Difficulty::from_num(857)
+	);
+	assert_eq!(
+		next_difficulty(repeat(120, 1000, just_enough, None)).unwrap(),
+		Difficulty::from_num(750)
+	);
+
+	// too fast, diff goes up
+	assert_eq!(
+		next_difficulty(repeat(55, 1000, just_enough, None)).unwrap(),
+		Difficulty::from_num(1028)
+	);
+	assert_eq!(
+		next_difficulty(repeat(45, 1000, just_enough, None)).unwrap(),
+		Difficulty::from_num(1090)
+	);
+
+	// hitting lower time bound, should always get the same result below
+	assert_eq!(
+		next_difficulty(repeat(0, 1000, just_enough, None)).unwrap(),
+		Difficulty::from_num(1500)
+	);
+	assert_eq!(
+		next_difficulty(repeat(0, 1000, just_enough, None)).unwrap(),
+		Difficulty::from_num(1500)
+	);
+
+	// hitting higher time bound, should always get the same result above
+	assert_eq!(
+		next_difficulty(repeat(300, 1000, just_enough, None)).unwrap(),
+		Difficulty::from_num(500)
+	);
+	assert_eq!(
+		next_difficulty(repeat(400, 1000, just_enough, None)).unwrap(),
+		Difficulty::from_num(500)
+	);
+
+	// We should never drop below 1
+	assert_eq!(
+		next_difficulty(repeat(90, 0, just_enough, None)).unwrap(),
+		Difficulty::from_num(1)
+	);
+}
+
+#[test]
+fn hard_fork_1() {
+	assert!(valid_header_version(0, 1));
+	assert!(valid_header_version(10, 1));
+	assert!(!valid_header_version(10, 2));
+	assert!(valid_header_version(250_000, 1));
+	assert!(!valid_header_version(250_001, 1));
+	assert!(!valid_header_version(500_000, 1));
+	assert!(!valid_header_version(250_001, 2));
+}
+
+// #[test]
+// fn hard_fork_2() {
+// 	assert!(valid_header_version(0, 1));
+// 	assert!(valid_header_version(10, 1));
+// 	assert!(valid_header_version(10, 2));
+// 	assert!(valid_header_version(250_000, 1));
+// 	assert!(!valid_header_version(250_001, 1));
+// 	assert!(!valid_header_version(500_000, 1));
+// 	assert!(valid_header_version(250_001, 2));
+// 	assert!(valid_header_version(500_000, 2));
+// 	assert!(!valid_header_version(500_001, 2));
+// }
@@ -604,7 +604,7 @@ impl Miner {
 			b.inputs.len(),
 			b.outputs.len(),
-			difficulty.clone().into_num(),
+			b.header.clone().difficulty.clone().into_num(),
 			b.header.clone().total_difficulty.clone().into_num(),
 		);

 		// making sure we're not spending time mining a useless block
@@ -912,7 +912,7 @@ mod tests {
 			txs.iter().collect(),
 			&keychain,
 			&key_id,
-			Difficulty::minimum(),
+			Difficulty::one(),
 		).unwrap();

 		// now apply the block to ensure the chainstate is updated before we reconcile
@@ -1043,7 +1043,7 @@ mod tests {
 			block_transactions,
 			&keychain,
 			&key_id,
-			Difficulty::minimum(),
+			Difficulty::one(),
 		).unwrap();

 		chain_ref.apply_block(&block);
@@ -1172,7 +1172,7 @@ mod tests {
 			tx_refs,
 			&keychain,
 			&key_id,
-			Difficulty::minimum()
+			Difficulty::one(),
 		).unwrap();
 	}
@@ -166,7 +166,6 @@ mod test {
 	use global;
 	use core::core::target::Difficulty;
 	use core::genesis;
-	use core::consensus::MINIMUM_DIFFICULTY;
 	use core::global::ChainTypes;

 	#[test]
@@ -182,11 +181,11 @@ mod test {
 		pow_size(
 			&mut internal_miner,
 			&mut b.header,
-			Difficulty::from_num(MINIMUM_DIFFICULTY),
+			Difficulty::one(),
 			global::sizeshift() as u32,
 		).unwrap();
 		assert!(b.header.nonce != 310);
-		assert!(b.header.pow.clone().to_difficulty() >= Difficulty::from_num(MINIMUM_DIFFICULTY));
+		assert!(b.header.pow.clone().to_difficulty() >= Difficulty::one());
 		assert!(verify_size(&b.header, global::sizeshift() as u32));
 	}
 }