Mirror of https://github.com/mimblewimble/grin.git (synced 2025-02-01 08:51:08 +03:00)
remove difficulty field from block_header (derive from total_difficulty) (#777)
* remove difficulty field from block_header (derive from total_difficulty)
* address feedback on PR and general cleanup
* rustfmt
* rework pow/difficulty validation in pipe::validate_header now that we only have total_difficulty available
* cleanup various todos
* rustfmt
* rework DifficultyIterator to track header and prev_header state
* rustfmt caught some garbage syntax
* cleanup
This commit is contained in:
parent e4871065b0
commit 2552b34118
12 changed files with 89 additions and 91 deletions
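For orientation before the diff: the core idea of this change is that a header no longer stores its own difficulty; it is recovered as the difference between the header's total_difficulty and the previous header's total_difficulty. A minimal standalone sketch of that derivation, using plain u64 values and a hypothetical function name rather than the actual Grin types:

// Hypothetical sketch: per-block difficulty derived from cumulative totals.
// `u64` stands in for Grin's Difficulty type.
fn derived_block_difficulty(total_difficulty: u64, prev_total_difficulty: u64) -> u64 {
    // the header's own difficulty is simply the increase in cumulative
    // difficulty over the previous header
    total_difficulty - prev_total_difficulty
}

fn main() {
    // e.g. a chain whose cumulative difficulty moves from 1_000 to 1_250
    assert_eq!(derived_block_difficulty(1_250, 1_000), 250);
}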
@@ -511,8 +511,6 @@ pub struct BlockHeaderPrintable {
pub kernel_root: String,
/// Nonce increment used to mine this block.
pub nonce: u64,
/// Difficulty used to mine the block.
pub difficulty: u64,
/// Total accumulated difficulty since genesis block
pub total_difficulty: u64,
}

@@ -529,7 +527,6 @@ impl BlockHeaderPrintable {
range_proof_root: util::to_hex(h.range_proof_root.to_vec()),
kernel_root: util::to_hex(h.kernel_root.to_vec()),
nonce: h.nonce,
difficulty: h.difficulty.into_num(),
total_difficulty: h.total_difficulty.into_num(),
}
}
@@ -272,6 +272,8 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
)),
}?;

// make sure this header has a height exactly one higher than the previous
// header
if header.height != prev.height + 1 {
return Err(Error::InvalidBlockHeight);
}

@@ -283,43 +285,44 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
return Err(Error::InvalidBlockTime);
}

// verify the proof of work and related parameters
// at this point we have a previous block header
// we know the height increased by one
// so now we can check the total_difficulty increase is also valid
// check the pow hash shows a difficulty at least as large
// as the target difficulty
if !ctx.opts.contains(Options::SKIP_POW) {
// verify the proof of work and related parameters

// explicit check to ensure we are not below the minimum difficulty
// we will also check difficulty based on next_difficulty later on
if header.difficulty < Difficulty::one() {
if header.total_difficulty.clone() <= prev.total_difficulty.clone() {
return Err(Error::DifficultyTooLow);
}

let diff_iter = store::DifficultyIter::from(header.previous, ctx.store.clone());
let difficulty =
consensus::next_difficulty(diff_iter).map_err(|e| Error::Other(e.to_string()))?;
let target_difficulty = header.total_difficulty.clone() - prev.total_difficulty.clone();

if header.pow.clone().to_difficulty() < target_difficulty {
return Err(Error::DifficultyTooLow);
}

// explicit check to ensure we are not below the minimum difficulty
// we will also check difficulty based on next_difficulty later on
if target_difficulty < Difficulty::one() {
return Err(Error::DifficultyTooLow);
}

// explicit check to ensure total_difficulty has increased by exactly
// the _network_ difficulty of the previous block
// (during testnet1 we use _block_ difficulty here)
if header.total_difficulty != prev.total_difficulty.clone() + difficulty.clone() {
let diff_iter = store::DifficultyIter::from(header.previous, ctx.store.clone());
let network_difficulty =
consensus::next_difficulty(diff_iter).map_err(|e| Error::Other(e.to_string()))?;
if target_difficulty != network_difficulty.clone() {
error!(
LOGGER,
"validate_header: BANNABLE OFFENCE: header cumulative difficulty {} != {}",
header.difficulty.into_num(),
prev.total_difficulty.into_num() + difficulty.into_num()
target_difficulty.into_num(),
prev.total_difficulty.into_num() + network_difficulty.into_num()
);
return Err(Error::WrongTotalDifficulty);
}

// now check that the difficulty is not less than that calculated by the
// difficulty iterator based on the previous block
if header.difficulty < difficulty {
error!(
LOGGER,
"validate_header: BANNABLE OFFENCE: header difficulty {} < {}",
header.difficulty.into_num(),
difficulty.into_num()
);
return Err(Error::DifficultyTooLow);
}
}

Ok(())
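To make the reworked check order above easier to follow, here is a condensed standalone sketch of the same sequence of checks, with u64 standing in for Difficulty and string messages standing in for the real error enum (a hedged illustration, not the actual chain code; the explicit minimum-difficulty check is implied here by the strict-increase check on plain integers):

// Hypothetical condensed sketch of the reworked validation order.
fn check_header_difficulty(
    total_difficulty: u64,
    prev_total_difficulty: u64,
    pow_difficulty: u64,
    network_difficulty: u64,
) -> Result<(), String> {
    // total_difficulty must strictly increase over the previous header
    if total_difficulty <= prev_total_difficulty {
        return Err("DifficultyTooLow".to_string());
    }
    // the target is the increase in cumulative difficulty
    let target_difficulty = total_difficulty - prev_total_difficulty;
    // the proof of work must be at least as hard as the target
    if pow_difficulty < target_difficulty {
        return Err("DifficultyTooLow".to_string());
    }
    // and the target must match the network difficulty derived from the
    // difficulty iterator over previous headers
    if target_difficulty != network_difficulty {
        return Err("WrongTotalDifficulty".to_string());
    }
    Ok(())
}

fn main() {
    // a header whose cumulative difficulty rose by exactly the expected 250
    assert!(check_header_difficulty(1_250, 1_000, 300, 250).is_ok());
    // a header claiming a larger increase than the network difficulty
    assert!(check_header_difficulty(1_400, 1_000, 500, 250).is_err());
}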
@@ -416,7 +419,7 @@ fn update_head(b: &Block, ctx: &mut BlockContext) -> Result<Option<Tip>, Error>
LOGGER,
"pipe: chain head reached {} @ {} [{}]",
b.header.height,
b.header.difficulty,
b.header.total_difficulty,
b.hash()
);
} else {

@@ -424,7 +427,7 @@ fn update_head(b: &Block, ctx: &mut BlockContext) -> Result<Option<Tip>, Error>
LOGGER,
"pipe: chain head reached {} @ {} [{}]",
b.header.height,
b.header.difficulty,
b.header.total_difficulty,
b.hash()
);
}
@@ -252,8 +252,15 @@ impl ChainStore for ChainKVStore {
/// previous difficulties). Mostly used by the consensus next difficulty
/// calculation.
pub struct DifficultyIter {
next: Hash,
start: Hash,
store: Arc<ChainStore>,

// maintain state for both the "next" header in this iteration
// and its previous header in the chain ("next next" in the iteration)
// so we effectively read-ahead as we iterate through the chain back
// toward the genesis block (while maintaining current state)
header: Option<BlockHeader>,
prev_header: Option<BlockHeader>,
}

impl DifficultyIter {

@@ -261,8 +268,10 @@ impl DifficultyIter {
/// the provided block hash.
pub fn from(start: Hash, store: Arc<ChainStore>) -> DifficultyIter {
DifficultyIter {
next: start,
start: start,
store: store,
header: None,
prev_header: None,
}
}
}

@@ -271,13 +280,27 @@ impl Iterator for DifficultyIter {
type Item = Result<(u64, Difficulty), TargetError>;

fn next(&mut self) -> Option<Self::Item> {
let bhe = self.store.get_block_header(&self.next);
match bhe {
Err(_) => None,
Ok(bh) => {
self.next = bh.previous;
Some(Ok((bh.timestamp.to_timespec().sec as u64, bh.difficulty)))
}
// Get both header and previous_header if this is the initial iteration.
// Otherwise move prev_header to header and get the next prev_header.
self.header = if self.header.is_none() {
self.store.get_block_header(&self.start).ok()
} else {
self.prev_header.clone()
};

// If we have a header we can do this iteration.
// Otherwise we are done.
if let Some(header) = self.header.clone() {
self.prev_header = self.store.get_block_header(&header.previous).ok();

let prev_difficulty = self.prev_header
.clone()
.map_or(Difficulty::zero(), |x| x.total_difficulty);
let difficulty = header.total_difficulty - prev_difficulty;

Some(Ok((header.timestamp.to_timespec().sec as u64, difficulty)))
} else {
return None;
}
}
}
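The reworked iterator walks headers back toward genesis while reading one header ahead, so each step can subtract the previous total_difficulty. A standalone sketch of that read-ahead idea with hypothetical in-memory types (not the actual ChainStore-backed iterator):

struct Header {
    timestamp: u64,
    total_difficulty: u64,
    previous: Option<usize>, // index of the previous header; None at genesis
}

// Walk back toward genesis from `start`, yielding (timestamp, difficulty)
// where difficulty = total_difficulty - previous total_difficulty (zero if none).
fn difficulty_iter(headers: &[Header], start: usize) -> Vec<(u64, u64)> {
    let mut out = Vec::new();
    let mut current = Some(start);
    while let Some(idx) = current {
        let header = &headers[idx];
        // read ahead to the previous header; a missing one contributes zero
        let prev_total = header.previous.map_or(0, |p| headers[p].total_difficulty);
        out.push((header.timestamp, header.total_difficulty - prev_total));
        current = header.previous;
    }
    out
}

fn main() {
    let headers = vec![
        Header { timestamp: 0, total_difficulty: 10, previous: None }, // genesis
        Header { timestamp: 60, total_difficulty: 25, previous: Some(0) },
        Header { timestamp: 120, total_difficulty: 45, previous: Some(1) },
    ];
    // from the tip back toward genesis: difficulties 20, 15, 10
    assert_eq!(difficulty_iter(&headers, 2), vec![(120, 20), (60, 15), (0, 10)]);
}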
@@ -92,7 +92,6 @@ fn data_files() {
core::core::Block::new(&prev, vec![], &keychain, &pk, difficulty.clone()).unwrap();
b.header.timestamp = prev.timestamp + time::Duration::seconds(60);

b.header.difficulty = difficulty.clone(); // TODO: overwrite here? really?
chain.set_txhashset_roots(&mut b, false).unwrap();

pow::pow_size(

@@ -80,7 +80,6 @@ fn mine_empty_chain() {
core::core::Block::new(&prev, vec![], &keychain, &pk, difficulty.clone()).unwrap();
b.header.timestamp = prev.timestamp + time::Duration::seconds(60);

b.header.difficulty = difficulty.clone(); // TODO: overwrite here? really?
chain.set_txhashset_roots(&mut b, false).unwrap();

pow::pow_size(

@@ -429,6 +428,6 @@ fn prepare_block_nosum(
Ok(b) => b,
};
b.header.timestamp = prev.timestamp + time::Duration::seconds(60);
b.header.total_difficulty = Difficulty::from_num(diff);
b.header.total_difficulty = prev.total_difficulty.clone() + Difficulty::from_num(diff);
b
}
@@ -81,7 +81,7 @@ fn test_coinbase_maturity() {
block.header.timestamp = prev.timestamp + time::Duration::seconds(60);

let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
block.header.difficulty = difficulty.clone();

chain.set_txhashset_roots(&mut block, false).unwrap();

pow::pow_size(

@@ -138,7 +138,6 @@ fn test_coinbase_maturity() {
block.header.timestamp = prev.timestamp + time::Duration::seconds(60);

let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
block.header.difficulty = difficulty.clone();

match chain.set_txhashset_roots(&mut block, false) {
Err(Error::Transaction(transaction::Error::ImmatureCoinbase)) => (),

@@ -165,7 +164,7 @@ fn test_coinbase_maturity() {
block.header.timestamp = prev.timestamp + time::Duration::seconds(60);

let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
block.header.difficulty = difficulty.clone();

chain.set_txhashset_roots(&mut block, false).unwrap();

pow::pow_size(

@@ -200,7 +199,7 @@ fn test_coinbase_maturity() {
block.header.timestamp = prev.timestamp + time::Duration::seconds(60);

let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
block.header.difficulty = difficulty.clone();

chain.set_txhashset_roots(&mut block, false).unwrap();

pow::pow_size(
@@ -115,8 +115,6 @@ pub struct BlockHeader {
pub nonce: u64,
/// Proof of work data.
pub pow: Proof,
/// Difficulty used to mine the block.
pub difficulty: Difficulty,
/// Total accumulated difficulty since genesis block
pub total_difficulty: Difficulty,
/// Total accumulated sum of kernel offsets since genesis block.

@@ -133,7 +131,6 @@ impl Default for BlockHeader {
height: 0,
previous: ZERO_HASH,
timestamp: time::at_utc(time::Timespec { sec: 0, nsec: 0 }),
difficulty: Difficulty::one(),
total_difficulty: Difficulty::one(),
output_root: ZERO_HASH,
range_proof_root: ZERO_HASH,

@@ -160,7 +157,6 @@ impl Writeable for BlockHeader {
);

try!(writer.write_u64(self.nonce));
try!(self.difficulty.write(writer));
try!(self.total_difficulty.write(writer));
try!(self.total_kernel_offset.write(writer));

@@ -181,7 +177,6 @@ impl Readable for BlockHeader {
let rproof_root = Hash::read(reader)?;
let kernel_root = Hash::read(reader)?;
let nonce = reader.read_u64()?;
let difficulty = Difficulty::read(reader)?;
let total_difficulty = Difficulty::read(reader)?;
let total_kernel_offset = BlindingFactor::read(reader)?;
let pow = Proof::read(reader)?;

@@ -203,7 +198,6 @@ impl Readable for BlockHeader {
kernel_root: kernel_root,
pow: pow,
nonce: nonce,
difficulty: difficulty,
total_difficulty: total_difficulty,
total_kernel_offset: total_kernel_offset,
})

@@ -620,7 +614,6 @@ impl Block {
Block {
header: BlockHeader {
pow: self.header.pow.clone(),
difficulty: self.header.difficulty.clone(),
total_difficulty: self.header.total_difficulty.clone(),
..self.header
},
@@ -1048,7 +1041,7 @@ mod test {
let b = new_block(vec![], &keychain, &prev);
let mut vec = Vec::new();
ser::serialize(&mut vec, &b).expect("serialization failed");
let target_len = 1_256;
let target_len = 1_248;
assert_eq!(vec.len(), target_len,);
}

@@ -1060,7 +1053,7 @@ mod test {
let b = new_block(vec![&tx1], &keychain, &prev);
let mut vec = Vec::new();
ser::serialize(&mut vec, &b).expect("serialization failed");
let target_len = 2_900;
let target_len = 2_892;
assert_eq!(vec.len(), target_len,);
}

@@ -1071,7 +1064,7 @@ mod test {
let b = new_block(vec![], &keychain, &prev);
let mut vec = Vec::new();
ser::serialize(&mut vec, &b.as_compact_block()).expect("serialization failed");
let target_len = 1_264;
let target_len = 1_256;
assert_eq!(vec.len(), target_len,);
}

@@ -1083,7 +1076,7 @@ mod test {
let b = new_block(vec![&tx1], &keychain, &prev);
let mut vec = Vec::new();
ser::serialize(&mut vec, &b.as_compact_block()).expect("serialization failed");
let target_len = 1_270;
let target_len = 1_262;
assert_eq!(vec.len(), target_len,);
}

@@ -1100,7 +1093,7 @@ mod test {
let b = new_block(txs.iter().collect(), &keychain, &prev);
let mut vec = Vec::new();
ser::serialize(&mut vec, &b).expect("serialization failed");
let target_len = 17_696;
let target_len = 17_688;
assert_eq!(vec.len(), target_len,);
}

@@ -1117,7 +1110,7 @@ mod test {
let b = new_block(txs.iter().collect(), &keychain, &prev);
let mut vec = Vec::new();
ser::serialize(&mut vec, &b.as_compact_block()).expect("serialization failed");
let target_len = 1_324;
let target_len = 1_316;
assert_eq!(vec.len(), target_len,);
}
@@ -42,9 +42,8 @@ impl Difficulty {
Difficulty { num: 0 }
}

/// Difficulty of one, which is the minumum difficulty (when the hash
/// equals the max target)
/// TODO - is this the minimum dificulty or is consensus::MINIMUM_DIFFICULTY the minimum?
/// Difficulty of one, which is the minumum difficulty
/// (when the hash equals the max target)
pub fn one() -> Difficulty {
Difficulty { num: 1 }
}
@@ -90,7 +90,6 @@ pub fn genesis_testnet2() -> core::Block {
..time::empty_tm()
},
//TODO: Check this is over-estimated at T2 launch
difficulty: Difficulty::from_num(global::initial_block_difficulty()),
total_difficulty: Difficulty::from_num(global::initial_block_difficulty()),
nonce: 70081,
pow: core::Proof::new(vec![

@@ -122,7 +121,6 @@ pub fn genesis_main() -> core::Block {
tm_mday: 14,
..time::empty_tm()
},
difficulty: Difficulty::from_num(global::initial_block_difficulty()),
total_difficulty: Difficulty::from_num(global::initial_block_difficulty()),
nonce: global::get_genesis_nonce(),
pow: core::Proof::zero(consensus::PROOFSIZE),
@@ -149,8 +149,6 @@ the median timestamps at the beginning and the end of the window. If the timespa
range, (adjusted with a dampening factor to allow for normal variation,) then the difficulty is raised or lowered
to a value aiming for the target block solve time.

The minimum difficuly is 10, as defined in the consensus MINIMUM_DIFFICULTY value.

### The Mining Loop

All of these systems are put together in the mining loop, which attempts to create
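As a toy illustration of the adjustment mechanism the documentation hunk above describes (scale difficulty by the ratio of target window time to dampened observed window time), and emphatically not Grin's actual consensus::next_difficulty code, all names and constants below are hypothetical:

// Toy sketch only: raise difficulty when the observed window was faster
// than the target, lower it when slower, dampening the observed timespan
// to absorb normal variation. Not the real consensus algorithm.
fn toy_next_difficulty(avg_difficulty: u64, observed_secs: u64, target_secs: u64, damp: u64) -> u64 {
    // blend the observed timespan toward the target (dampening factor)
    let damped_secs = (observed_secs + (damp - 1) * target_secs) / damp;
    // scale difficulty by target / damped observed time, never below one
    std::cmp::max(1, avg_difficulty * target_secs / damped_secs)
}

fn main() {
    // blocks came in twice as fast as targeted: difficulty moves up
    assert!(toy_next_difficulty(1_000, 300, 600, 3) > 1_000);
    // blocks came in twice as slow as targeted: difficulty moves down
    assert!(toy_next_difficulty(1_000, 1_200, 600, 3) < 1_000);
}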
@@ -167,7 +167,7 @@ impl Miner {
cuckoo_size,
attempt_time_per_block,
b.header.height,
b.header.difficulty
b.header.total_difficulty
);

// look for a pow for at most attempt_time_per_block sec on the

@@ -209,7 +209,8 @@ impl Miner {
proof_diff.into_num(),
difficulty.into_num()
);
if proof_diff >= b.header.difficulty {
if proof_diff > (b.header.total_difficulty.clone() - head.total_difficulty.clone())
{
sol = Some(proof);
b.header.nonce = s.get_nonce_as_u64();
break;

@@ -304,7 +305,7 @@ impl Miner {
cuckoo_size,
attempt_time_per_block,
latest_hash,
b.header.difficulty
b.header.total_difficulty
);
let mut iter_count = 0;

@@ -327,9 +328,10 @@ impl Miner {
"Found cuckoo solution for nonce {} of difficulty {} (difficulty target {})",
b.header.nonce,
proof_diff.into_num(),
b.header.difficulty.into_num()
b.header.total_difficulty.into_num()
);
if proof_diff >= b.header.difficulty {
if proof_diff > (b.header.total_difficulty.clone() - head.total_difficulty.clone())
{
sol = Some(proof);
break;
}

@@ -420,7 +422,7 @@ impl Miner {
cuckoo_size,
attempt_time_per_block,
latest_hash,
b.header.difficulty
b.header.total_difficulty
);
let mut iter_count = 0;

@@ -438,7 +440,8 @@ impl Miner {
let pow_hash = b.hash();
if let Ok(proof) = miner.mine(&pow_hash[..]) {
let proof_diff = proof.clone().to_difficulty();
if proof_diff >= b.header.difficulty {
if proof_diff > (b.header.total_difficulty.clone() - head.total_difficulty.clone())
{
sol = Some(proof);
break;
}

@@ -537,7 +540,8 @@ impl Miner {
{
let mut mining_stats = mining_stats.write().unwrap();
mining_stats.block_height = b.header.height;
mining_stats.network_difficulty = b.header.difficulty.into_num();
mining_stats.network_difficulty =
(b.header.total_difficulty.clone() - head.total_difficulty.clone()).into_num();
}

let mut sol = None;

@@ -551,7 +555,7 @@ impl Miner {
if use_async {
sol = self.inner_loop_async(
&mut p,
b.header.difficulty.clone(),
(b.header.total_difficulty.clone() - head.total_difficulty.clone()),
&mut b,
cuckoo_size,
&head,

@@ -668,7 +672,6 @@ impl Miner {

let mut rng = rand::OsRng::new().unwrap();
b.header.nonce = rng.gen();
b.header.difficulty = difficulty;
b.header.timestamp = time::at_utc(time::Timespec::new(now_sec, 0));

let roots_result = self.chain.set_txhashset_roots(&mut b, false);
@@ -77,31 +77,18 @@ pub trait MiningWorker {
/// Validates the proof of work of a given header, and that the proof of work
/// satisfies the requirements of the header.
pub fn verify_size(bh: &BlockHeader, cuckoo_sz: u32) -> bool {
// make sure the pow hash shows a difficulty at least as large as the target
// difficulty
if bh.difficulty > bh.pow.clone().to_difficulty() {
return false;
}
Cuckoo::new(&bh.hash()[..], cuckoo_sz).verify(bh.pow.clone(), consensus::EASINESS as u64)
}

/// Uses the much easier Cuckoo20 (mostly for
/// tests).
pub fn pow20<T: MiningWorker>(
miner: &mut T,
bh: &mut BlockHeader,
diff: Difficulty,
) -> Result<(), Error> {
pow_size(miner, bh, diff, 20)
}

/// Mines a genesis block, using the config specified miner if specified.
/// Otherwise, uses the internal miner
pub fn mine_genesis_block(
miner_config: Option<types::MinerConfig>,
) -> Result<core::core::Block, Error> {
let mut gen = genesis::genesis_testnet2();
let diff = gen.header.difficulty.clone();

// total_difficulty on the genesis header *is* the difficulty of that block
let genesis_difficulty = gen.header.total_difficulty.clone();

let sz = global::sizeshift() as u32;
let proof_size = global::proofsize();

@@ -116,7 +103,7 @@ pub fn mine_genesis_block(
},
None => Box::new(cuckoo::Miner::new(consensus::EASINESS, sz, proof_size)),
};
pow_size(&mut *miner, &mut gen.header, diff, sz as u32).unwrap();
pow_size(&mut *miner, &mut gen.header, genesis_difficulty, sz as u32).unwrap();
Ok(gen)
}