Hard fork tryout: better PoW encapsulation in block header (#1478)
* Improve encapsulation with ProofOfWork struct
* Add dual pow scaling factor, fix test
* Fix pre_pow serialization, chain tests
* Adjust header serialized size calc
* Hard fork handling, version-based serialization
This commit is contained in:
parent 48857b7e16
commit ecf20602d5

17 changed files with 262 additions and 128 deletions
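For orientation, here is a condensed sketch of the encapsulation this commit introduces. It is not the actual grin code: `Difficulty` and `Proof` are stubbed so the snippet stands alone, and only the fields relevant to this change are shown; the real definitions appear in the diff hunks below.

```rust
// Condensed sketch of the new layout (stubs, not grin's real types).
#[derive(Clone, Debug, PartialEq, Default)]
struct Difficulty(u64); // stub for core's Difficulty

#[derive(Clone, Debug, PartialEq, Default)]
struct Proof {
    cuckoo_sizeshift: u8, // stub for core's Proof
    nonces: Vec<u64>,
}

/// Everything proof-of-work related now lives in one struct on the header.
#[derive(Clone, Debug, PartialEq, Default)]
struct ProofOfWork {
    total_difficulty: Difficulty, // moved off BlockHeader
    scaling_difficulty: u64,      // new dual-PoW scaling factor
    nonce: u64,                   // moved off BlockHeader
    proof: Proof,                 // the cuckoo solution itself
}

/// BlockHeader keeps a single `pow` field instead of nonce/pow/total_difficulty.
struct BlockHeader {
    version: u16,
    // ...other chain fields unchanged...
    pow: ProofOfWork,
}

fn main() {
    let h = BlockHeader { version: 2, pow: ProofOfWork::default() };
    // Callers now reach through `pow`, e.g. h.pow.nonce, h.pow.total_difficulty.
    println!("nonce = {}, sizeshift = {}", h.pow.nonce, h.pow.proof.cuckoo_sizeshift);
}
```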
@@ -521,10 +521,10 @@ impl BlockHeaderPrintable {
             output_root: util::to_hex(h.output_root.to_vec()),
             range_proof_root: util::to_hex(h.range_proof_root.to_vec()),
             kernel_root: util::to_hex(h.kernel_root.to_vec()),
-            nonce: h.nonce,
-            cuckoo_size: h.pow.cuckoo_sizeshift,
-            cuckoo_solution: h.pow.nonces.clone(),
-            total_difficulty: h.total_difficulty.to_num(),
+            nonce: h.pow.nonce,
+            cuckoo_size: h.pow.cuckoo_sizeshift(),
+            cuckoo_solution: h.pow.proof.nonces.clone(),
+            total_difficulty: h.pow.total_difficulty.to_num(),
             total_kernel_offset: h.total_kernel_offset.to_hex(),
         }
     }
@@ -392,14 +392,14 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
     }

     if !ctx.opts.contains(Options::SKIP_POW) {
-        if global::min_sizeshift() > header.pow.cuckoo_sizeshift {
+        if global::min_sizeshift() > header.pow.cuckoo_sizeshift() {
             return Err(ErrorKind::LowSizeshift.into());
         }
-        if !(ctx.pow_verifier)(header, header.pow.cuckoo_sizeshift) {
+        if !(ctx.pow_verifier)(header, header.pow.cuckoo_sizeshift()) {
             error!(
                 LOGGER,
                 "pipe: validate_header failed for cuckoo shift size {}",
-                header.pow.cuckoo_sizeshift,
+                header.pow.cuckoo_sizeshift()
             );
             return Err(ErrorKind::InvalidPow.into());
         }
@@ -436,11 +436,11 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
     // check the pow hash shows a difficulty at least as large
     // as the target difficulty
     if !ctx.opts.contains(Options::SKIP_POW) {
-        if header.total_difficulty.clone() <= prev.total_difficulty.clone() {
+        if header.total_difficulty() <= prev.total_difficulty() {
             return Err(ErrorKind::DifficultyTooLow.into());
         }

-        let target_difficulty = header.total_difficulty.clone() - prev.total_difficulty.clone();
+        let target_difficulty = header.total_difficulty() - prev.total_difficulty();

         if header.pow.to_difficulty() < target_difficulty {
             return Err(ErrorKind::DifficultyTooLow.into());
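A worked illustration of the two checks above, with made-up numbers (total difficulty is cumulative, so the block's own target is the difference between this header's total and the previous one's):

```rust
// Illustration of the validate_header difficulty checks above; the numbers
// are invented and the variables stand in for the header/prev accessors.
fn main() {
    let prev_total: u64 = 1_000;   // prev.total_difficulty()
    let header_total: u64 = 1_030; // header.total_difficulty()
    let proof_diff: u64 = 42;      // header.pow.to_difficulty()

    // First check: the claimed cumulative difficulty must strictly increase.
    assert!(header_total > prev_total, "DifficultyTooLow");

    // Second check: the proof itself must meet the implied per-block target.
    let target_difficulty = header_total - prev_total; // 30 here
    assert!(proof_diff >= target_difficulty, "DifficultyTooLow");

    println!("proof difficulty {} meets target {}", proof_diff, target_difficulty);
}
```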
@@ -446,8 +446,8 @@ impl Iterator for DifficultyIter {
             let prev_difficulty = self
                 .prev_header
                 .clone()
-                .map_or(Difficulty::zero(), |x| x.total_difficulty);
-            let difficulty = header.total_difficulty - prev_difficulty;
+                .map_or(Difficulty::zero(), |x| x.total_difficulty());
+            let difficulty = header.total_difficulty() - prev_difficulty;

             Some(Ok((header.timestamp.timestamp() as u64, difficulty)))
         } else {
@@ -78,7 +78,7 @@ impl Tip {
             height: bh.height,
             last_block_h: bh.hash(),
             prev_block_h: bh.previous,
-            total_difficulty: bh.total_difficulty.clone(),
+            total_difficulty: bh.total_difficulty(),
         }
     }
 }
@@ -169,6 +169,6 @@ fn _prepare_block_nosum(
         Ok(b) => b,
     };
     b.header.timestamp = prev.timestamp + Duration::seconds(60);
-    b.header.total_difficulty = Difficulty::from_num(diff);
+    b.header.pow.total_difficulty = Difficulty::from_num(diff);
     b
 }
@@ -77,9 +77,9 @@ fn mine_empty_chain() {
         } else {
             global::min_sizeshift()
         };
-        b.header.pow.cuckoo_sizeshift = sizeshift;
+        b.header.pow.proof.cuckoo_sizeshift = sizeshift;
         pow::pow_size(&mut b.header, difficulty, global::proofsize(), sizeshift).unwrap();
-        b.header.pow.cuckoo_sizeshift = sizeshift;
+        b.header.pow.proof.cuckoo_sizeshift = sizeshift;

         let bhash = b.hash();
         chain.process_block(b, chain::Options::MINE).unwrap();
@@ -390,9 +390,9 @@ fn output_header_mappings() {
         } else {
             global::min_sizeshift()
         };
-        b.header.pow.cuckoo_sizeshift = sizeshift;
+        b.header.pow.proof.cuckoo_sizeshift = sizeshift;
         pow::pow_size(&mut b.header, difficulty, global::proofsize(), sizeshift).unwrap();
-        b.header.pow.cuckoo_sizeshift = sizeshift;
+        b.header.pow.proof.cuckoo_sizeshift = sizeshift;

         chain.process_block(b, chain::Options::MINE).unwrap();

@@ -479,8 +479,8 @@ where
         Ok(b) => b,
     };
     b.header.timestamp = prev.timestamp + Duration::seconds(60);
-    b.header.total_difficulty = prev.total_difficulty.clone() + Difficulty::from_num(diff);
-    b.header.pow = core::core::Proof::random(proof_size);
+    b.header.pow.total_difficulty = prev.total_difficulty() + Difficulty::from_num(diff);
+    b.header.pow.proof = core::core::Proof::random(proof_size);
     b
 }

@@ -105,16 +105,16 @@ pub const HARD_FORK_INTERVAL: u64 = 250_000;
 /// 6 months interval scheduled hard forks for the first 2 years.
 pub fn valid_header_version(height: u64, version: u16) -> bool {
     // uncomment below as we go from hard fork to hard fork
-    if height <= HARD_FORK_INTERVAL && version == 1 {
-        true
-    /* } else if height <= 2 * HARD_FORK_INTERVAL && version == 2 {
-        true */
-    /* } else if height <= 3 * HARD_FORK_INTERVAL && version == 3 {
-        true */
-    /* } else if height <= 4 * HARD_FORK_INTERVAL && version == 4 {
-        true */
-    /* } else if height > 4 * HARD_FORK_INTERVAL && version > 4 {
-        true */
+    if height < HEADER_V2_HARD_FORK {
+        version == 1
+    } else if height < HARD_FORK_INTERVAL {
+        version == 2
+    } else if height < 2 * HARD_FORK_INTERVAL {
+        version == 3
+    /* } else if height < 3 * HARD_FORK_INTERVAL {
+        version == 4 */
+    /* } else if height >= 4 * HARD_FORK_INTERVAL {
+        version > 4 */
     } else {
         false
     }
@@ -249,3 +249,5 @@ pub trait VerifySortOrder<T> {
     /// Verify a collection of items is sorted as required.
     fn verify_sort_order(&self) -> Result<(), Error>;
 }
+
+pub const HEADER_V2_HARD_FORK: u64 = 95_000;
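The schedule encoded above, restated as a small standalone sketch. The helper name `expected_header_version` is hypothetical; the heights come from `HEADER_V2_HARD_FORK` and `HARD_FORK_INTERVAL` in this file, and the assertions mirror the updated `hard_forks` test further down.

```rust
// Hypothetical helper restating the schedule in valid_header_version above.
const HEADER_V2_HARD_FORK: u64 = 95_000;
const HARD_FORK_INTERVAL: u64 = 250_000;

fn expected_header_version(height: u64) -> Option<u16> {
    if height < HEADER_V2_HARD_FORK {
        Some(1)
    } else if height < HARD_FORK_INTERVAL {
        Some(2)
    } else if height < 2 * HARD_FORK_INTERVAL {
        Some(3)
    } else {
        None // later forks are still commented out in valid_header_version
    }
}

fn main() {
    assert_eq!(expected_header_version(0), Some(1));
    assert_eq!(expected_header_version(94_999), Some(1));
    assert_eq!(expected_header_version(95_000), Some(2));
    assert_eq!(expected_header_version(250_000), Some(3));
    assert_eq!(expected_header_version(500_000), None);
}
```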
@@ -109,6 +109,79 @@ impl fmt::Display for Error {
     }
 }

+/// Block header information pertaining to the proof of work
+#[derive(Clone, Debug, PartialEq)]
+pub struct ProofOfWork {
+    /// Total accumulated difficulty since genesis block
+    pub total_difficulty: Difficulty,
+    /// Difficulty scaling factor between the different proofs of work
+    pub scaling_difficulty: u64,
+    /// Nonce increment used to mine this block.
+    pub nonce: u64,
+    /// Proof of work data.
+    pub proof: Proof,
+}
+
+impl Default for ProofOfWork {
+    fn default() -> ProofOfWork {
+        let proof_size = global::proofsize();
+        ProofOfWork {
+            total_difficulty: Difficulty::one(),
+            scaling_difficulty: 1,
+            nonce: 0,
+            proof: Proof::zero(proof_size),
+        }
+    }
+}
+
+impl ProofOfWork {
+    /// Read implementation, can't define as trait impl as we need a version
+    fn read(ver: u16, reader: &mut Reader) -> Result<ProofOfWork, ser::Error> {
+        let (total_difficulty, scaling_difficulty) = if ver == 1 {
+            // read earlier in the header on older versions
+            (Difficulty::one(), 1)
+        } else {
+            (Difficulty::read(reader)?, reader.read_u64()?)
+        };
+        let nonce = reader.read_u64()?;
+        let proof = Proof::read(reader)?;
+        Ok(ProofOfWork { total_difficulty, scaling_difficulty, nonce, proof })
+    }
+
+    /// Write implementation, can't define as trait impl as we need a version
+    fn write<W: Writer>(&self, ver: u16, writer: &mut W) -> Result<(), ser::Error> {
+        if writer.serialization_mode() != ser::SerializationMode::Hash {
+            self.write_pre_pow(ver, writer)?;
+        }
+
+        self.proof.write(writer)?;
+        Ok(())
+    }
+
+    /// Write the pre-hash portion of the header
+    pub fn write_pre_pow<W: Writer>(&self, ver: u16, writer: &mut W) -> Result<(), ser::Error> {
+        if ver > 1 {
+            ser_multiwrite!(
+                writer,
+                [write_u64, self.total_difficulty.to_num()],
+                [write_u64, self.scaling_difficulty]
+            );
+        }
+        writer.write_u64(self.nonce)?;
+        Ok(())
+    }
+
+    /// Maximum difficulty this proof of work can achieve
+    pub fn to_difficulty(&self) -> Difficulty {
+        self.proof.to_difficulty()
+    }
+
+    /// The shift used for the cuckoo cycle size on this proof
+    pub fn cuckoo_sizeshift(&self) -> u8 {
+        self.proof.cuckoo_sizeshift
+    }
+}
+
 /// Block header, fairly standard compared to other blockchains.
 #[derive(Clone, Debug, PartialEq)]
 pub struct BlockHeader {
@@ -120,8 +193,6 @@ pub struct BlockHeader {
     pub previous: Hash,
     /// Timestamp at which the block was built.
     pub timestamp: DateTime<Utc>,
-    /// Total accumulated difficulty since genesis block
-    pub total_difficulty: Difficulty,
     /// Merklish root of all the commitments in the TxHashSet
     pub output_root: Hash,
     /// Merklish root of all range proofs in the TxHashSet
@@ -139,20 +210,17 @@ pub struct BlockHeader {
     pub output_mmr_size: u64,
     /// Total size of the kernel MMR after applying this block
     pub kernel_mmr_size: u64,
-    /// Nonce increment used to mine this block.
-    pub nonce: u64,
-    /// Proof of work data.
-    pub pow: Proof,
+    /// Proof of work and related
+    pub pow: ProofOfWork,
 }

 /// Serialized size of fixed part of a BlockHeader, i.e. without pow
-fn fixed_size_of_serialized_header() -> usize {
+fn fixed_size_of_serialized_header(version: u16) -> usize {
     let mut size: usize = 0;
     size += mem::size_of::<u16>(); // version
     size += mem::size_of::<u64>(); // height
     size += mem::size_of::<Hash>(); // previous
     size += mem::size_of::<u64>(); // timestamp
-    size += mem::size_of::<Difficulty>(); // total_difficulty
     size += mem::size_of::<Hash>(); // output_root
     size += mem::size_of::<Hash>(); // range_proof_root
     size += mem::size_of::<Hash>(); // kernel_root
@@ -160,13 +228,17 @@ fn fixed_size_of_serialized_header() -> usize {
     size += mem::size_of::<Commitment>(); // total_kernel_sum
     size += mem::size_of::<u64>(); // output_mmr_size
     size += mem::size_of::<u64>(); // kernel_mmr_size
+    size += mem::size_of::<Difficulty>(); // total_difficulty
+    if version >= 2 {
+        size += mem::size_of::<u64>(); // scaling_difficulty
+    }
     size += mem::size_of::<u64>(); // nonce
     size
 }

 /// Serialized size of a BlockHeader
-pub fn serialized_size_of_header(cuckoo_sizeshift: u8) -> usize {
-    let mut size = fixed_size_of_serialized_header();
+pub fn serialized_size_of_header(version: u16, cuckoo_sizeshift: u8) -> usize {
+    let mut size = fixed_size_of_serialized_header(version);

     size += mem::size_of::<u8>(); // pow.cuckoo_sizeshift
     let nonce_bits = cuckoo_sizeshift as usize - 1;
@@ -180,13 +252,11 @@ pub fn serialized_size_of_header(cuckoo_sizeshift: u8) -> usize {

 impl Default for BlockHeader {
     fn default() -> BlockHeader {
-        let proof_size = global::proofsize();
         BlockHeader {
             version: 1,
             height: 0,
             previous: ZERO_HASH,
             timestamp: DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(0, 0), Utc),
-            total_difficulty: Difficulty::one(),
             output_root: ZERO_HASH,
             range_proof_root: ZERO_HASH,
             kernel_root: ZERO_HASH,
@@ -194,8 +264,7 @@ impl Default for BlockHeader {
             total_kernel_sum: Commitment::from_vec(vec![0; 33]),
             output_mmr_size: 0,
             kernel_mmr_size: 0,
-            nonce: 0,
-            pow: Proof::zero(proof_size),
+            pow: ProofOfWork::default(),
         }
     }
 }
@@ -206,8 +275,7 @@ impl Writeable for BlockHeader {
         if writer.serialization_mode() != ser::SerializationMode::Hash {
             self.write_pre_pow(writer)?;
         }

-        self.pow.write(writer)?;
+        self.pow.write(self.version, writer)?;
         Ok(())
     }
 }
@@ -218,15 +286,20 @@ impl Readable for BlockHeader {
         let (version, height) = ser_multiread!(reader, read_u16, read_u64);
         let previous = Hash::read(reader)?;
         let timestamp = reader.read_i64()?;
-        let total_difficulty = Difficulty::read(reader)?;
+        let mut total_difficulty = None;
+        if version == 1 {
+            total_difficulty = Some(Difficulty::read(reader)?);
+        }
         let output_root = Hash::read(reader)?;
         let range_proof_root = Hash::read(reader)?;
         let kernel_root = Hash::read(reader)?;
         let total_kernel_offset = BlindingFactor::read(reader)?;
         let total_kernel_sum = Commitment::read(reader)?;
-        let (output_mmr_size, kernel_mmr_size, nonce) =
-            ser_multiread!(reader, read_u64, read_u64, read_u64);
-        let pow = Proof::read(reader)?;
+        let (output_mmr_size, kernel_mmr_size) = ser_multiread!(reader, read_u64, read_u64);
+        let mut pow = ProofOfWork::read(version, reader)?;
+        if version == 1 {
+            pow.total_difficulty = total_difficulty.unwrap();
+        }

         if timestamp > MAX_DATE.and_hms(0, 0, 0).timestamp()
             || timestamp < MIN_DATE.and_hms(0, 0, 0).timestamp()
@@ -239,7 +312,6 @@ impl Readable for BlockHeader {
             height,
             previous,
             timestamp: DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(timestamp, 0), Utc),
-            total_difficulty,
             output_root,
             range_proof_root,
             kernel_root,
@@ -247,7 +319,6 @@ impl Readable for BlockHeader {
             total_kernel_sum,
             output_mmr_size,
             kernel_mmr_size,
-            nonce,
             pow,
         })
     }
@@ -261,30 +332,41 @@ impl BlockHeader {
             [write_u16, self.version],
             [write_u64, self.height],
             [write_fixed_bytes, &self.previous],
-            [write_i64, self.timestamp.timestamp()],
-            [write_u64, self.total_difficulty.to_num()],
+            [write_i64, self.timestamp.timestamp()]
+        );
+        if self.version == 1 {
+            // written as part of the ProofOfWork in later versions
+            writer.write_u64(self.pow.total_difficulty.to_num())?;
+        }
+        ser_multiwrite!(
+            writer,
             [write_fixed_bytes, &self.output_root],
             [write_fixed_bytes, &self.range_proof_root],
             [write_fixed_bytes, &self.kernel_root],
             [write_fixed_bytes, &self.total_kernel_offset],
             [write_fixed_bytes, &self.total_kernel_sum],
             [write_u64, self.output_mmr_size],
-            [write_u64, self.kernel_mmr_size],
-            [write_u64, self.nonce]
+            [write_u64, self.kernel_mmr_size]
         );
         Ok(())
     }
     ///
     /// Returns the pre-pow hash, as the post-pow hash
     /// should just be the hash of the POW
     pub fn pre_pow_hash(&self) -> Hash {
         let mut hasher = HashWriter::default();
         self.write_pre_pow(&mut hasher).unwrap();
+        self.pow.write_pre_pow(self.version, &mut hasher).unwrap();
         let mut ret = [0; 32];
         hasher.finalize(&mut ret);
         Hash(ret)
     }

+    /// Total difficulty accumulated by the proof of work on this header
+    pub fn total_difficulty(&self) -> Difficulty {
+        self.pow.total_difficulty.clone()
+    }
+
     /// The "overage" to use when verifying the kernel sums.
     /// For a block header the overage is 0 - reward.
     pub fn overage(&self) -> i64 {
@@ -304,10 +386,10 @@ impl BlockHeader {

     /// Serialized size of this header
     pub fn serialized_size(&self) -> usize {
-        let mut size = fixed_size_of_serialized_header();
+        let mut size = fixed_size_of_serialized_header(self.version);

         size += mem::size_of::<u8>(); // pow.cuckoo_sizeshift
-        let nonce_bits = self.pow.cuckoo_sizeshift as usize - 1;
+        let nonce_bits = self.pow.cuckoo_sizeshift() as usize - 1;
         let bitvec_len = global::proofsize() * nonce_bits;
         size += bitvec_len / 8; // pow.nonces
         if bitvec_len % 8 != 0 {
@@ -418,7 +500,7 @@ impl Block {
         // Now set the pow on the header so block hashing works as expected.
         {
             let proof_size = global::proofsize();
-            block.header.pow = Proof::random(proof_size);
+            block.header.pow.proof = Proof::random(proof_size);
         }

         Ok(block)
@@ -515,17 +597,27 @@ impl Block {
         let now = Utc::now().timestamp();
         let timestamp = DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(now, 0), Utc);

+        let version = if prev.height + 1 < consensus::HEADER_V2_HARD_FORK {
+            1
+        } else {
+            2
+        };
+
         // Now build the block with all the above information.
         // Note: We have not validated the block here.
         // Caller must validate the block as necessary.
         Block {
             header: BlockHeader {
+                version,
                 height: prev.height + 1,
                 timestamp,
                 previous: prev.hash(),
-                total_difficulty: difficulty + prev.total_difficulty,
                 total_kernel_offset,
                 total_kernel_sum,
+                pow: ProofOfWork {
+                    total_difficulty: difficulty + prev.pow.total_difficulty,
+                    ..Default::default()
+                },
                 ..Default::default()
             },
             body: agg_tx.into(),
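Putting the Writeable/Readable changes above together, the pre-pow field order works out as follows for the two header versions. The constants below are purely illustrative (the real writers emit binary, not field names), but the ordering follows BlockHeader::write_pre_pow plus ProofOfWork::write_pre_pow in the hunks above.

```rust
// Pre-pow field order implied by the serialization hunks above. Illustrative only.
const PRE_POW_V1: &[&str] = &[
    "version", "height", "previous", "timestamp",
    "total_difficulty", // v1 keeps it in the legacy position after timestamp
    "output_root", "range_proof_root", "kernel_root",
    "total_kernel_offset", "total_kernel_sum",
    "output_mmr_size", "kernel_mmr_size",
    "nonce", // written by ProofOfWork::write_pre_pow
];

const PRE_POW_V2: &[&str] = &[
    "version", "height", "previous", "timestamp",
    "output_root", "range_proof_root", "kernel_root",
    "total_kernel_offset", "total_kernel_sum",
    "output_mmr_size", "kernel_mmr_size",
    "total_difficulty", "scaling_difficulty", // moved into ProofOfWork for v2+
    "nonce",
];

fn main() {
    // The only structural differences are where total_difficulty sits and the
    // extra scaling_difficulty word in version 2 headers.
    assert_eq!(PRE_POW_V2.len(), PRE_POW_V1.len() + 1);
}
```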
@@ -29,7 +29,10 @@ pub fn genesis_dev() -> core::Block {
         height: 0,
         previous: core::hash::Hash([0xff; 32]),
         timestamp: Utc.ymd(1997, 8, 4).and_hms(0, 0, 0),
-        nonce: global::get_genesis_nonce(),
+        pow: core::ProofOfWork {
+            nonce: global::get_genesis_nonce(),
+            ..Default::default()
+        },
         ..Default::default()
     })
 }
@@ -39,15 +42,19 @@ pub fn genesis_dev() -> core::Block {
 pub fn genesis_testnet1() -> core::Block {
     core::Block::with_header(core::BlockHeader {
         height: 0,
         previous: core::hash::Hash([0xff; 32]),
         timestamp: Utc.ymd(2017, 11, 16).and_hms(20, 0, 0),
-        nonce: 28205,
-        pow: core::Proof::new(vec![
-            0x21e, 0x7a2, 0xeae, 0x144e, 0x1b1c, 0x1fbd, 0x203a, 0x214b, 0x293b, 0x2b74, 0x2bfa,
-            0x2c26, 0x32bb, 0x346a, 0x34c7, 0x37c5, 0x4164, 0x42cc, 0x4cc3, 0x55af, 0x5a70, 0x5b14,
-            0x5e1c, 0x5f76, 0x6061, 0x60f9, 0x61d7, 0x6318, 0x63a1, 0x63fb, 0x649b, 0x64e5, 0x65a1,
-            0x6b69, 0x70f8, 0x71c7, 0x71cd, 0x7492, 0x7b11, 0x7db8, 0x7f29, 0x7ff8,
-        ]),
+        pow: core::ProofOfWork {
+            total_difficulty: Difficulty::one(),
+            scaling_difficulty: 1,
+            nonce: 28205,
+            proof: core::Proof::new(vec![
+                0x21e, 0x7a2, 0xeae, 0x144e, 0x1b1c, 0x1fbd, 0x203a, 0x214b, 0x293b, 0x2b74,
+                0x2bfa, 0x2c26, 0x32bb, 0x346a, 0x34c7, 0x37c5, 0x4164, 0x42cc, 0x4cc3, 0x55af,
+                0x5a70, 0x5b14, 0x5e1c, 0x5f76, 0x6061, 0x60f9, 0x61d7, 0x6318, 0x63a1, 0x63fb,
+                0x649b, 0x64e5, 0x65a1, 0x6b69, 0x70f8, 0x71c7, 0x71cd, 0x7492, 0x7b11, 0x7db8,
+                0x7f29, 0x7ff8,
+            ]),
+        },
         ..Default::default()
     })
 }
@@ -58,16 +65,19 @@ pub fn genesis_testnet2() -> core::Block {
         height: 0,
         previous: core::hash::Hash([0xff; 32]),
         timestamp: Utc.ymd(2018, 3, 26).and_hms(16, 0, 0),
-        total_difficulty: Difficulty::from_num(global::initial_block_difficulty()),
-        nonce: 1060,
-        pow: core::Proof::new(vec![
-            0x1940730, 0x333b9d0, 0x4739d6f, 0x4c6cfb1, 0x6e3d6c3, 0x74408a3, 0x7ba2bd2, 0x83e2024,
-            0x8ca22b5, 0x9d39ab8, 0xb6646dd, 0xc6698b6, 0xc6f78fe, 0xc99b662, 0xcf2ae8c, 0xcf41eed,
-            0xdd073e6, 0xded6af8, 0xf08d1a5, 0x1156a144, 0x11d1160a, 0x131bb0a5, 0x137ad703,
-            0x13b0831f, 0x1421683f, 0x147e3c1f, 0x1496fda0, 0x150ba22b, 0x15cc5bc6, 0x16edf697,
-            0x17ced40c, 0x17d84f9e, 0x18a515c1, 0x19320d9c, 0x19da4f6d, 0x1b50bcb1, 0x1b8bc72f,
-            0x1c7b6964, 0x1d07b3a9, 0x1d189d4d, 0x1d1f9a15, 0x1dafcd41,
-        ]),
+        pow: core::ProofOfWork {
+            total_difficulty: Difficulty::from_num(global::initial_block_difficulty()),
+            scaling_difficulty: 1,
+            nonce: 1060,
+            proof: core::Proof::new(vec![
+                0x1940730, 0x333b9d0, 0x4739d6f, 0x4c6cfb1, 0x6e3d6c3, 0x74408a3, 0x7ba2bd2,
+                0x83e2024, 0x8ca22b5, 0x9d39ab8, 0xb6646dd, 0xc6698b6, 0xc6f78fe, 0xc99b662,
+                0xcf2ae8c, 0xcf41eed, 0xdd073e6, 0xded6af8, 0xf08d1a5, 0x1156a144, 0x11d1160a,
+                0x131bb0a5, 0x137ad703, 0x13b0831f, 0x1421683f, 0x147e3c1f, 0x1496fda0, 0x150ba22b,
+                0x15cc5bc6, 0x16edf697, 0x17ced40c, 0x17d84f9e, 0x18a515c1, 0x19320d9c, 0x19da4f6d,
+                0x1b50bcb1, 0x1b8bc72f, 0x1c7b6964, 0x1d07b3a9, 0x1d189d4d, 0x1d1f9a15, 0x1dafcd41,
+            ]),
+        },
         ..Default::default()
     })
 }
@@ -78,16 +88,19 @@ pub fn genesis_testnet3() -> core::Block {
         height: 0,
         previous: core::hash::Hash([0xff; 32]),
         timestamp: Utc.ymd(2018, 7, 8).and_hms(18, 0, 0),
-        total_difficulty: Difficulty::from_num(global::initial_block_difficulty()),
-        nonce: 4956988373127691,
-        pow: core::Proof::new(vec![
-            0xa420dc, 0xc8ffee, 0x10e433e, 0x1de9428, 0x2ed4cea, 0x52d907b, 0x5af0e3f, 0x6b8fcae,
-            0x8319b53, 0x845ca8c, 0x8d2a13e, 0x8d6e4cc, 0x9349e8d, 0xa7a33c5, 0xaeac3cb, 0xb193e23,
-            0xb502e19, 0xb5d9804, 0xc9ac184, 0xd4f4de3, 0xd7a23b8, 0xf1d8660, 0xf443756,
-            0x10b833d2, 0x11418fc5, 0x11b8aeaf, 0x131836ec, 0x132ab818, 0x13a46a55, 0x13df89fe,
-            0x145d65b5, 0x166f9c3a, 0x166fe0ef, 0x178cb36f, 0x185baf68, 0x1bbfe563, 0x1bd637b4,
-            0x1cfc8382, 0x1d1ed012, 0x1e391ca5, 0x1e999b4c, 0x1f7c6d21,
-        ]),
+        pow: core::ProofOfWork {
+            total_difficulty: Difficulty::from_num(global::initial_block_difficulty()),
+            scaling_difficulty: 1,
+            nonce: 4956988373127691,
+            proof: core::Proof::new(vec![
+                0xa420dc, 0xc8ffee, 0x10e433e, 0x1de9428, 0x2ed4cea, 0x52d907b, 0x5af0e3f,
+                0x6b8fcae, 0x8319b53, 0x845ca8c, 0x8d2a13e, 0x8d6e4cc, 0x9349e8d, 0xa7a33c5,
+                0xaeac3cb, 0xb193e23, 0xb502e19, 0xb5d9804, 0xc9ac184, 0xd4f4de3, 0xd7a23b8,
+                0xf1d8660, 0xf443756, 0x10b833d2, 0x11418fc5, 0x11b8aeaf, 0x131836ec, 0x132ab818,
+                0x13a46a55, 0x13df89fe, 0x145d65b5, 0x166f9c3a, 0x166fe0ef, 0x178cb36f, 0x185baf68,
+                0x1bbfe563, 0x1bd637b4, 0x1cfc8382, 0x1d1ed012, 0x1e391ca5, 0x1e999b4c, 0x1f7c6d21,
+            ]),
+        },
         ..Default::default()
     })
 }
@@ -99,9 +112,12 @@ pub fn genesis_main() -> core::Block {
         height: 0,
         previous: core::hash::Hash([0xff; 32]),
         timestamp: Utc.ymd(2018, 8, 14).and_hms(0, 0, 0),
-        total_difficulty: Difficulty::from_num(global::initial_block_difficulty()),
-        nonce: global::get_genesis_nonce(),
-        pow: core::Proof::zero(consensus::PROOFSIZE),
+        pow: core::ProofOfWork {
+            total_difficulty: Difficulty::from_num(global::initial_block_difficulty()),
+            scaling_difficulty: 1,
+            nonce: global::get_genesis_nonce(),
+            proof: core::Proof::zero(consensus::PROOFSIZE),
+        },
         ..Default::default()
     })
 }
@@ -50,7 +50,7 @@ use pow::cuckoo::{Cuckoo, Error};
 /// satisfies the requirements of the header.
 pub fn verify_size(bh: &BlockHeader, cuckoo_sz: u8) -> bool {
     Cuckoo::from_hash(bh.pre_pow_hash().as_ref(), cuckoo_sz)
-        .verify(&bh.pow, consensus::EASINESS as u64)
+        .verify(&bh.pow.proof, consensus::EASINESS as u64)
 }

 /// Mines a genesis block using the internal miner
@@ -62,7 +62,7 @@ pub fn mine_genesis_block() -> Result<Block, Error> {
     }

     // total_difficulty on the genesis header *is* the difficulty of that block
-    let genesis_difficulty = gen.header.total_difficulty.clone();
+    let genesis_difficulty = gen.header.pow.total_difficulty.clone();

     let sz = global::min_sizeshift();
     let proof_size = global::proofsize();
@@ -80,11 +80,11 @@ pub fn pow_size(
     proof_size: usize,
     sz: u8,
 ) -> Result<(), Error> {
-    let start_nonce = bh.nonce;
+    let start_nonce = bh.pow.nonce;

     // set the nonce for faster solution finding in user testing
     if bh.height == 0 && global::is_user_testing_mode() {
-        bh.nonce = global::get_genesis_nonce();
+        bh.pow.nonce = global::get_genesis_nonce();
     }

     // try to find a cuckoo cycle on that header hash
@@ -93,18 +93,18 @@ pub fn pow_size(
         // diff, we're all good
         if let Ok(proof) = cuckoo::Miner::new(bh, consensus::EASINESS, proof_size, sz).mine() {
             if proof.to_difficulty() >= diff {
-                bh.pow = proof.clone();
+                bh.pow.proof = proof.clone();
                 return Ok(());
             }
         }

         // otherwise increment the nonce
-        let (res, _) = bh.nonce.overflowing_add(1);
-        bh.nonce = res;
+        let (res, _) = bh.pow.nonce.overflowing_add(1);
+        bh.pow.nonce = res;

         // and if we're back where we started, update the time (changes the hash as
         // well)
-        if bh.nonce == start_nonce {
+        if bh.pow.nonce == start_nonce {
             bh.timestamp = DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(0, 0), Utc);
         }
     }
@@ -122,15 +122,15 @@ mod test {
     #[test]
     fn genesis_pow() {
         let mut b = genesis::genesis_dev();
-        b.header.nonce = 485;
+        b.header.pow.nonce = 485;
         pow_size(
             &mut b.header,
             Difficulty::one(),
             global::proofsize(),
             global::min_sizeshift(),
         ).unwrap();
-        assert!(b.header.nonce != 310);
-        assert!(b.header.pow.to_difficulty() >= Difficulty::one());
+        assert!(b.header.pow.nonce != 310);
+        assert!(b.header.pow.proof.to_difficulty() >= Difficulty::one());
         assert!(verify_size(&b.header, global::min_sizeshift()));
     }
 }
@@ -25,7 +25,7 @@ pub mod common;

 use chrono::Duration;
 use common::{new_block, tx1i2o, tx2i1o, txspend1i1o};
-use grin_core::consensus::{BLOCK_OUTPUT_WEIGHT, MAX_BLOCK_WEIGHT};
+use grin_core::consensus::{self, BLOCK_OUTPUT_WEIGHT, MAX_BLOCK_WEIGHT};
 use grin_core::core::block::Error;
 use grin_core::core::hash::Hashed;
 use grin_core::core::id::ShortIdentifiable;
@@ -436,3 +436,26 @@ fn serialize_deserialize_compact_block() {
     assert_eq!(cb1.header, cb2.header);
     assert_eq!(cb1.kern_ids(), cb2.kern_ids());
 }
+
+#[test]
+fn empty_block_v2_switch() {
+    let keychain = ExtKeychain::from_random_seed().unwrap();
+    let mut prev = BlockHeader::default();
+    prev.height = consensus::HEADER_V2_HARD_FORK - 1;
+    let key_id = keychain.derive_key_id(1).unwrap();
+    let b = new_block(vec![], &keychain, &prev, &key_id);
+    let mut vec = Vec::new();
+    ser::serialize(&mut vec, &b).expect("serialization failed");
+    let target_len = 1_260;
+    assert_eq!(b.header.version, 2);
+    assert_eq!(vec.len(), target_len);
+
+    // another try right before v2
+    prev.height = consensus::HEADER_V2_HARD_FORK - 2;
+    let b = new_block(vec![], &keychain, &prev, &key_id);
+    let mut vec = Vec::new();
+    ser::serialize(&mut vec, &b).expect("serialization failed");
+    let target_len = 1_252;
+    assert_eq!(b.header.version, 1);
+    assert_eq!(vec.len(), target_len);
+}
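A quick sanity check on the two target_len values asserted in the new test above: per fixed_size_of_serialized_header in this commit, a version 2 header adds exactly one u64 (scaling_difficulty) and nothing else, which accounts for the 8-byte difference.

```rust
// Why the v2 header in empty_block_v2_switch is 8 bytes larger than the v1 one:
// fixed_size_of_serialized_header adds one u64 (scaling_difficulty) for
// version >= 2, and no other field changes at the hard fork height.
use std::mem;

fn main() {
    let v1_len = 1_252usize; // target_len asserted for the version 1 header
    let v2_len = 1_260usize; // target_len asserted for the version 2 header
    assert_eq!(v2_len - v1_len, mem::size_of::<u64>()); // scaling_difficulty
}
```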
@@ -498,12 +498,14 @@ fn next_target_adjustment() {
 }

 #[test]
-fn hard_fork_1() {
+fn hard_forks() {
     assert!(valid_header_version(0, 1));
     assert!(valid_header_version(10, 1));
     assert!(!valid_header_version(10, 2));
-    assert!(valid_header_version(250_000, 1));
-    assert!(!valid_header_version(250_001, 1));
+    assert!(valid_header_version(100_000, 2));
+    assert!(valid_header_version(249_999, 2));
+    assert!(valid_header_version(250_000, 3));
+    assert!(!valid_header_version(250_000, 1));
     assert!(!valid_header_version(500_000, 1));
     assert!(!valid_header_version(250_001, 2));
 }
@@ -313,7 +313,7 @@ impl Peers {
         debug!(
             LOGGER,
             "broadcast_block: {} @ {} [{}] was sent to {} peers.",
-            b.header.total_difficulty,
+            b.header.pow.total_difficulty,
             b.header.height,
             b.hash(),
             count,
@@ -331,7 +331,7 @@ impl Peers {
             LOGGER,
             "broadcast_compact_block: {}, {} at {}, to {} peers, done.",
             b.hash(),
-            b.header.total_difficulty,
+            b.header.pow.total_difficulty,
             b.header.height,
             count,
         );
@@ -348,7 +348,7 @@ impl Peers {
             LOGGER,
             "broadcast_header: {}, {} at {}, to {} peers, done.",
             bh.hash(),
-            bh.total_difficulty,
+            bh.pow.total_difficulty,
             bh.height,
             count,
         );
@@ -329,9 +329,10 @@ fn headers_header_size(conn: &mut TcpStream, msg_len: u64) -> Result<u64, Error>
     }
     let average_header_size = (msg_len - 2) / total_headers;

-    // support size of Cuckoo: from Cuckoo 30 to Cuckoo 36
-    let minimum_size = core::serialized_size_of_header(global::min_sizeshift());
-    let maximum_size = core::serialized_size_of_header(global::min_sizeshift() + 6);
+    // support size of Cuckoo: from Cuckoo 30 to Cuckoo 36, with version 2
+    // having slightly larger headers
+    let minimum_size = core::serialized_size_of_header(1, global::min_sizeshift());
+    let maximum_size = core::serialized_size_of_header(2, global::min_sizeshift() + 6);
     if average_header_size < minimum_size as u64 || average_header_size > maximum_size as u64 {
         debug!(
             LOGGER,
@@ -179,17 +179,17 @@ fn build_block(
     )?;

     let mut rng = rand::OsRng::new().unwrap();
-    b.header.nonce = rng.gen();
+    b.header.pow.nonce = rng.gen();
     b.header.timestamp = DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(now_sec, 0), Utc);;

-    let b_difficulty = (b.header.total_difficulty.clone() - head.total_difficulty.clone()).to_num();
+    let b_difficulty = (b.header.total_difficulty() - head.total_difficulty()).to_num();
     debug!(
         LOGGER,
         "Built new block with {} inputs and {} outputs, network difficulty: {}, cumulative difficulty {}",
         b.inputs().len(),
         b.outputs().len(),
         b_difficulty,
-        b.header.clone().total_difficulty.to_num(),
+        b.header.total_difficulty().to_num(),
     );

     // Now set txhashset roots and sizes on the header of the block being built.
@@ -476,8 +476,8 @@ impl StratumServer {
             }
             let mut b: Block = b.unwrap().clone();
             // Reconstruct the block header with this nonce and pow added
-            b.header.nonce = params.nonce;
-            b.header.pow.nonces = params.pow;
+            b.header.pow.nonce = params.nonce;
+            b.header.pow.proof.nonces = params.pow;
             // Get share difficulty
             share_difficulty = b.header.pow.to_difficulty().to_num();
             // If the difficulty is too low its an error
@@ -532,7 +532,7 @@ impl StratumServer {
                 "(Server ID: {}) Failed to validate share at height {} with nonce {} using job_id {}",
                 self.id,
                 params.height,
-                b.header.nonce,
+                b.header.pow.nonce,
                 params.job_id,
             );
             worker_stats.num_rejected += 1;
@@ -554,7 +554,7 @@ impl StratumServer {
                 self.id,
                 b.hash(),
                 b.header.height,
-                b.header.nonce,
+                b.header.pow.nonce,
                 share_difficulty,
                 self.current_difficulty,
                 submitted_by,
@@ -733,9 +733,8 @@ impl StratumServer {
                 self.current_key_id.clone(),
                 wallet_listener_url,
             );
-            self.current_difficulty = (new_block.header.total_difficulty.clone()
-                - head.total_difficulty.clone())
-                .to_num();
+            self.current_difficulty =
+                (new_block.header.total_difficulty() - head.total_difficulty).to_num();
             self.current_key_id = block_fees.key_id();
             current_hash = latest_hash;
             // set the minimum acceptable share difficulty for this block
@@ -89,7 +89,7 @@ impl Miner {
             self.debug_output_id,
             global::min_sizeshift(),
             attempt_time_per_block,
-            b.header.total_difficulty,
+            b.header.total_difficulty(),
             b.header.height,
             latest_hash
         );
@@ -105,14 +105,13 @@ impl Miner {
             ).mine()
             {
                 let proof_diff = proof.to_difficulty();
-                if proof_diff >= (b.header.total_difficulty.clone() - head.total_difficulty.clone())
-                {
+                if proof_diff >= (b.header.total_difficulty() - head.total_difficulty()) {
                     sol = Some(proof);
                     break;
                 }
             }

-            b.header.nonce += 1;
+            b.header.pow.nonce += 1;
             *latest_hash = self.chain.head().unwrap().last_block_h;
             iter_count += 1;
         }
@@ -165,7 +164,7 @@ impl Miner {

         // we found a solution, push our block through the chain processing pipeline
         if let Some(proof) = sol {
-            b.header.pow = proof;
+            b.header.pow.proof = proof;
             info!(
                 LOGGER,
                 "(Server ID: {}) Found valid proof of work, adding block {}.",