Dual proof of work, first step (#1534)

* Move Proof and Difficulty types to pow module. Difficulty scaling calculation.
* Difficulty scaling and size shift for 2nd PoW
* Backport e7eb26ee
* Test compilation fixes
* Scaling only needs to go one way, as @tromp pointed out (see the sketch below)
Ignotus Peverell 2018-09-18 15:12:57 -07:00 committed by GitHub
parent 7db8e5e2dd
commit 9cc7aed713
37 changed files with 516 additions and 512 deletions
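The heart of this change is the split difficulty calculation moved into core/src/pow/types.rs (shown in full further down): a primary Cuckoo proof gets a graph-size adjustment factor of 2^(N-M) * (N-1), while the secondary Cuckoo29 proof is instead scaled by the header's scaling_difficulty. Below is a rough standalone sketch of that branching, assuming the constants from this diff; the hash value in main() is purely illustrative, and the real code works on the Difficulty wrapper and the proof's hash rather than bare u64s.

// Sketch of ProofOfWork::to_difficulty() as introduced in this commit:
// primary proofs use the graph-size adjustment, the secondary PoW uses
// the header's scaling factor. Illustrative only.

const SECOND_POW_SIZESHIFT: u8 = 29; // secondary, ASIC-resistant PoW
const REFERENCE_SIZESHIFT: u8 = 30; // reference shift for the adjustment

/// Primary PoW: max target over the proof hash, adjusted by
/// 2^(N-M) * (N-1) for graph size N against reference M.
fn from_proof_adjusted(proof_hash: u64, shift: u8) -> u64 {
    let adjust_factor = (1u64 << (shift - REFERENCE_SIZESHIFT)) * (shift as u64 - 1);
    (u64::MAX / proof_hash) * adjust_factor
}

/// Secondary PoW: same hash-based target, divided by the header's
/// scaling_difficulty instead of the graph-size factor.
fn from_proof_scaled(proof_hash: u64, scaling: u64) -> u64 {
    (u64::MAX / scaling) / proof_hash
}

fn to_difficulty(proof_hash: u64, shift: u8, scaling_difficulty: u64) -> u64 {
    if shift == SECOND_POW_SIZESHIFT {
        from_proof_scaled(proof_hash, scaling_difficulty)
    } else {
        from_proof_adjusted(proof_hash, shift)
    }
}

fn main() {
    let hash = 1u64 << 40; // stand-in for the proof's hash as a u64
    println!("primary Cuckoo30:   {}", to_difficulty(hash, 30, 1));
    println!("primary Cuckoo31:   {}", to_difficulty(hash, 31, 1));
    println!("secondary Cuckoo29: {}", to_difficulty(hash, 29, 100));
}

As the commit message notes, scaling only goes one way: the primary path never sees scaling_difficulty, which validate_header enforces by requiring it to be 1 for primary proofs.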


@ -25,10 +25,10 @@ use lmdb;
use core::core::hash::{Hash, Hashed};
use core::core::merkle_proof::MerkleProof;
use core::core::target::Difficulty;
use core::core::verifier_cache::VerifierCache;
use core::core::{Block, BlockHeader, Output, OutputIdentifier, Transaction, TxKernel};
use core::global;
use core::pow::Difficulty;
use error::{Error, ErrorKind};
use grin_store::Error::NotFoundErr;
use pipe;


@ -48,6 +48,9 @@ pub enum ErrorKind {
/// Block header sizeshift is lower than our min
#[fail(display = "Cuckoo Size too Low")]
LowSizeshift,
/// Scaling factor between primary and secondary PoW is invalid
#[fail(display = "Wrong scaling factor")]
InvalidScaling,
/// The proof of work is invalid
#[fail(display = "Invalid PoW")]
InvalidPow,


@ -23,10 +23,10 @@ use chrono::Duration;
use chain::OrphanBlockPool;
use core::consensus;
use core::core::hash::{Hash, Hashed};
use core::core::target::Difficulty;
use core::core::verifier_cache::VerifierCache;
use core::core::{Block, BlockHeader};
use core::global;
use core::pow::Difficulty;
use error::{Error, ErrorKind};
use grin_store;
use store;
@ -392,14 +392,20 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
}
if !ctx.opts.contains(Options::SKIP_POW) {
if global::min_sizeshift() > header.pow.cuckoo_sizeshift() {
let shift = header.pow.cuckoo_sizeshift();
// size shift can either be larger than the minimum on the primary PoW
// or equal to the secondary PoW size shift
if shift != consensus::SECOND_POW_SIZESHIFT && global::min_sizeshift() > shift {
return Err(ErrorKind::LowSizeshift.into());
}
if !(ctx.pow_verifier)(header, header.pow.cuckoo_sizeshift()) {
// primary PoW must have a scaling factor of 1
if shift != consensus::SECOND_POW_SIZESHIFT && header.pow.scaling_difficulty != 1 {
return Err(ErrorKind::InvalidScaling.into());
}
if !(ctx.pow_verifier)(header, shift) {
error!(
LOGGER,
"pipe: validate_header failed for cuckoo shift size {}",
header.pow.cuckoo_sizeshift()
"pipe: validate_header bad cuckoo shift size {}", shift
);
return Err(ErrorKind::InvalidPow.into());
}
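Spelled out, the two new checks above amount to: a header's size shift must either equal SECOND_POW_SIZESHIFT or be at least the network's minimum size shift, and a header using the primary PoW must keep scaling_difficulty at exactly 1. A minimal self-contained sketch of that rule; the error variants mirror chain's ErrorKind, everything else here is a stand-in.

const SECOND_POW_SIZESHIFT: u8 = 29;

#[derive(Debug)]
enum CheckError {
    LowSizeshift,
    InvalidScaling,
}

fn check_pow_header(shift: u8, scaling_difficulty: u64, min_sizeshift: u8) -> Result<(), CheckError> {
    // size shift is either the secondary PoW shift or at least the primary minimum
    if shift != SECOND_POW_SIZESHIFT && min_sizeshift > shift {
        return Err(CheckError::LowSizeshift);
    }
    // primary PoW headers must not carry a scaling factor
    if shift != SECOND_POW_SIZESHIFT && scaling_difficulty != 1 {
        return Err(CheckError::InvalidScaling);
    }
    Ok(())
}

fn main() {
    assert!(check_pow_header(30, 1, 30).is_ok());   // primary at the minimum
    assert!(check_pow_header(29, 100, 30).is_ok()); // secondary, scaled
    assert!(check_pow_header(28, 1, 30).is_err());  // below the minimum
    assert!(check_pow_header(30, 2, 30).is_err());  // scaled primary rejected
}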
@ -461,7 +467,7 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
if target_difficulty != network_difficulty.clone() {
error!(
LOGGER,
"validate_header: BANNABLE OFFENCE: header target difficulty {} != {}",
"validate_header: header target difficulty {} != {}",
target_difficulty.to_num(),
network_difficulty.to_num()
);


@ -24,8 +24,8 @@ use util::secp::pedersen::Commitment;
use core::consensus::TargetError;
use core::core::hash::{Hash, Hashed};
use core::core::target::Difficulty;
use core::core::{Block, BlockHeader};
use core::pow::Difficulty;
use grin_store as store;
use grin_store::{option_to_not_found, to_key, u64_to_key, Error};
use types::Tip;


@ -15,8 +15,8 @@
//! Base types that the block chain pipeline requires.
use core::core::hash::{Hash, Hashed};
use core::core::target::Difficulty;
use core::core::{Block, BlockHeader};
use core::pow::Difficulty;
use core::ser;
bitflags! {


@ -28,11 +28,10 @@ use std::sync::{Arc, RwLock};
use chain::types::NoopAdapter;
use chain::Chain;
use core::core::target::Difficulty;
use core::core::verifier_cache::LruVerifierCache;
use core::core::{Block, BlockHeader, Transaction};
use core::global::{self, ChainTypes};
use core::pow;
use core::pow::{self, Difficulty};
use core::{consensus, genesis};
use keychain::{ExtKeychain, Keychain};
use wallet::libtx;


@ -28,10 +28,10 @@ use std::sync::{Arc, RwLock};
use chain::types::NoopAdapter;
use chain::Chain;
use core::core::hash::Hashed;
use core::core::target::Difficulty;
use core::core::verifier_cache::LruVerifierCache;
use core::core::{Block, BlockHeader, OutputFeatures, OutputIdentifier, Transaction};
use core::global::ChainTypes;
use core::pow::Difficulty;
use core::{consensus, global, pow};
use keychain::{ExtKeychain, Keychain};
use wallet::libtx::{self, build};
@ -480,7 +480,7 @@ where
};
b.header.timestamp = prev.timestamp + Duration::seconds(60);
b.header.pow.total_difficulty = prev.total_difficulty() + Difficulty::from_num(diff);
b.header.pow.proof = core::core::Proof::random(proof_size);
b.header.pow.proof = pow::Proof::random(proof_size);
b
}


@ -25,10 +25,9 @@ use std::sync::Arc;
use chain::Tip;
use core::core::hash::Hashed;
use core::core::target::Difficulty;
use core::core::{Block, BlockHeader};
use core::global::{self, ChainTypes};
use core::pow;
use core::pow::{self, Difficulty};
use keychain::{ExtKeychain, Keychain};
use wallet::libtx;


@ -27,10 +27,10 @@ use std::sync::{Arc, RwLock};
use chain::types::NoopAdapter;
use chain::ErrorKind;
use core::core::target::Difficulty;
use core::core::transaction;
use core::core::verifier_cache::LruVerifierCache;
use core::global::{self, ChainTypes};
use core::pow::Difficulty;
use core::{consensus, pow};
use keychain::{ExtKeychain, Keychain};
use wallet::libtx::{self, build};


@ -28,8 +28,8 @@ use std::sync::Arc;
use chain::store::ChainStore;
use chain::txhashset;
use chain::types::Tip;
use core::core::target::Difficulty;
use core::core::{Block, BlockHeader};
use core::pow::Difficulty;
use keychain::{ExtKeychain, Keychain};
use util::file;
use wallet::libtx::{build, reward};


@ -21,8 +21,8 @@
use std::cmp::max;
use std::fmt;
use core::target::Difficulty;
use global;
use pow::Difficulty;
/// A grin is divisible to 10^9, following the SI prefixes
pub const GRIN_BASE: u64 = 1_000_000_000;
@ -49,7 +49,7 @@ pub const BLOCK_TIME_SEC: u64 = 60;
/// Number of blocks before a coinbase matures and can be spent
/// set to nominal number of block in one day (1440 with 1-minute blocks)
pub const COINBASE_MATURITY: u64 = 24 * 60 * 60 / BLOCK_TIME_SEC;
/// Cuckoo-cycle proof size (cycle length)
pub const PROOFSIZE: usize = 42;
@ -57,6 +57,9 @@ pub const PROOFSIZE: usize = 42;
/// Default Cuckoo Cycle size shift used for mining and validating.
pub const DEFAULT_MIN_SIZESHIFT: u8 = 30;
/// Secondary proof-of-work size shift, meant to be ASIC resistant.
pub const SECOND_POW_SIZESHIFT: u8 = 29;
/// Original reference sizeshift to compute difficulty factors for higher
/// Cuckoo graph sizes, changing this would hard fork
pub const REFERENCE_SIZESHIFT: u8 = 30;
@ -250,4 +253,5 @@ pub trait VerifySortOrder<T> {
fn verify_sort_order(&self) -> Result<(), Error>;
}
/// Height for the v2 headers hard fork, with extended proof of work in header
pub const HEADER_V2_HARD_FORK: u64 = 95_000;


@ -26,14 +26,14 @@ use consensus::{self, reward, REWARD};
use core::committed::{self, Committed};
use core::compact_block::{CompactBlock, CompactBlockBody};
use core::hash::{Hash, HashWriter, Hashed, ZERO_HASH};
use core::target::Difficulty;
use core::verifier_cache::{LruVerifierCache, VerifierCache};
use core::{
transaction, Commitment, Input, KernelFeatures, Output, OutputFeatures, Proof, Transaction,
transaction, Commitment, Input, KernelFeatures, Output, OutputFeatures, Transaction,
TransactionBody, TxKernel,
};
use global;
use keychain::{self, BlindingFactor};
use pow::{Difficulty, Proof, ProofOfWork};
use ser::{self, Readable, Reader, Writeable, Writer};
use util::{secp, secp_static, static_secp_instance, LOGGER};
@ -109,91 +109,6 @@ impl fmt::Display for Error {
}
}
/// Block header information pertaining to the proof of work
#[derive(Clone, Debug, PartialEq)]
pub struct ProofOfWork {
/// Total accumulated difficulty since genesis block
pub total_difficulty: Difficulty,
/// Difficulty scaling factor between the different proofs of work
pub scaling_difficulty: u64,
/// Nonce increment used to mine this block.
pub nonce: u64,
/// Proof of work data.
pub proof: Proof,
}
impl Default for ProofOfWork {
fn default() -> ProofOfWork {
let proof_size = global::proofsize();
ProofOfWork {
total_difficulty: Difficulty::one(),
scaling_difficulty: 1,
nonce: 0,
proof: Proof::zero(proof_size),
}
}
}
impl ProofOfWork {
/// Read implementation, can't define as trait impl as we need a version
fn read(ver: u16, reader: &mut Reader) -> Result<ProofOfWork, ser::Error> {
let (total_difficulty, scaling_difficulty) = if ver == 1 {
// read earlier in the header on older versions
(Difficulty::one(), 1)
} else {
(Difficulty::read(reader)?, reader.read_u64()?)
};
let nonce = reader.read_u64()?;
let proof = Proof::read(reader)?;
Ok(ProofOfWork {
total_difficulty,
scaling_difficulty,
nonce,
proof,
})
}
/// Write implementation, can't define as trait impl as we need a version
fn write<W: Writer>(&self, ver: u16, writer: &mut W) -> Result<(), ser::Error> {
if writer.serialization_mode() != ser::SerializationMode::Hash {
self.write_pre_pow(ver, true, writer)?;
}
self.proof.write(writer)?;
Ok(())
}
/// Write the pre-hash portion of the header
pub fn write_pre_pow<W: Writer>(
&self,
ver: u16,
include_nonce: bool,
writer: &mut W,
) -> Result<(), ser::Error> {
if ver > 1 {
ser_multiwrite!(
writer,
[write_u64, self.total_difficulty.to_num()],
[write_u64, self.scaling_difficulty]
);
}
if include_nonce {
writer.write_u64(self.nonce)?;
}
Ok(())
}
/// Maximum difficulty this proof of work can achieve
pub fn to_difficulty(&self) -> Difficulty {
self.proof.to_difficulty()
}
/// The shift used for the cuckoo cycle size on this proof
pub fn cuckoo_sizeshift(&self) -> u8 {
self.proof.cuckoo_sizeshift
}
}
/// Block header, fairly standard compared to other blockchains.
#[derive(Clone, Debug, PartialEq)]
pub struct BlockHeader {
@ -368,9 +283,8 @@ impl BlockHeader {
pub fn pre_pow_hash(&self) -> Hash {
let mut hasher = HashWriter::default();
self.write_pre_pow(&mut hasher).unwrap();
self.pow
.write_pre_pow(self.version, true, &mut hasher)
.unwrap();
self.pow.write_pre_pow(self.version, &mut hasher).unwrap();
hasher.write_u64(self.pow.nonce).unwrap();
let mut ret = [0; 32];
hasher.finalize(&mut ret);
Hash(ret)
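One side effect of the refactor above: write_pre_pow no longer takes an include_nonce flag, so the nonce is written by ProofOfWork::write between the pre-PoW fields and the packed proof, and pre_pow_hash appends it explicitly. A tiny sketch of the resulting v2 field order; the byte encoding here is purely illustrative and the real Writer handles the actual serialization.

// Field order of the v2 ProofOfWork section after this change:
// total_difficulty, scaling_difficulty (pre-PoW), then nonce, then the
// bit-packed proof nonces. u64s shown big-endian for illustration only.

fn write_pre_pow(buf: &mut Vec<u8>, total_difficulty: u64, scaling_difficulty: u64) {
    buf.extend_from_slice(&total_difficulty.to_be_bytes());
    buf.extend_from_slice(&scaling_difficulty.to_be_bytes());
}

fn main() {
    let mut buf = Vec::new();
    write_pre_pow(&mut buf, 1, 1); // pre-PoW fields (v2 headers only)
    buf.extend_from_slice(&0u64.to_be_bytes()); // nonce, now written by `write`, not write_pre_pow
    // ... the bit-packed proof nonces would follow (see Proof::write)
    assert_eq!(buf.len(), 24);
}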


@ -22,14 +22,10 @@ pub mod hash;
pub mod id;
pub mod merkle_proof;
pub mod pmmr;
pub mod target;
pub mod transaction;
pub mod verifier_cache;
use consensus::GRIN_BASE;
#[allow(dead_code)]
use rand::{thread_rng, Rng};
use std::{fmt, iter};
use util::secp::pedersen::Commitment;
@ -39,165 +35,6 @@ pub use self::compact_block::*;
pub use self::compact_transaction::*;
pub use self::id::ShortId;
pub use self::transaction::*;
use core::hash::Hashed;
use global;
use ser::{self, Readable, Reader, Writeable, Writer};
/// A Cuckoo Cycle proof of work, consisting of the shift to get the graph
/// size (i.e. 31 for Cuckoo31 with a 2^31 or 1<<31 graph size) and the nonces
/// of the graph solution. While being expressed as u64 for simplicity, each
/// nonce is strictly less than half the cycle size (i.e. <2^30 for Cuckoo 31).
///
/// The hash of the `Proof` is the hash of its packed nonces when serializing
/// them at their exact bit size. The resulting bit sequence is padded to be
/// byte-aligned.
///
#[derive(Clone, PartialOrd, PartialEq)]
pub struct Proof {
/// Power of 2 used for the size of the cuckoo graph
pub cuckoo_sizeshift: u8,
/// The nonces
pub nonces: Vec<u64>,
}
impl fmt::Debug for Proof {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Cuckoo{}(", self.cuckoo_sizeshift)?;
for (i, val) in self.nonces[..].iter().enumerate() {
write!(f, "{:x}", val)?;
if i < self.nonces.len() - 1 {
write!(f, " ")?;
}
}
write!(f, ")")
}
}
impl Eq for Proof {}
impl Proof {
/// Builds a proof with provided nonces at default sizeshift
pub fn new(mut in_nonces: Vec<u64>) -> Proof {
in_nonces.sort();
Proof {
cuckoo_sizeshift: global::min_sizeshift(),
nonces: in_nonces,
}
}
/// Builds a proof with all bytes zeroed out
pub fn zero(proof_size: usize) -> Proof {
Proof {
cuckoo_sizeshift: global::min_sizeshift(),
nonces: vec![0; proof_size],
}
}
/// Builds a proof with random POW data,
/// needed so that tests that ignore POW
/// don't fail due to duplicate hashes
pub fn random(proof_size: usize) -> Proof {
let sizeshift = global::min_sizeshift();
let nonce_mask = (1 << (sizeshift - 1)) - 1;
let mut rng = thread_rng();
// force the random num to be within sizeshift bits
let mut v: Vec<u64> = iter::repeat(())
.map(|()| (rng.gen::<u32>() & nonce_mask) as u64)
.take(proof_size)
.collect();
v.sort();
Proof {
cuckoo_sizeshift: global::min_sizeshift(),
nonces: v,
}
}
/// Converts the proof to a proof-of-work Target so they can be compared.
/// Hashes the Cuckoo Proof data.
pub fn to_difficulty(&self) -> target::Difficulty {
target::Difficulty::from_hash_and_shift(&self.hash(), self.cuckoo_sizeshift)
}
/// Returns the proof size
pub fn proof_size(&self) -> usize {
self.nonces.len()
}
}
impl Readable for Proof {
fn read(reader: &mut Reader) -> Result<Proof, ser::Error> {
let cuckoo_sizeshift = reader.read_u8()?;
if cuckoo_sizeshift == 0 || cuckoo_sizeshift > 64 {
return Err(ser::Error::CorruptedData);
}
let mut nonces = Vec::with_capacity(global::proofsize());
let nonce_bits = cuckoo_sizeshift as usize - 1;
let bytes_len = BitVec::bytes_len(nonce_bits * global::proofsize());
let bits = reader.read_fixed_bytes(bytes_len)?;
let bitvec = BitVec { bits };
for n in 0..global::proofsize() {
let mut nonce = 0;
for bit in 0..nonce_bits {
if bitvec.bit_at(n * nonce_bits + (bit as usize)) {
nonce |= 1 << bit;
}
}
nonces.push(nonce);
}
Ok(Proof {
cuckoo_sizeshift,
nonces,
})
}
}
impl Writeable for Proof {
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ser::Error> {
if writer.serialization_mode() != ser::SerializationMode::Hash {
writer.write_u8(self.cuckoo_sizeshift)?;
}
let nonce_bits = self.cuckoo_sizeshift as usize - 1;
let mut bitvec = BitVec::new(nonce_bits * global::proofsize());
for (n, nonce) in self.nonces.iter().enumerate() {
for bit in 0..nonce_bits {
if nonce & (1 << bit) != 0 {
bitvec.set_bit_at(n * nonce_bits + (bit as usize))
}
}
}
writer.write_fixed_bytes(&bitvec.bits)?;
Ok(())
}
}
// TODO this could likely be optimized by writing whole bytes (or even words)
// in the `BitVec` at once, dealing with the truncation, instead of bits by bits
struct BitVec {
bits: Vec<u8>,
}
impl BitVec {
/// Number of bytes required to store the provided number of bits
fn bytes_len(bits_len: usize) -> usize {
(bits_len + 7) / 8
}
fn new(bits_len: usize) -> BitVec {
BitVec {
bits: vec![0; BitVec::bytes_len(bits_len)],
}
}
fn set_bit_at(&mut self, pos: usize) {
self.bits[pos / 8] |= 1 << (pos % 8) as u8;
}
fn bit_at(&self, pos: usize) -> bool {
self.bits[pos / 8] & (1 << (pos % 8) as u8) != 0
}
}
/// Common errors
#[derive(Fail, Debug)]


@ -1,180 +0,0 @@
// Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Definition of the maximum target value a proof-of-work block hash can have
//! and
//! the related difficulty, defined as the maximum target divided by the hash.
//!
//! Note this is now wrapping a simple U64 now, but it's desirable to keep the
//! wrapper in case the internal representation needs to change again
use std::fmt;
use std::ops::{Add, Div, Mul, Sub};
use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
use std::cmp::max;
use core::global;
use core::hash::Hash;
use ser::{self, Readable, Reader, Writeable, Writer};
/// The difficulty is defined as the maximum target divided by the block hash.
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Eq, Ord)]
pub struct Difficulty {
num: u64,
}
impl Difficulty {
/// Difficulty of zero, which is invalid (no target can be
/// calculated from it) but very useful as a start for additions.
pub fn zero() -> Difficulty {
Difficulty { num: 0 }
}
/// Difficulty of one, which is the minimum difficulty
/// (when the hash equals the max target)
pub fn one() -> Difficulty {
Difficulty { num: 1 }
}
/// Convert a `u32` into a `Difficulty`
pub fn from_num(num: u64) -> Difficulty {
Difficulty { num: num }
}
/// Computes the difficulty from a hash. Divides the maximum target by the
/// provided hash and applies the Cuckoo sizeshift adjustment factor (see
/// https://lists.launchpad.net/mimblewimble/msg00494.html).
pub fn from_hash_and_shift(h: &Hash, shift: u8) -> Difficulty {
let max_target = <u64>::max_value();
let num = h.to_u64();
// Adjust the difficulty based on a 2^(N-M)*(N-1) factor, with M being
// the minimum sizeshift and N the provided sizeshift
let adjust_factor = (1 << (shift - global::ref_sizeshift()) as u64) * (shift as u64 - 1);
Difficulty {
num: (max_target / max(num, adjust_factor)) * adjust_factor,
}
}
/// Converts the difficulty into a u64
pub fn to_num(&self) -> u64 {
self.num
}
}
impl fmt::Display for Difficulty {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.num)
}
}
impl Add<Difficulty> for Difficulty {
type Output = Difficulty;
fn add(self, other: Difficulty) -> Difficulty {
Difficulty {
num: self.num + other.num,
}
}
}
impl Sub<Difficulty> for Difficulty {
type Output = Difficulty;
fn sub(self, other: Difficulty) -> Difficulty {
Difficulty {
num: self.num - other.num,
}
}
}
impl Mul<Difficulty> for Difficulty {
type Output = Difficulty;
fn mul(self, other: Difficulty) -> Difficulty {
Difficulty {
num: self.num * other.num,
}
}
}
impl Div<Difficulty> for Difficulty {
type Output = Difficulty;
fn div(self, other: Difficulty) -> Difficulty {
Difficulty {
num: self.num / other.num,
}
}
}
impl Writeable for Difficulty {
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ser::Error> {
writer.write_u64(self.num)
}
}
impl Readable for Difficulty {
fn read(reader: &mut Reader) -> Result<Difficulty, ser::Error> {
let data = reader.read_u64()?;
Ok(Difficulty { num: data })
}
}
impl Serialize for Difficulty {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_u64(self.num)
}
}
impl<'de> Deserialize<'de> for Difficulty {
fn deserialize<D>(deserializer: D) -> Result<Difficulty, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_u64(DiffVisitor)
}
}
struct DiffVisitor;
impl<'de> de::Visitor<'de> for DiffVisitor {
type Value = Difficulty;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("a difficulty")
}
fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
where
E: de::Error,
{
let num_in = s.parse::<u64>();
if num_in.is_err() {
return Err(de::Error::invalid_value(
de::Unexpected::Str(s),
&"a value number",
));
};
Ok(Difficulty {
num: num_in.unwrap(),
})
}
fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
where
E: de::Error,
{
Ok(Difficulty { num: value })
}
}


@ -18,8 +18,8 @@ use chrono::prelude::{TimeZone, Utc};
use consensus;
use core;
use core::target::Difficulty;
use global;
use pow::{Difficulty, Proof, ProofOfWork};
/// Genesis block definition for development networks. The proof of work size
/// is small enough to mine it on the fly, so it does not contain its own
@ -29,7 +29,7 @@ pub fn genesis_dev() -> core::Block {
height: 0,
previous: core::hash::Hash([0xff; 32]),
timestamp: Utc.ymd(1997, 8, 4).and_hms(0, 0, 0),
pow: core::ProofOfWork {
pow: ProofOfWork {
nonce: global::get_genesis_nonce(),
..Default::default()
},
@ -43,11 +43,11 @@ pub fn genesis_testnet1() -> core::Block {
core::Block::with_header(core::BlockHeader {
height: 0,
timestamp: Utc.ymd(2017, 11, 16).and_hms(20, 0, 0),
pow: core::ProofOfWork {
pow: ProofOfWork {
total_difficulty: Difficulty::one(),
scaling_difficulty: 1,
nonce: 28205,
proof: core::Proof::new(vec![
proof: Proof::new(vec![
0x21e, 0x7a2, 0xeae, 0x144e, 0x1b1c, 0x1fbd, 0x203a, 0x214b, 0x293b, 0x2b74,
0x2bfa, 0x2c26, 0x32bb, 0x346a, 0x34c7, 0x37c5, 0x4164, 0x42cc, 0x4cc3, 0x55af,
0x5a70, 0x5b14, 0x5e1c, 0x5f76, 0x6061, 0x60f9, 0x61d7, 0x6318, 0x63a1, 0x63fb,
@ -65,11 +65,11 @@ pub fn genesis_testnet2() -> core::Block {
height: 0,
previous: core::hash::Hash([0xff; 32]),
timestamp: Utc.ymd(2018, 3, 26).and_hms(16, 0, 0),
pow: core::ProofOfWork {
pow: ProofOfWork {
total_difficulty: Difficulty::from_num(global::initial_block_difficulty()),
scaling_difficulty: 1,
nonce: 1060,
proof: core::Proof::new(vec![
proof: Proof::new(vec![
0x1940730, 0x333b9d0, 0x4739d6f, 0x4c6cfb1, 0x6e3d6c3, 0x74408a3, 0x7ba2bd2,
0x83e2024, 0x8ca22b5, 0x9d39ab8, 0xb6646dd, 0xc6698b6, 0xc6f78fe, 0xc99b662,
0xcf2ae8c, 0xcf41eed, 0xdd073e6, 0xded6af8, 0xf08d1a5, 0x1156a144, 0x11d1160a,
@ -88,11 +88,11 @@ pub fn genesis_testnet3() -> core::Block {
height: 0,
previous: core::hash::Hash([0xff; 32]),
timestamp: Utc.ymd(2018, 7, 8).and_hms(18, 0, 0),
pow: core::ProofOfWork {
pow: ProofOfWork {
total_difficulty: Difficulty::from_num(global::initial_block_difficulty()),
scaling_difficulty: 1,
nonce: 4956988373127691,
proof: core::Proof::new(vec![
proof: Proof::new(vec![
0xa420dc, 0xc8ffee, 0x10e433e, 0x1de9428, 0x2ed4cea, 0x52d907b, 0x5af0e3f,
0x6b8fcae, 0x8319b53, 0x845ca8c, 0x8d2a13e, 0x8d6e4cc, 0x9349e8d, 0xa7a33c5,
0xaeac3cb, 0xb193e23, 0xb502e19, 0xb5d9804, 0xc9ac184, 0xd4f4de3, 0xd7a23b8,
@ -112,11 +112,11 @@ pub fn genesis_main() -> core::Block {
height: 0,
previous: core::hash::Hash([0xff; 32]),
timestamp: Utc.ymd(2018, 8, 14).and_hms(0, 0, 0),
pow: core::ProofOfWork {
pow: ProofOfWork {
total_difficulty: Difficulty::from_num(global::initial_block_difficulty()),
scaling_difficulty: 1,
nonce: global::get_genesis_nonce(),
proof: core::Proof::zero(consensus::PROOFSIZE),
proof: Proof::zero(consensus::PROOFSIZE),
},
..Default::default()
})


@ -22,7 +22,7 @@ use consensus::{
DIFFICULTY_ADJUST_WINDOW, INITIAL_DIFFICULTY, MEDIAN_TIME_WINDOW, PROOFSIZE,
REFERENCE_SIZESHIFT,
};
use core::target::Difficulty;
use pow::Difficulty;
/// An enum collecting sets of parameters used throughout the
/// code wherever mining is needed. This should allow for
/// different sets of parameters for different purposes,
@ -145,8 +145,11 @@ pub fn coinbase_maturity(height: u64) -> u64 {
match *param_ref {
ChainTypes::AutomatedTesting => AUTOMATED_TESTING_COINBASE_MATURITY,
ChainTypes::UserTesting => USER_TESTING_COINBASE_MATURITY,
_ => if height < COINBASE_MATURITY_FORK_HEIGHT { OLD_COINBASE_MATURITY }
else { COINBASE_MATURITY },
_ => if height < COINBASE_MATURITY_FORK_HEIGHT {
OLD_COINBASE_MATURITY
} else {
COINBASE_MATURITY
},
}
}


@ -21,8 +21,8 @@ use std::cmp;
use std::collections::HashSet;
use core::BlockHeader;
use core::Proof;
use pow::siphash::siphash24;
use pow::Proof;
const MAXPATHLEN: usize = 8192;
@ -318,7 +318,6 @@ fn u8_to_u64(p: &[u8], i: usize) -> u64 {
mod test {
use super::*;
use blake2;
use core::Proof;
static V1: [u64; 42] = [
0x3bbd, 0x4e96, 0x1013b, 0x1172b, 0x1371b, 0x13e6a, 0x1aaa6, 0x1b575, 0x1e237, 0x1ee88,


@ -37,15 +37,17 @@ extern crate grin_util as util;
pub mod cuckoo;
mod siphash;
mod types;
use chrono::prelude::{DateTime, NaiveDateTime, Utc};
use consensus;
use core::target::Difficulty;
use core::{Block, BlockHeader};
use genesis;
use global;
use pow::cuckoo::{Cuckoo, Error};
pub use self::types::*;
/// Validates the proof of work of a given header, and that the proof of work
/// satisfies the requirements of the header.
pub fn verify_size(bh: &BlockHeader, cuckoo_sz: u8) -> bool {
@ -92,8 +94,8 @@ pub fn pow_size(
// if we found a cycle (not guaranteed) and the proof hash is higher than the
// diff, we're all good
if let Ok(proof) = cuckoo::Miner::new(bh, consensus::EASINESS, proof_size, sz).mine() {
if proof.to_difficulty() >= diff {
bh.pow.proof = proof.clone();
bh.pow.proof = proof;
if bh.pow.to_difficulty() >= diff {
return Ok(());
}
}
@ -113,7 +115,6 @@ pub fn pow_size(
#[cfg(test)]
mod test {
use super::*;
use core::target::Difficulty;
use genesis;
use global;
@ -130,7 +131,7 @@ mod test {
global::min_sizeshift(),
).unwrap();
assert!(b.header.pow.nonce != 310);
assert!(b.header.pow.proof.to_difficulty() >= Difficulty::one());
assert!(b.header.pow.to_difficulty() >= Difficulty::one());
assert!(verify_size(&b.header, global::min_sizeshift()));
}
}

core/src/pow/types.rs (new file, 425 lines)

@ -0,0 +1,425 @@
// Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/// Types for a Cuckoo proof of work and its encapsulation as a fully usable
/// proof of work within a block header.
use std::cmp::max;
use std::ops::{Add, Div, Mul, Sub};
use std::{fmt, iter};
use rand::{thread_rng, Rng};
use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
use consensus::SECOND_POW_SIZESHIFT;
use core::hash::Hashed;
use global;
use ser::{self, Readable, Reader, Writeable, Writer};
/// The difficulty is defined as the maximum target divided by the block hash.
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Eq, Ord)]
pub struct Difficulty {
num: u64,
}
impl Difficulty {
/// Difficulty of zero, which is invalid (no target can be
/// calculated from it) but very useful as a start for additions.
pub fn zero() -> Difficulty {
Difficulty { num: 0 }
}
/// Difficulty of one, which is the minimum difficulty
/// (when the hash equals the max target)
pub fn one() -> Difficulty {
Difficulty { num: 1 }
}
/// Convert a `u32` into a `Difficulty`
pub fn from_num(num: u64) -> Difficulty {
// can't have difficulty lower than 1
Difficulty { num: max(num, 1) }
}
/// Computes the difficulty from a hash. Divides the maximum target by the
/// provided hash and applies the Cuckoo sizeshift adjustment factor (see
/// https://lists.launchpad.net/mimblewimble/msg00494.html).
pub fn from_proof_adjusted(proof: &Proof) -> Difficulty {
let max_target = <u64>::max_value();
let target = proof.hash().to_u64();
let shift = proof.cuckoo_sizeshift;
// Adjust the difficulty based on a 2^(N-M)*(N-1) factor, with M being
// the minimum sizeshift and N the provided sizeshift
let adjust_factor = (1 << (shift - global::ref_sizeshift()) as u64) * (shift as u64 - 1);
let difficulty = (max_target / target) * adjust_factor;
Difficulty::from_num(difficulty)
}
/// Same as `from_proof_adjusted` but instead of an adjustment based on
/// cycle size, scales based on a provided factor. Used by dual PoW system
/// to scale one PoW against the other.
pub fn from_proof_scaled(proof: &Proof, scaling: u64) -> Difficulty {
let max_target = <u64>::max_value();
let target = proof.hash().to_u64();
// Scaling between 2 proof of work algos
Difficulty::from_num((max_target / scaling) / target)
}
/// Converts the difficulty into a u64
pub fn to_num(&self) -> u64 {
self.num
}
}
impl fmt::Display for Difficulty {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.num)
}
}
impl Add<Difficulty> for Difficulty {
type Output = Difficulty;
fn add(self, other: Difficulty) -> Difficulty {
Difficulty {
num: self.num + other.num,
}
}
}
impl Sub<Difficulty> for Difficulty {
type Output = Difficulty;
fn sub(self, other: Difficulty) -> Difficulty {
Difficulty {
num: self.num - other.num,
}
}
}
impl Mul<Difficulty> for Difficulty {
type Output = Difficulty;
fn mul(self, other: Difficulty) -> Difficulty {
Difficulty {
num: self.num * other.num,
}
}
}
impl Div<Difficulty> for Difficulty {
type Output = Difficulty;
fn div(self, other: Difficulty) -> Difficulty {
Difficulty {
num: self.num / other.num,
}
}
}
impl Writeable for Difficulty {
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ser::Error> {
writer.write_u64(self.num)
}
}
impl Readable for Difficulty {
fn read(reader: &mut Reader) -> Result<Difficulty, ser::Error> {
let data = reader.read_u64()?;
Ok(Difficulty { num: data })
}
}
impl Serialize for Difficulty {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_u64(self.num)
}
}
impl<'de> Deserialize<'de> for Difficulty {
fn deserialize<D>(deserializer: D) -> Result<Difficulty, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_u64(DiffVisitor)
}
}
struct DiffVisitor;
impl<'de> de::Visitor<'de> for DiffVisitor {
type Value = Difficulty;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("a difficulty")
}
fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
where
E: de::Error,
{
let num_in = s.parse::<u64>();
if num_in.is_err() {
return Err(de::Error::invalid_value(
de::Unexpected::Str(s),
&"a value number",
));
};
Ok(Difficulty {
num: num_in.unwrap(),
})
}
fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
where
E: de::Error,
{
Ok(Difficulty { num: value })
}
}
/// Block header information pertaining to the proof of work
#[derive(Clone, Debug, PartialEq)]
pub struct ProofOfWork {
/// Total accumulated difficulty since genesis block
pub total_difficulty: Difficulty,
/// Difficulty scaling factor between the different proofs of work
pub scaling_difficulty: u64,
/// Nonce increment used to mine this block.
pub nonce: u64,
/// Proof of work data.
pub proof: Proof,
}
impl Default for ProofOfWork {
fn default() -> ProofOfWork {
let proof_size = global::proofsize();
ProofOfWork {
total_difficulty: Difficulty::one(),
scaling_difficulty: 1,
nonce: 0,
proof: Proof::zero(proof_size),
}
}
}
impl ProofOfWork {
/// Read implementation, can't define as trait impl as we need a version
pub fn read(ver: u16, reader: &mut Reader) -> Result<ProofOfWork, ser::Error> {
let (total_difficulty, scaling_difficulty) = if ver == 1 {
// read earlier in the header on older versions
(Difficulty::one(), 1)
} else {
(Difficulty::read(reader)?, reader.read_u64()?)
};
let nonce = reader.read_u64()?;
let proof = Proof::read(reader)?;
Ok(ProofOfWork {
total_difficulty,
scaling_difficulty,
nonce,
proof,
})
}
/// Write implementation, can't define as trait impl as we need a version
pub fn write<W: Writer>(&self, ver: u16, writer: &mut W) -> Result<(), ser::Error> {
if writer.serialization_mode() != ser::SerializationMode::Hash {
self.write_pre_pow(ver, writer)?;
}
writer.write_u64(self.nonce)?;
self.proof.write(writer)?;
Ok(())
}
/// Write the pre-hash portion of the header
pub fn write_pre_pow<W: Writer>(&self, ver: u16, writer: &mut W) -> Result<(), ser::Error> {
if ver > 1 {
ser_multiwrite!(
writer,
[write_u64, self.total_difficulty.to_num()],
[write_u64, self.scaling_difficulty]
);
}
Ok(())
}
/// Maximum difficulty this proof of work can achieve
pub fn to_difficulty(&self) -> Difficulty {
// 2 proof of works, Cuckoo29 (for now) and Cuckoo30+, which are scaled
// differently (scaling not controlled for now)
if self.proof.cuckoo_sizeshift == SECOND_POW_SIZESHIFT {
Difficulty::from_proof_scaled(&self.proof, self.scaling_difficulty)
} else {
Difficulty::from_proof_adjusted(&self.proof)
}
}
/// The shift used for the cuckoo cycle size on this proof
pub fn cuckoo_sizeshift(&self) -> u8 {
self.proof.cuckoo_sizeshift
}
}
/// A Cuckoo Cycle proof of work, consisting of the shift to get the graph
/// size (i.e. 31 for Cuckoo31 with a 2^31 or 1<<31 graph size) and the nonces
/// of the graph solution. While being expressed as u64 for simplicity, each
/// nonce is strictly less than half the cycle size (i.e. <2^30 for Cuckoo 31).
///
/// The hash of the `Proof` is the hash of its packed nonces when serializing
/// them at their exact bit size. The resulting bit sequence is padded to be
/// byte-aligned.
///
#[derive(Clone, PartialOrd, PartialEq)]
pub struct Proof {
/// Power of 2 used for the size of the cuckoo graph
pub cuckoo_sizeshift: u8,
/// The nonces
pub nonces: Vec<u64>,
}
impl fmt::Debug for Proof {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Cuckoo{}(", self.cuckoo_sizeshift)?;
for (i, val) in self.nonces[..].iter().enumerate() {
write!(f, "{:x}", val)?;
if i < self.nonces.len() - 1 {
write!(f, " ")?;
}
}
write!(f, ")")
}
}
impl Eq for Proof {}
impl Proof {
/// Builds a proof with provided nonces at default sizeshift
pub fn new(mut in_nonces: Vec<u64>) -> Proof {
in_nonces.sort();
Proof {
cuckoo_sizeshift: global::min_sizeshift(),
nonces: in_nonces,
}
}
/// Builds a proof with all bytes zeroed out
pub fn zero(proof_size: usize) -> Proof {
Proof {
cuckoo_sizeshift: global::min_sizeshift(),
nonces: vec![0; proof_size],
}
}
/// Builds a proof with random POW data,
/// needed so that tests that ignore POW
/// don't fail due to duplicate hashes
pub fn random(proof_size: usize) -> Proof {
let sizeshift = global::min_sizeshift();
let nonce_mask = (1 << (sizeshift - 1)) - 1;
let mut rng = thread_rng();
// force the random num to be within sizeshift bits
let mut v: Vec<u64> = iter::repeat(())
.map(|()| (rng.gen::<u32>() & nonce_mask) as u64)
.take(proof_size)
.collect();
v.sort();
Proof {
cuckoo_sizeshift: global::min_sizeshift(),
nonces: v,
}
}
/// Returns the proof size
pub fn proof_size(&self) -> usize {
self.nonces.len()
}
}
impl Readable for Proof {
fn read(reader: &mut Reader) -> Result<Proof, ser::Error> {
let cuckoo_sizeshift = reader.read_u8()?;
if cuckoo_sizeshift == 0 || cuckoo_sizeshift > 64 {
return Err(ser::Error::CorruptedData);
}
let mut nonces = Vec::with_capacity(global::proofsize());
let nonce_bits = cuckoo_sizeshift as usize - 1;
let bytes_len = BitVec::bytes_len(nonce_bits * global::proofsize());
let bits = reader.read_fixed_bytes(bytes_len)?;
let bitvec = BitVec { bits };
for n in 0..global::proofsize() {
let mut nonce = 0;
for bit in 0..nonce_bits {
if bitvec.bit_at(n * nonce_bits + (bit as usize)) {
nonce |= 1 << bit;
}
}
nonces.push(nonce);
}
Ok(Proof {
cuckoo_sizeshift,
nonces,
})
}
}
impl Writeable for Proof {
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ser::Error> {
if writer.serialization_mode() != ser::SerializationMode::Hash {
writer.write_u8(self.cuckoo_sizeshift)?;
}
let nonce_bits = self.cuckoo_sizeshift as usize - 1;
let mut bitvec = BitVec::new(nonce_bits * global::proofsize());
for (n, nonce) in self.nonces.iter().enumerate() {
for bit in 0..nonce_bits {
if nonce & (1 << bit) != 0 {
bitvec.set_bit_at(n * nonce_bits + (bit as usize))
}
}
}
writer.write_fixed_bytes(&bitvec.bits)?;
Ok(())
}
}
// TODO this could likely be optimized by writing whole bytes (or even words)
// in the `BitVec` at once, dealing with the truncation, instead of bits by bits
struct BitVec {
bits: Vec<u8>,
}
impl BitVec {
/// Number of bytes required to store the provided number of bits
fn bytes_len(bits_len: usize) -> usize {
(bits_len + 7) / 8
}
fn new(bits_len: usize) -> BitVec {
BitVec {
bits: vec![0; BitVec::bytes_len(bits_len)],
}
}
fn set_bit_at(&mut self, pos: usize) {
self.bits[pos / 8] |= 1 << (pos % 8) as u8;
}
fn bit_at(&self, pos: usize) -> bool {
self.bits[pos / 8] & (1 << (pos % 8) as u8) != 0
}
}
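As a quick sanity check on the packed encoding above: with the defaults appearing in this diff (PROOFSIZE = 42 nonces and a Cuckoo30 proof, so 29 bits per nonce), the nonces pack into 153 bytes, plus one leading byte for the size shift. A small sketch assuming those defaults.

// Packed proof size implied by BitVec::bytes_len above, assuming the
// defaults from this diff: 42 nonces at 29 bits each (Cuckoo30).

fn bytes_len(bits_len: usize) -> usize {
    (bits_len + 7) / 8
}

fn main() {
    let proofsize = 42; // consensus::PROOFSIZE
    let cuckoo_sizeshift = 30usize; // consensus::DEFAULT_MIN_SIZESHIFT
    let nonce_bits = cuckoo_sizeshift - 1;
    let packed = bytes_len(nonce_bits * proofsize);
    // 42 * 29 = 1218 bits of nonces -> 153 bytes, plus 1 byte of sizeshift
    println!("{} bytes packed, {} bytes total", packed, packed + 1);
    assert_eq!(packed, 153);
}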


@ -408,8 +408,9 @@ pub struct BinWriter<'a> {
}
impl<'a> BinWriter<'a> {
/// Wraps a standard Write in a new BinWriter
pub fn new(write: &'a mut Write) -> BinWriter<'a> {
BinWriter{sink: write}
BinWriter { sink: write }
}
}


@ -20,8 +20,8 @@ extern crate grin_util as util;
extern crate grin_wallet as wallet;
use grin_core::core::block::{Block, BlockHeader};
use grin_core::core::target::Difficulty;
use grin_core::core::Transaction;
use grin_core::pow::Difficulty;
use keychain::{Identifier, Keychain};
use wallet::libtx::build::{self, input, output, with_fee};
use wallet::libtx::reward;


@ -21,8 +21,8 @@ use core::consensus::{
next_difficulty, valid_header_version, TargetError, BLOCK_TIME_WINDOW, DAMP_FACTOR,
DIFFICULTY_ADJUST_WINDOW, MEDIAN_TIME_INDEX, MEDIAN_TIME_WINDOW, UPPER_TIME_BOUND,
};
use core::core::target::Difficulty;
use core::global;
use core::pow::Difficulty;
use std::fmt::{self, Display};
/// Last n blocks for difficulty calculation purposes


@ -20,7 +20,7 @@ use rand::os::OsRng;
use rand::RngCore;
use core::core::hash::Hash;
use core::core::target::Difficulty;
use core::pow::Difficulty;
use msg::{read_message, write_message, Hand, Shake, SockAddr, Type, PROTOCOL_VERSION, USER_AGENT};
use peer::Peer;
use types::{Capabilities, Direction, Error, P2PConfig, PeerInfo};


@ -21,8 +21,8 @@ use std::{thread, time};
use core::consensus;
use core::core::hash::Hash;
use core::core::target::Difficulty;
use core::core::BlockHeader;
use core::pow::Difficulty;
use core::ser::{self, Readable, Reader, Writeable, Writer};
use types::{Capabilities, Error, ReasonForBan, MAX_BLOCK_HEADERS, MAX_LOCATORS, MAX_PEER_ADDRS};


@ -19,7 +19,7 @@ use std::sync::{Arc, RwLock};
use conn;
use core::core;
use core::core::hash::{Hash, Hashed};
use core::core::target::Difficulty;
use core::pow::Difficulty;
use handshake::Handshake;
use msg::{self, BanReason, GetPeerAddrs, Locator, Ping, TxHashSetRequest};
use protocol::Protocol;


@ -22,7 +22,7 @@ use rand::{thread_rng, Rng};
use chrono::prelude::Utc;
use core::core;
use core::core::hash::{Hash, Hashed};
use core::core::target::Difficulty;
use core::pow::Difficulty;
use util::LOGGER;
use peer::Peer;


@ -23,7 +23,7 @@ use lmdb;
use core::core;
use core::core::hash::Hash;
use core::core::target::Difficulty;
use core::pow::Difficulty;
use handshake::Handshake;
use peer::Peer;
use peers::Peers;


@ -19,7 +19,7 @@ use std::net::{IpAddr, SocketAddr};
use std::sync::mpsc;
use core::core::hash::Hash;
use core::core::target::Difficulty;
use core::pow::Difficulty;
use core::{core, ser};
use grin_store;


@ -19,12 +19,12 @@ extern crate grin_store as store;
extern crate grin_util as util;
use std::net::{SocketAddr, TcpListener, TcpStream};
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
use std::{thread, time};
use core::core::hash::Hash;
use core::core::target::Difficulty;
use core::pow::Difficulty;
use p2p::Peer;
fn open_port() -> u16 {


@ -32,8 +32,8 @@ use core::core::{Block, BlockHeader};
use chain::txhashset;
use chain::types::Tip;
use core::core::hash::Hashed;
use core::core::target::Difficulty;
use core::core::verifier_cache::LruVerifierCache;
use core::pow::Difficulty;
use keychain::{ExtKeychain, Keychain};
use wallet::libtx;


@ -36,8 +36,8 @@ use common::{
clean_output_dir, test_setup, test_source, test_transaction,
test_transaction_spending_coinbase, ChainAdapter,
};
use core::core::target::Difficulty;
use core::core::verifier_cache::LruVerifierCache;
use core::pow::Difficulty;
use keychain::{ExtKeychain, Keychain};
use wallet::libtx;


@ -34,9 +34,9 @@ use common::{
test_transaction_spending_coinbase, ChainAdapter,
};
use core::core::hash::Hashed;
use core::core::target::Difficulty;
use core::core::verifier_cache::LruVerifierCache;
use core::core::{transaction, Block, BlockHeader};
use core::pow::Difficulty;
use keychain::{ExtKeychain, Keychain};
use wallet::libtx;


@ -25,10 +25,10 @@ use std::time::Instant;
use chain::{self, ChainAdapter, Options, Tip};
use common::types::{self, ChainValidationMode, ServerConfig, SyncState, SyncStatus};
use core::core::hash::{Hash, Hashed};
use core::core::target::Difficulty;
use core::core::transaction::Transaction;
use core::core::verifier_cache::VerifierCache;
use core::core::{BlockHeader, CompactBlock};
use core::pow::Difficulty;
use core::{core, global};
use p2p;
use pool;


@ -29,8 +29,8 @@ use common::adapters::{
use common::stats::{DiffBlock, DiffStats, PeerStats, ServerStateInfo, ServerStats};
use common::types::{Error, ServerConfig, StratumServerConfig, SyncState};
use core::core::hash::Hashed;
use core::core::target::Difficulty;
use core::core::verifier_cache::{LruVerifierCache, VerifierCache};
use core::pow::Difficulty;
use core::{consensus, genesis, global, pow};
use grin::{dandelion_monitor, seed, sync};
use mining::stratumserver;


@ -22,8 +22,8 @@ use std::{cmp, thread};
use chain;
use common::types::{Error, SyncState, SyncStatus};
use core::core::hash::{Hash, Hashed};
use core::core::target::Difficulty;
use core::global;
use core::pow::Difficulty;
use grin::sync;
use p2p::{self, Peer, Peers};
use util::LOGGER;


@ -270,9 +270,7 @@ impl StratumServer {
{
let mut writer = ser::BinWriter::new(&mut header_buf);
bh.write_pre_pow(&mut writer).unwrap();
bh.pow
.write_pre_pow(bh.version, false, &mut writer)
.unwrap();
bh.pow.write_pre_pow(bh.version, &mut writer).unwrap();
}
let pre_pow = util::to_hex(header_buf);
let job_template = JobTemplate {


@ -25,8 +25,8 @@ use chain;
use common::types::StratumServerConfig;
use core::core::hash::{Hash, Hashed};
use core::core::verifier_cache::VerifierCache;
use core::core::{Block, BlockHeader, Proof};
use core::pow::cuckoo;
use core::core::{Block, BlockHeader};
use core::pow::{cuckoo, Proof};
use core::{consensus, global};
use mining::mine_block;
use pool;
@ -72,13 +72,13 @@ impl Miner {
/// The inner part of mining loop for the internal miner
/// kept around mostly for automated testing purposes
pub fn inner_mining_loop(
fn inner_mining_loop(
&self,
b: &mut Block,
head: &BlockHeader,
attempt_time_per_block: u32,
latest_hash: &mut Hash,
) -> Option<Proof> {
) -> bool {
// look for a pow for at most 2 sec on the same block (to give a chance to new
// transactions) and as long as the head hasn't changed
let deadline = Utc::now().timestamp() + attempt_time_per_block as i64;
@ -95,7 +95,6 @@ impl Miner {
);
let mut iter_count = 0;
let mut sol = None;
while head.hash() == *latest_hash && Utc::now().timestamp() < deadline {
if let Ok(proof) = cuckoo::Miner::new(
&b.header,
@ -104,10 +103,10 @@ impl Miner {
global::min_sizeshift(),
).mine()
{
let proof_diff = proof.to_difficulty();
b.header.pow.proof = proof;
let proof_diff = b.header.pow.to_difficulty();
if proof_diff >= (b.header.total_difficulty() - head.total_difficulty()) {
sol = Some(proof);
break;
return true;
}
}
@ -116,16 +115,13 @@ impl Miner {
iter_count += 1;
}
if sol == None {
debug!(
LOGGER,
"(Server ID: {}) No solution found after {} iterations, continuing...",
self.debug_output_id,
iter_count
)
}
sol
debug!(
LOGGER,
"(Server ID: {}) No solution found after {} iterations, continuing...",
self.debug_output_id,
iter_count
);
false
}
/// Starts the mining loop, building a new block on top of the existing
@ -163,8 +159,7 @@ impl Miner {
);
// we found a solution, push our block through the chain processing pipeline
if let Some(proof) = sol {
b.header.pow.proof = proof;
if sol {
info!(
LOGGER,
"(Server ID: {}) Found valid proof of work, adding block {}.",