Mirror of https://github.com/mimblewimble/grin.git (synced 2025-01-20 19:11:08 +03:00)
[SYNC PERFORMANCE] Adjust DifficultyIterator to no longer deserialize PoW proof nonces (#3671)
* replace bitvec with more efficient bitpack algorithm * optimise proof_unpack_len * move proof pack length calculation * small refactor * first pass attempt at not deserializing proof nonces in difficulty iter * another 10 seconds gained by not deserialising the proof from the difficulty iterator * add new deser parameters to tests where needed * add skip_proof variants to store * remove hash from difficulty iterator struct, rename HeaderInfo to HeaderDifficultyInfo * replace bitvec with more efficient bitpack algorithm * optimise proof_unpack_len * move proof pack length calculation * small refactor * first pass attempt at not deserializing proof nonces in difficulty iter * another 10 seconds gained by not deserialising the proof from the difficulty iterator * add new deser parameters to tests where needed * add skip_proof variants to store * remove hash from difficulty iterator struct, rename HeaderInfo to HeaderDifficultyInfo
This commit is contained in:
parent 7725a05ac1
commit 63c65605bb
20 changed files with 331 additions and 163 deletions
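The thrust of the change: every deserialization path now takes a DeserializationMode, and the difficulty iterator reads headers with SkipPow so the packed Cuckoo proof nonces are never unpacked. As a rough sketch of the new call shape (the helper function and buffer name below are illustrative, not part of the diff), a header destined only for difficulty calculations could be read like this:

use grin_core::core::BlockHeader;
use grin_core::ser::{self, DeserializationMode, ProtocolVersion};

// Illustrative helper (not in the diff): deserialize a block header while
// skipping the PoW proof nonces. The resulting header carries an empty
// nonce vector, so it should only be used for difficulty/timestamp data
// and never for recomputing the block hash.
fn read_header_for_difficulty(mut raw: &[u8]) -> Result<BlockHeader, ser::Error> {
    ser::deserialize(&mut raw, ProtocolVersion::local(), DeserializationMode::SkipPow)
}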
@@ -277,7 +277,7 @@ impl OutputHandler {
.context(ErrorKind::Internal("cain error".to_owned()))?;

Ok(BlockOutputs {
-header: BlockHeaderInfo::from_header(&header),
+header: BlockHeaderDifficultyInfo::from_header(&header),
outputs: outputs,
})
}

@@ -15,7 +15,7 @@
use super::utils::w;
use crate::core::core::hash::Hashed;
use crate::core::core::Transaction;
-use crate::core::ser::{self, ProtocolVersion};
+use crate::core::ser::{self, DeserializationMode, ProtocolVersion};
use crate::pool::{self, BlockChain, PoolAdapter, PoolEntry};
use crate::rest::*;
use crate::router::{Handler, ResponseFuture};
@@ -138,7 +138,8 @@ where

// All wallet api interaction explicitly uses protocol version 1 for now.
let version = ProtocolVersion(1);
-let tx: Transaction = ser::deserialize(&mut &tx_bin[..], version)
+let tx: Transaction =
+ser::deserialize(&mut &tx_bin[..], version, DeserializationMode::default())
.map_err(|e| ErrorKind::RequestError(format!("Bad request: {}", e)))?;

let source = pool::TxSource::PushApi;

@@ -530,7 +530,7 @@ impl TxKernelPrintable {

// Just the information required for wallet reconstruction
#[derive(Debug, Serialize, Deserialize, Clone)]
-pub struct BlockHeaderInfo {
+pub struct BlockHeaderDifficultyInfo {
// Hash
pub hash: String,
/// Height of this block since the genesis block (height 0)
@@ -539,9 +539,9 @@ pub struct BlockHeaderInfo {
pub previous: String,
}

-impl BlockHeaderInfo {
-pub fn from_header(header: &core::BlockHeader) -> BlockHeaderInfo {
-BlockHeaderInfo {
+impl BlockHeaderDifficultyInfo {
+pub fn from_header(header: &core::BlockHeader) -> BlockHeaderDifficultyInfo {
+BlockHeaderDifficultyInfo {
hash: header.hash().to_hex(),
height: header.height,
previous: header.prev_hash.to_hex(),
@@ -705,7 +705,7 @@ impl CompactBlockPrintable {
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct BlockOutputs {
/// The block header
-pub header: BlockHeaderInfo,
+pub header: BlockHeaderDifficultyInfo,
/// A printable version of the outputs
pub outputs: Vec<OutputPrintable>,
}

@@ -84,7 +84,7 @@ pub trait ListIndex {
/// Key is "prefix|commit".
/// Note the key for an individual entry in the list is "prefix|commit|pos".
fn get_list(&self, batch: &Batch<'_>, commit: Commitment) -> Result<Option<Self::List>, Error> {
-batch.db.get_ser(&self.list_key(commit))
+batch.db.get_ser(&self.list_key(commit), None)
}

/// Returns one of "head", "tail" or "middle" entry variants.
@@ -95,7 +95,7 @@ pub trait ListIndex {
commit: Commitment,
pos: u64,
) -> Result<Option<Self::Entry>, Error> {
-batch.db.get_ser(&self.entry_key(commit, pos))
+batch.db.get_ser(&self.entry_key(commit, pos), None)
}

/// Peek the head of the list for the specified commitment.

@@ -14,11 +14,11 @@

//! Implements storage primitives required by the chain

-use crate::core::consensus::HeaderInfo;
+use crate::core::consensus::HeaderDifficultyInfo;
use crate::core::core::hash::{Hash, Hashed};
use crate::core::core::{Block, BlockHeader, BlockSums};
use crate::core::pow::Difficulty;
-use crate::core::ser::{ProtocolVersion, Readable, Writeable};
+use crate::core::ser::{DeserializationMode, ProtocolVersion, Readable, Writeable};
use crate::linked_list::MultiIndex;
use crate::types::{CommitPos, Tip};
use crate::util::secp::pedersen::Commitment;
@@ -60,19 +60,19 @@ impl ChainStore {

/// The current chain head.
pub fn head(&self) -> Result<Tip, Error> {
-option_to_not_found(self.db.get_ser(&[HEAD_PREFIX]), || "HEAD".to_owned())
+option_to_not_found(self.db.get_ser(&[HEAD_PREFIX], None), || "HEAD".to_owned())
}

/// The current header head (may differ from chain head).
pub fn header_head(&self) -> Result<Tip, Error> {
-option_to_not_found(self.db.get_ser(&[HEADER_HEAD_PREFIX]), || {
+option_to_not_found(self.db.get_ser(&[HEADER_HEAD_PREFIX], None), || {
"HEADER_HEAD".to_owned()
})
}

/// The current chain "tail" (earliest block in the store).
pub fn tail(&self) -> Result<Tip, Error> {
-option_to_not_found(self.db.get_ser(&[TAIL_PREFIX]), || "TAIL".to_owned())
+option_to_not_found(self.db.get_ser(&[TAIL_PREFIX], None), || "TAIL".to_owned())
}

/// Header of the block at the head of the block chain (not the same thing as header_head).
@@ -82,7 +82,7 @@ impl ChainStore {

/// Get full block.
pub fn get_block(&self, h: &Hash) -> Result<Block, Error> {
-option_to_not_found(self.db.get_ser(&to_key(BLOCK_PREFIX, h)), || {
+option_to_not_found(self.db.get_ser(&to_key(BLOCK_PREFIX, h), None), || {
format!("BLOCK: {}", h)
})
}
@@ -94,7 +94,7 @@ impl ChainStore {

/// Get block_sums for the block hash.
pub fn get_block_sums(&self, h: &Hash) -> Result<BlockSums, Error> {
-option_to_not_found(self.db.get_ser(&to_key(BLOCK_SUMS_PREFIX, h)), || {
+option_to_not_found(self.db.get_ser(&to_key(BLOCK_SUMS_PREFIX, h), None), || {
format!("Block sums for block: {}", h)
})
}
@@ -104,11 +104,32 @@ impl ChainStore {
self.get_block_header(&header.prev_hash)
}

+/// Get previous header without deserializing the proof nonces
+pub fn get_previous_header_skip_proof(
+&self,
+header: &BlockHeader,
+) -> Result<BlockHeader, Error> {
+self.get_block_header_skip_proof(&header.prev_hash)
+}
+
/// Get block header.
pub fn get_block_header(&self, h: &Hash) -> Result<BlockHeader, Error> {
-option_to_not_found(self.db.get_ser(&to_key(BLOCK_HEADER_PREFIX, h)), || {
-format!("BLOCK HEADER: {}", h)
-})
+option_to_not_found(
+self.db.get_ser(&to_key(BLOCK_HEADER_PREFIX, h), None),
+|| format!("BLOCK HEADER: {}", h),
+)
}

+/// Get block header without deserializing the full PoW Proof; currently used
+/// for difficulty iterator which is called many times but doesn't need the proof
+pub fn get_block_header_skip_proof(&self, h: &Hash) -> Result<BlockHeader, Error> {
+option_to_not_found(
+self.db.get_ser(
+&to_key(BLOCK_HEADER_PREFIX, h),
+Some(ser::DeserializationMode::SkipPow),
+),
+|| format!("BLOCK HEADER: {}", h),
+)
+}
+
/// Get PMMR pos for the given output commitment.
@@ -124,7 +145,7 @@ impl ChainStore {

/// Get PMMR pos and block height for the given output commitment.
pub fn get_output_pos_height(&self, commit: &Commitment) -> Result<Option<CommitPos>, Error> {
-self.db.get_ser(&to_key(OUTPUT_POS_PREFIX, commit))
+self.db.get_ser(&to_key(OUTPUT_POS_PREFIX, commit), None)
}

/// Builds a new batch to be used with this store.
@@ -145,17 +166,17 @@ pub struct Batch<'a> {
impl<'a> Batch<'a> {
/// The head.
pub fn head(&self) -> Result<Tip, Error> {
-option_to_not_found(self.db.get_ser(&[HEAD_PREFIX]), || "HEAD".to_owned())
+option_to_not_found(self.db.get_ser(&[HEAD_PREFIX], None), || "HEAD".to_owned())
}

/// The tail.
pub fn tail(&self) -> Result<Tip, Error> {
-option_to_not_found(self.db.get_ser(&[TAIL_PREFIX]), || "TAIL".to_owned())
+option_to_not_found(self.db.get_ser(&[TAIL_PREFIX], None), || "TAIL".to_owned())
}

/// The current header head (may differ from chain head).
pub fn header_head(&self) -> Result<Tip, Error> {
-option_to_not_found(self.db.get_ser(&[HEADER_HEAD_PREFIX]), || {
+option_to_not_found(self.db.get_ser(&[HEADER_HEAD_PREFIX], None), || {
"HEADER_HEAD".to_owned()
})
}
@@ -182,7 +203,7 @@ impl<'a> Batch<'a> {

/// get block
pub fn get_block(&self, h: &Hash) -> Result<Block, Error> {
-option_to_not_found(self.db.get_ser(&to_key(BLOCK_PREFIX, h)), || {
+option_to_not_found(self.db.get_ser(&to_key(BLOCK_PREFIX, h), None), || {
format!("Block with hash: {}", h)
})
}
@@ -269,7 +290,7 @@ impl<'a> Batch<'a> {
let key = to_key(OUTPUT_POS_PREFIX, "");
let protocol_version = self.db.protocol_version();
self.db.iter(&key, move |k, mut v| {
-ser::deserialize(&mut v, protocol_version)
+ser::deserialize(&mut v, protocol_version, DeserializationMode::default())
.map(|pos| (k.to_vec(), pos))
.map_err(From::from)
})
@@ -288,7 +309,7 @@ impl<'a> Batch<'a> {

/// Get output_pos and block height from index.
pub fn get_output_pos_height(&self, commit: &Commitment) -> Result<Option<CommitPos>, Error> {
-self.db.get_ser(&to_key(OUTPUT_POS_PREFIX, commit))
+self.db.get_ser(&to_key(OUTPUT_POS_PREFIX, commit), None)
}

/// Get the previous header.
@@ -296,11 +317,33 @@ impl<'a> Batch<'a> {
self.get_block_header(&header.prev_hash)
}

+/// Get the previous header, without deserializing the full PoW Proof (or the ability to derive the
+/// block hash, this is used for the difficulty iterator).
+pub fn get_previous_header_skip_proof(
+&self,
+header: &BlockHeader,
+) -> Result<BlockHeader, Error> {
+self.get_block_header_skip_proof(&header.prev_hash)
+}
+
/// Get block header.
pub fn get_block_header(&self, h: &Hash) -> Result<BlockHeader, Error> {
-option_to_not_found(self.db.get_ser(&to_key(BLOCK_HEADER_PREFIX, h)), || {
-format!("BLOCK HEADER: {}", h)
-})
+option_to_not_found(
+self.db.get_ser(&to_key(BLOCK_HEADER_PREFIX, h), None),
+|| format!("BLOCK HEADER: {}", h),
+)
}

+/// Get block header without deserializing the full PoW Proof; currently used
+/// for difficulty iterator which is called many times but doesn't need the proof
+pub fn get_block_header_skip_proof(&self, h: &Hash) -> Result<BlockHeader, Error> {
+option_to_not_found(
+self.db.get_ser(
+&to_key(BLOCK_HEADER_PREFIX, h),
+Some(ser::DeserializationMode::SkipPow),
+),
+|| format!("BLOCK HEADER: {}", h),
+)
+}
+
/// Delete the block spent index.
@@ -315,7 +358,7 @@ impl<'a> Batch<'a> {

/// Get block_sums for the block.
pub fn get_block_sums(&self, h: &Hash) -> Result<BlockSums, Error> {
-option_to_not_found(self.db.get_ser(&to_key(BLOCK_SUMS_PREFIX, h)), || {
+option_to_not_found(self.db.get_ser(&to_key(BLOCK_SUMS_PREFIX, h), None), || {
format!("Block sums for block: {}", h)
})
}
@@ -339,9 +382,10 @@ impl<'a> Batch<'a> {
/// Get the "spent index" from the db for the specified block.
/// If we need to rewind a block then we use this to "unspend" the spent outputs.
pub fn get_spent_index(&self, bh: &Hash) -> Result<Vec<CommitPos>, Error> {
-option_to_not_found(self.db.get_ser(&to_key(BLOCK_SPENT_PREFIX, bh)), || {
-format!("spent index: {}", bh)
-})
+option_to_not_found(
+self.db.get_ser(&to_key(BLOCK_SPENT_PREFIX, bh), None),
+|| format!("spent index: {}", bh),
+)
}

/// Commits this batch. If it's a child batch, it will be merged with the
@@ -364,7 +408,8 @@ impl<'a> Batch<'a> {
let key = to_key(BLOCK_PREFIX, "");
let protocol_version = self.db.protocol_version();
self.db.iter(&key, move |_, mut v| {
-ser::deserialize(&mut v, protocol_version).map_err(From::from)
+ser::deserialize(&mut v, protocol_version, DeserializationMode::default())
+.map_err(From::from)
})
}
@@ -425,16 +470,20 @@ impl<'a> DifficultyIter<'a> {
}

impl<'a> Iterator for DifficultyIter<'a> {
-type Item = HeaderInfo;
+type Item = HeaderDifficultyInfo;

fn next(&mut self) -> Option<Self::Item> {
// Get both header and previous_header if this is the initial iteration.
// Otherwise move prev_header to header and get the next prev_header.
+// Note that due to optimizations being called in `get_block_header_skip_proof`,
+// Items returned by this iterator cannot be expected to correctly
+// calculate their own hash - This iterator is purely for iterating through
+// difficulty information
self.header = if self.header.is_none() {
if let Some(ref batch) = self.batch {
-batch.get_block_header(&self.start).ok()
+batch.get_block_header_skip_proof(&self.start).ok()
} else if let Some(ref store) = self.store {
-store.get_block_header(&self.start).ok()
+store.get_block_header_skip_proof(&self.start).ok()
} else {
None
}
@@ -446,9 +495,9 @@ impl<'a> Iterator for DifficultyIter<'a> {
// Otherwise we are done.
if let Some(header) = self.header.clone() {
if let Some(ref batch) = self.batch {
-self.prev_header = batch.get_previous_header(&header).ok();
+self.prev_header = batch.get_previous_header_skip_proof(&header).ok();
} else if let Some(ref store) = self.store {
-self.prev_header = store.get_previous_header(&header).ok();
+self.prev_header = store.get_previous_header_skip_proof(&header).ok();
} else {
self.prev_header = None;
}
@@ -460,8 +509,7 @@ impl<'a> Iterator for DifficultyIter<'a> {
let difficulty = header.total_difficulty() - prev_difficulty;
let scaling = header.pow.secondary_scaling;

-Some(HeaderInfo::new(
-header.hash(),
+Some(HeaderDifficultyInfo::new(
header.timestamp.timestamp() as u64,
difficulty,
scaling,

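Because the store-level helpers above return headers whose proof nonces were skipped, their hashes are meaningless, as the iterator comment in the hunk above spells out. A hypothetical guard (names are illustrative, not part of the diff) that callers of the skip-proof readers could apply:

use grin_core::core::BlockHeader;

// Illustrative only: a header read with DeserializationMode::SkipPow comes back
// with an empty nonce vector, so anything that needs the real block hash should
// reject it and re-read the header with a full deserialization instead.
fn proof_was_skipped(header: &BlockHeader) -> bool {
    header.pow.proof.nonces.is_empty()
}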
@@ -509,7 +509,9 @@ impl Readable for BitmapBlockSerialization
#[cfg(test)]
mod tests {
use super::*;
-use crate::core::ser::{BinReader, BinWriter, ProtocolVersion, Readable, Writeable};
+use crate::core::ser::{
+BinReader, BinWriter, DeserializationMode, ProtocolVersion, Readable, Writeable,
+};
use byteorder::ReadBytesExt;
use grin_util::secp::rand::Rng;
use rand::thread_rng;
@@ -546,7 +548,11 @@ mod tests {

// Deserialize
cursor.set_position(0);
-let mut reader = BinReader::new(&mut cursor, ProtocolVersion(1));
+let mut reader = BinReader::new(
+&mut cursor,
+ProtocolVersion(1),
+DeserializationMode::default(),
+);
let block2: BitmapBlock = Readable::read(&mut reader).unwrap();
assert_eq!(block, block2);
}

@@ -1,6 +1,8 @@
use self::chain::txhashset::{BitmapAccumulator, BitmapSegment};
use self::core::core::pmmr::segment::{Segment, SegmentIdentifier};
-use self::core::ser::{BinReader, BinWriter, ProtocolVersion, Readable, Writeable};
+use self::core::ser::{
+BinReader, BinWriter, DeserializationMode, ProtocolVersion, Readable, Writeable,
+};
use croaring::Bitmap;
use grin_chain as chain;
use grin_core as core;
@@ -52,7 +54,11 @@ fn test_roundtrip(entries: usize) {

// Read `BitmapSegment`
cursor.set_position(0);
-let mut reader = BinReader::new(&mut cursor, ProtocolVersion(1));
+let mut reader = BinReader::new(
+&mut cursor,
+ProtocolVersion(1),
+DeserializationMode::default(),
+);
let bms2: BitmapSegment = Readable::read(&mut reader).unwrap();
assert_eq!(bms, bms2);

@@ -19,7 +19,6 @@
//! here.

use crate::core::block::HeaderVersion;
-use crate::core::hash::{Hash, ZERO_HASH};
use crate::global;
use crate::pow::Difficulty;
use std::cmp::{max, min};
@@ -227,11 +226,11 @@ pub const UNIT_DIFFICULTY: u64 =
pub const INITIAL_DIFFICULTY: u64 = 1_000_000 * UNIT_DIFFICULTY;

/// Minimal header information required for the Difficulty calculation to
-/// take place
+/// take place. Used to iterate through a number of blocks. Note that an instance
+/// of this is unable to calculate its own hash, due to an optimization that prevents
+/// the header's PoW proof nonces from being deserialized on read
#[derive(Clone, Debug, Eq, PartialEq)]
-pub struct HeaderInfo {
-/// Block hash, ZERO_HASH when this is a sythetic entry.
-pub block_hash: Hash,
+pub struct HeaderDifficultyInfo {
/// Timestamp of the header, 1 when not used (returned info)
pub timestamp: u64,
/// Network difficulty or next difficulty to use
@@ -242,17 +241,15 @@ pub struct HeaderInfo {
pub is_secondary: bool,
}

-impl HeaderInfo {
+impl HeaderDifficultyInfo {
/// Default constructor
pub fn new(
-block_hash: Hash,
timestamp: u64,
difficulty: Difficulty,
secondary_scaling: u32,
is_secondary: bool,
-) -> HeaderInfo {
-HeaderInfo {
-block_hash,
+) -> HeaderDifficultyInfo {
+HeaderDifficultyInfo {
timestamp,
difficulty,
secondary_scaling,
@@ -262,9 +259,8 @@ impl HeaderInfo {

/// Constructor from a timestamp and difficulty, setting a default secondary
/// PoW factor
-pub fn from_ts_diff(timestamp: u64, difficulty: Difficulty) -> HeaderInfo {
-HeaderInfo {
-block_hash: ZERO_HASH,
+pub fn from_ts_diff(timestamp: u64, difficulty: Difficulty) -> HeaderDifficultyInfo {
+HeaderDifficultyInfo {
timestamp,
difficulty,
secondary_scaling: global::initial_graph_weight(),
@@ -275,9 +271,11 @@ impl HeaderInfo {

/// Constructor from a difficulty and secondary factor, setting a default
/// timestamp
-pub fn from_diff_scaling(difficulty: Difficulty, secondary_scaling: u32) -> HeaderInfo {
-HeaderInfo {
-block_hash: ZERO_HASH,
+pub fn from_diff_scaling(
+difficulty: Difficulty,
+secondary_scaling: u32,
+) -> HeaderDifficultyInfo {
+HeaderDifficultyInfo {
timestamp: 1,
difficulty,
secondary_scaling,
@@ -300,9 +298,9 @@ pub fn clamp(actual: u64, goal: u64, clamp_factor: u64) -> u64 {
/// Takes an iterator over past block headers information, from latest
/// (highest height) to oldest (lowest height).
/// Uses either the old dma DAA or, starting from HF4, the new wtema DAA
-pub fn next_difficulty<T>(height: u64, cursor: T) -> HeaderInfo
+pub fn next_difficulty<T>(height: u64, cursor: T) -> HeaderDifficultyInfo
where
-T: IntoIterator<Item = HeaderInfo>,
+T: IntoIterator<Item = HeaderDifficultyInfo>,
{
if header_version(height) < HeaderVersion(5) {
next_dma_difficulty(height, cursor)
@@ -316,9 +314,9 @@ where
/// The corresponding timespan is calculated
/// by using the difference between the timestamps at the beginning
/// and the end of the window, with a damping toward the target block time.
-pub fn next_dma_difficulty<T>(height: u64, cursor: T) -> HeaderInfo
+pub fn next_dma_difficulty<T>(height: u64, cursor: T) -> HeaderDifficultyInfo
where
-T: IntoIterator<Item = HeaderInfo>,
+T: IntoIterator<Item = HeaderDifficultyInfo>,
{
// Create vector of difficulty data running from earliest
// to latest, and pad with simulated pre-genesis data to allow earlier
@@ -348,14 +346,14 @@ where
// minimum difficulty avoids getting stuck due to dampening
let difficulty = max(MIN_DMA_DIFFICULTY, diff_sum * BLOCK_TIME_SEC / adj_ts);

-HeaderInfo::from_diff_scaling(Difficulty::from_num(difficulty), sec_pow_scaling)
+HeaderDifficultyInfo::from_diff_scaling(Difficulty::from_num(difficulty), sec_pow_scaling)
}

/// Difficulty calculation based on a Weighted Target Exponential Moving Average
/// of difficulty, using the ratio of the last block time over the target block time.
-pub fn next_wtema_difficulty<T>(_height: u64, cursor: T) -> HeaderInfo
+pub fn next_wtema_difficulty<T>(_height: u64, cursor: T) -> HeaderDifficultyInfo
where
-T: IntoIterator<Item = HeaderInfo>,
+T: IntoIterator<Item = HeaderDifficultyInfo>,
{
let mut last_headers = cursor.into_iter();

@@ -375,18 +373,18 @@ where
// since 16384 * WTEMA_HALF_LIFE / (WTEMA_HALF_LIFE - 1) > 16384
let difficulty = max(Difficulty::min_wtema(), Difficulty::from_num(next_diff));

-HeaderInfo::from_diff_scaling(difficulty, 0) // no more secondary PoW
+HeaderDifficultyInfo::from_diff_scaling(difficulty, 0) // no more secondary PoW
}

/// Count, in units of 1/100 (a percent), the number of "secondary" (AR) blocks in the provided window of blocks.
-pub fn ar_count(_height: u64, diff_data: &[HeaderInfo]) -> u64 {
+pub fn ar_count(_height: u64, diff_data: &[HeaderDifficultyInfo]) -> u64 {
100 * diff_data.iter().filter(|n| n.is_secondary).count() as u64
}

/// The secondary proof-of-work factor is calculated along the same lines as in next_dma_difficulty,
/// as an adjustment on the deviation against the ideal value.
/// Factor by which the secondary proof of work difficulty will be adjusted
-pub fn secondary_pow_scaling(height: u64, diff_data: &[HeaderInfo]) -> u32 {
+pub fn secondary_pow_scaling(height: u64, diff_data: &[HeaderDifficultyInfo]) -> u32 {
// Get the scaling factor sum of the last DMA_WINDOW elements
let scale_sum: u64 = diff_data.iter().map(|dd| dd.secondary_scaling as u64).sum();

@@ -2281,7 +2281,9 @@ mod test {
for version in vec![ProtocolVersion(1), ProtocolVersion(2)] {
let mut vec = vec![];
ser::serialize(&mut vec, version, &kernel).expect("serialized failed");
-let kernel2: TxKernel = ser::deserialize(&mut &vec[..], version).unwrap();
+let kernel2: TxKernel =
+ser::deserialize(&mut &vec[..], version, ser::DeserializationMode::default())
+.unwrap();
assert_eq!(kernel2.features, KernelFeatures::Plain { fee: 10.into() });
assert_eq!(kernel2.excess, commit);
assert_eq!(kernel2.excess_sig, sig.clone());
@@ -2321,7 +2323,9 @@ mod test {
for version in vec![ProtocolVersion(1), ProtocolVersion(2)] {
let mut vec = vec![];
ser::serialize(&mut vec, version, &kernel).expect("serialized failed");
-let kernel2: TxKernel = ser::deserialize(&mut &vec[..], version).unwrap();
+let kernel2: TxKernel =
+ser::deserialize(&mut &vec[..], version, ser::DeserializationMode::default())
+.unwrap();
assert_eq!(kernel.features, kernel2.features);
assert_eq!(kernel2.excess, commit);
assert_eq!(kernel2.excess_sig, sig.clone());
@@ -2363,7 +2367,9 @@ mod test {
for version in vec![ProtocolVersion(1), ProtocolVersion(2)] {
let mut vec = vec![];
ser::serialize(&mut vec, version, &kernel).expect("serialized failed");
-let kernel2: TxKernel = ser::deserialize(&mut &vec[..], version).unwrap();
+let kernel2: TxKernel =
+ser::deserialize(&mut &vec[..], version, ser::DeserializationMode::default())
+.unwrap();
assert_eq!(kernel.features, kernel2.features);
assert_eq!(kernel2.excess, commit);
assert_eq!(kernel2.excess_sig, sig.clone());

@@ -17,10 +17,10 @@
//! should be used sparingly.

use crate::consensus::{
-graph_weight, header_version, HeaderInfo, BASE_EDGE_BITS, BLOCK_TIME_SEC, C32_GRAPH_WEIGHT,
-COINBASE_MATURITY, CUT_THROUGH_HORIZON, DAY_HEIGHT, DEFAULT_MIN_EDGE_BITS, DMA_WINDOW,
-GRIN_BASE, INITIAL_DIFFICULTY, KERNEL_WEIGHT, MAX_BLOCK_WEIGHT, OUTPUT_WEIGHT, PROOFSIZE,
-SECOND_POW_EDGE_BITS, STATE_SYNC_THRESHOLD,
+graph_weight, header_version, HeaderDifficultyInfo, BASE_EDGE_BITS, BLOCK_TIME_SEC,
+C32_GRAPH_WEIGHT, COINBASE_MATURITY, CUT_THROUGH_HORIZON, DAY_HEIGHT, DEFAULT_MIN_EDGE_BITS,
+DMA_WINDOW, GRIN_BASE, INITIAL_DIFFICULTY, KERNEL_WEIGHT, MAX_BLOCK_WEIGHT, OUTPUT_WEIGHT,
+PROOFSIZE, SECOND_POW_EDGE_BITS, STATE_SYNC_THRESHOLD,
};
use crate::core::block::HeaderVersion;
use crate::pow::{
@@ -453,13 +453,14 @@ pub fn is_testnet() -> bool {
/// vector and pads if needed (which will) only be needed for the first few
/// blocks after genesis

-pub fn difficulty_data_to_vector<T>(cursor: T) -> Vec<HeaderInfo>
+pub fn difficulty_data_to_vector<T>(cursor: T) -> Vec<HeaderDifficultyInfo>
where
-T: IntoIterator<Item = HeaderInfo>,
+T: IntoIterator<Item = HeaderDifficultyInfo>,
{
// Convert iterator to vector, so we can append to it if necessary
let needed_block_count = DMA_WINDOW as usize + 1;
-let mut last_n: Vec<HeaderInfo> = cursor.into_iter().take(needed_block_count).collect();
+let mut last_n: Vec<HeaderDifficultyInfo> =
+cursor.into_iter().take(needed_block_count).collect();

// Only needed just after blockchain launch... basically ensures there's
// always enough data by simulating perfectly timed pre-genesis
@@ -477,7 +478,7 @@ where
let mut last_ts = last_n.last().unwrap().timestamp;
for _ in n..needed_block_count {
last_ts = last_ts.saturating_sub(last_ts_delta);
-last_n.push(HeaderInfo::from_ts_diff(last_ts, last_diff));
+last_n.push(HeaderDifficultyInfo::from_ts_diff(last_ts, last_diff));
}
}
last_n.reverse();

@@ -16,7 +16,7 @@ use crate::consensus::{graph_weight, MIN_DMA_DIFFICULTY, SECOND_POW_EDGE_BITS};
use crate::core::hash::{DefaultHashable, Hashed};
use crate::global;
use crate::pow::error::Error;
-use crate::ser::{self, Readable, Reader, Writeable, Writer};
+use crate::ser::{self, DeserializationMode, Readable, Reader, Writeable, Writer};
use rand::{thread_rng, Rng};
use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
/// Types for a Cuck(at)oo proof of work and its encapsulation as a fully usable
@@ -491,6 +491,8 @@ impl Readable for Proof {
}

// prepare nonces and read the right number of bytes
+// If skipping pow proof, we can stop after reading edge bits
+if reader.deserialization_mode() != DeserializationMode::SkipPow {
let mut nonces = Vec::with_capacity(global::proofsize());
let nonce_bits = edge_bits as usize;
let bytes_len = Proof::pack_len(edge_bits);
@@ -498,7 +500,6 @@ impl Readable for Proof {
return Err(ser::Error::CorruptedData);
}
let bits = reader.read_fixed_bytes(bytes_len)?;
-
for n in 0..global::proofsize() {
nonces.push(read_number(&bits, n * nonce_bits, nonce_bits));
}
@@ -509,8 +510,13 @@ impl Readable for Proof {
if read_number(&bits, end_of_data, bytes_len * 8 - end_of_data) != 0 {
return Err(ser::Error::CorruptedData);
}

Ok(Proof { edge_bits, nonces })
+} else {
+Ok(Proof {
+edge_bits,
+nonces: vec![],
+})
+}
}
}
@@ -526,7 +532,7 @@ impl Writeable for Proof {
#[cfg(test)]
mod tests {
use super::*;
-use crate::ser::{BinReader, BinWriter, ProtocolVersion};
+use crate::ser::{BinReader, BinWriter, DeserializationMode, ProtocolVersion};
use rand::Rng;
use std::io::Cursor;

@@ -542,7 +548,11 @@ mod tests {
panic!("failed to write proof {:?}", e);
}
buf.set_position(0);
-let mut r = BinReader::new(&mut buf, ProtocolVersion::local());
+let mut r = BinReader::new(
+&mut buf,
+ProtocolVersion::local(),
+DeserializationMode::default(),
+);
match Proof::read(&mut r) {
Err(e) => panic!("failed to read proof: {:?}", e),
Ok(p) => assert_eq!(p, proof),

@@ -214,9 +214,27 @@ pub trait Writer {
}
}

+/// Signal to a deserializable object how much of its data should be deserialized
+#[derive(Copy, Clone, PartialEq, Eq)]
+pub enum DeserializationMode {
+/// Deserialize everything sufficiently to fully reconstruct the object
+Full,
+/// For Block Headers, skip reading proof
+SkipPow,
+}
+
+impl DeserializationMode {
+/// Default deserialization mode
+pub fn default() -> Self {
+DeserializationMode::Full
+}
+}
+
/// Implementations defined how different numbers and binary structures are
/// read from an underlying stream or container (depending on implementation).
pub trait Reader {
+/// The mode this reader is reading from
+fn deserialization_mode(&self) -> DeserializationMode;
/// Read a u8 from the underlying Read
fn read_u8(&mut self) -> Result<u8, Error>;
/// Read a u16 from the underlying Read
@@ -391,14 +409,19 @@ where
pub fn deserialize<T: Readable, R: Read>(
source: &mut R,
version: ProtocolVersion,
+mode: DeserializationMode,
) -> Result<T, Error> {
-let mut reader = BinReader::new(source, version);
+let mut reader = BinReader::new(source, version, mode);
T::read(&mut reader)
}

/// Deserialize a Readable based on our default "local" protocol version.
pub fn deserialize_default<T: Readable, R: Read>(source: &mut R) -> Result<T, Error> {
-deserialize(source, ProtocolVersion::local())
+deserialize(
+source,
+ProtocolVersion::local(),
+DeserializationMode::default(),
+)
}

/// Serializes a Writeable into any std::io::Write implementation.
@@ -428,12 +451,17 @@ pub fn ser_vec<W: Writeable>(thing: &W, version: ProtocolVersion) -> Result<Vec<
pub struct BinReader<'a, R: Read> {
source: &'a mut R,
version: ProtocolVersion,
+deser_mode: DeserializationMode,
}

impl<'a, R: Read> BinReader<'a, R> {
/// Constructor for a new BinReader for the provided source and protocol version.
-pub fn new(source: &'a mut R, version: ProtocolVersion) -> Self {
-BinReader { source, version }
+pub fn new(source: &'a mut R, version: ProtocolVersion, mode: DeserializationMode) -> Self {
+BinReader {
+source,
+version,
+deser_mode: mode,
+}
}
}

@@ -444,6 +472,9 @@ fn map_io_err(err: io::Error) -> Error {
/// Utility wrapper for an underlying byte Reader. Defines higher level methods
/// to read numbers, byte vectors, hashes, etc.
impl<'a, R: Read> Reader for BinReader<'a, R> {
+fn deserialization_mode(&self) -> DeserializationMode {
+self.deser_mode
+}
fn read_u8(&mut self) -> Result<u8, Error> {
self.source.read_u8().map_err(map_io_err)
}
@@ -504,6 +535,7 @@ pub struct StreamingReader<'a> {
total_bytes_read: u64,
version: ProtocolVersion,
stream: &'a mut dyn Read,
+deser_mode: DeserializationMode,
}

impl<'a> StreamingReader<'a> {
@@ -514,6 +546,7 @@ impl<'a> StreamingReader<'a> {
total_bytes_read: 0,
version,
stream,
+deser_mode: DeserializationMode::Full,
}
}

@@ -525,6 +558,9 @@ impl<'a> StreamingReader<'a> {

/// Note: We use read_fixed_bytes() here to ensure our "async" I/O behaves as expected.
impl<'a> Reader for StreamingReader<'a> {
+fn deserialization_mode(&self) -> DeserializationMode {
+self.deser_mode
+}
fn read_u8(&mut self) -> Result<u8, Error> {
let buf = self.read_fixed_bytes(1)?;
Ok(buf[0])
@@ -587,6 +623,7 @@ pub struct BufReader<'a, B: Buf> {
inner: &'a mut B,
version: ProtocolVersion,
bytes_read: usize,
+deser_mode: DeserializationMode,
}

impl<'a, B: Buf> BufReader<'a, B> {
@@ -596,6 +633,7 @@ impl<'a, B: Buf> BufReader<'a, B> {
inner: buf,
version,
bytes_read: 0,
+deser_mode: DeserializationMode::Full,
}
}

@@ -621,6 +659,10 @@ impl<'a, B: Buf> BufReader<'a, B> {
}

impl<'a, B: Buf> Reader for BufReader<'a, B> {
+fn deserialization_mode(&self) -> DeserializationMode {
+self.deser_mode
+}
+
fn read_u8(&mut self) -> Result<u8, Error> {
self.has_remaining(1)?;
Ok(self.inner.get_u8())

@@ -13,8 +13,8 @@

use chrono::Utc;
use grin_core::consensus::{
-next_dma_difficulty, next_wtema_difficulty, HeaderInfo, AR_SCALE_DAMP_FACTOR, BLOCK_TIME_SEC,
-DMA_WINDOW, MIN_AR_SCALE, YEAR_HEIGHT,
+next_dma_difficulty, next_wtema_difficulty, HeaderDifficultyInfo, AR_SCALE_DAMP_FACTOR,
+BLOCK_TIME_SEC, DMA_WINDOW, MIN_AR_SCALE, YEAR_HEIGHT,
};
use grin_core::global;
use grin_core::pow::Difficulty;
@@ -27,7 +27,7 @@ fn next_dma_difficulty_adjustment() {
let diff_min = Difficulty::min_dma();

// Check we don't get stuck on difficulty <= Difficulty::min_dma (at 4x faster blocks at least)
-let mut hi = HeaderInfo::from_diff_scaling(diff_min, AR_SCALE_DAMP_FACTOR as u32);
+let mut hi = HeaderDifficultyInfo::from_diff_scaling(diff_min, AR_SCALE_DAMP_FACTOR as u32);
hi.is_secondary = false;
let hinext = next_dma_difficulty(1, repeat(BLOCK_TIME_SEC / 4, hi.clone(), DMA_WINDOW, None));

@@ -46,7 +46,11 @@ fn next_dma_difficulty_adjustment() {

// check pre difficulty_data_to_vector effect on retargetting
assert_eq!(
-next_dma_difficulty(1, vec![HeaderInfo::from_ts_diff(42, hi.difficulty)]).difficulty,
+next_dma_difficulty(
+1,
+vec![HeaderDifficultyInfo::from_ts_diff(42, hi.difficulty)]
+)
+.difficulty,
Difficulty::from_num(14913)
);

@@ -123,7 +127,7 @@ fn next_wtema_difficulty_adjustment() {
let diff_min = Difficulty::min_wtema();

// Check we don't get stuck on mainnet difficulty <= Difficulty::min_wtema (on 59s blocks)
-let mut hi = HeaderInfo::from_diff_scaling(diff_min, 0);
+let mut hi = HeaderDifficultyInfo::from_diff_scaling(diff_min, 0);
hi.is_secondary = false;
let hinext = next_wtema_difficulty(hf4, repeat(BLOCK_TIME_SEC - 1, hi.clone(), 2, None));

@@ -191,7 +195,12 @@ fn next_wtema_difficulty_adjustment() {

// Builds an iterator for next difficulty calculation with the provided
// constant time interval, difficulty and total length.
-fn repeat(interval: u64, diff: HeaderInfo, len: u64, cur_time: Option<u64>) -> Vec<HeaderInfo> {
+fn repeat(
+interval: u64,
+diff: HeaderDifficultyInfo,
+len: u64,
+cur_time: Option<u64>,
+) -> Vec<HeaderDifficultyInfo> {
let cur_time = match cur_time {
Some(t) => t,
None => Utc::now().timestamp() as u64,
@@ -203,8 +212,7 @@ fn repeat(interval: u64, diff: HeaderInfo, len: u64, cur_time: Option<u64>) -> V
let pairs = times.zip(diffs.iter());
pairs
.map(|(t, d)| {
-HeaderInfo::new(
-diff.block_hash,
+HeaderDifficultyInfo::new(
cur_time + t as u64,
*d,
diff.secondary_scaling,
@@ -214,10 +222,10 @@ fn repeat(interval: u64, diff: HeaderInfo, len: u64, cur_time: Option<u64>) -> V
.collect::<Vec<_>>()
}

-fn repeat_offs(interval: u64, diff: u64, len: u64, from: u64) -> Vec<HeaderInfo> {
+fn repeat_offs(interval: u64, diff: u64, len: u64, from: u64) -> Vec<HeaderDifficultyInfo> {
repeat(
interval,
-HeaderInfo::from_ts_diff(1, Difficulty::from_num(diff)),
+HeaderDifficultyInfo::from_ts_diff(1, Difficulty::from_num(diff)),
len,
Some(from),
)

@@ -76,27 +76,31 @@ impl Display for DiffBlock {
}

// Creates a new chain with a genesis at a simulated difficulty
-fn create_chain_sim(diff: u64) -> Vec<(HeaderInfo, DiffStats)> {
+fn create_chain_sim(diff: u64) -> Vec<(HeaderDifficultyInfo, DiffStats)> {
println!(
"adding create: {}, {}",
Utc::now().timestamp(),
Difficulty::from_num(diff)
);
-let return_vec = vec![HeaderInfo::from_ts_diff(
+let return_vec = vec![HeaderDifficultyInfo::from_ts_diff(
Utc::now().timestamp() as u64,
Difficulty::from_num(diff),
)];
let diff_stats = get_diff_stats(&return_vec);
vec![(
-HeaderInfo::from_ts_diff(Utc::now().timestamp() as u64, Difficulty::from_num(diff)),
+HeaderDifficultyInfo::from_ts_diff(
+Utc::now().timestamp() as u64,
+Difficulty::from_num(diff),
+),
diff_stats,
)]
}

-fn get_diff_stats(chain_sim: &[HeaderInfo]) -> DiffStats {
+fn get_diff_stats(chain_sim: &[HeaderDifficultyInfo]) -> DiffStats {
// Fill out some difficulty stats for convenience
let diff_iter = chain_sim.to_vec();
-let last_blocks: Vec<HeaderInfo> = global::difficulty_data_to_vector(diff_iter.iter().cloned());
+let last_blocks: Vec<HeaderDifficultyInfo> =
+global::difficulty_data_to_vector(diff_iter.iter().cloned());

let mut last_time = last_blocks[0].timestamp;
let tip_height = chain_sim.len();
@@ -107,7 +111,8 @@ fn get_diff_stats(chain_sim: &[HeaderInfo]) -> DiffStats {

let mut i = 1;

-let sum_blocks: Vec<HeaderInfo> = global::difficulty_data_to_vector(diff_iter.iter().cloned())
+let sum_blocks: Vec<HeaderDifficultyInfo> =
+global::difficulty_data_to_vector(diff_iter.iter().cloned())
.into_iter()
.take(DMA_WINDOW as usize)
.collect();
@@ -170,19 +175,23 @@ fn get_diff_stats(chain_sim: &[HeaderInfo]) -> DiffStats {
// from the difficulty adjustment at interval seconds from the previous block
fn add_block(
interval: u64,
-chain_sim: Vec<(HeaderInfo, DiffStats)>,
-) -> Vec<(HeaderInfo, DiffStats)> {
+chain_sim: Vec<(HeaderDifficultyInfo, DiffStats)>,
+) -> Vec<(HeaderDifficultyInfo, DiffStats)> {
let mut ret_chain_sim = chain_sim.clone();
-let mut return_chain: Vec<HeaderInfo> = chain_sim.clone().iter().map(|e| e.0.clone()).collect();
+let mut return_chain: Vec<HeaderDifficultyInfo> =
+chain_sim.clone().iter().map(|e| e.0.clone()).collect();
// get last interval
let diff = next_difficulty(1, return_chain.clone());
let last_elem = chain_sim.first().unwrap().clone().0;
let time = last_elem.timestamp + interval;
-return_chain.insert(0, HeaderInfo::from_ts_diff(time, diff.difficulty));
+return_chain.insert(0, HeaderDifficultyInfo::from_ts_diff(time, diff.difficulty));
let diff_stats = get_diff_stats(&return_chain);
ret_chain_sim.insert(
0,
-(HeaderInfo::from_ts_diff(time, diff.difficulty), diff_stats),
+(
+HeaderDifficultyInfo::from_ts_diff(time, diff.difficulty),
+diff_stats,
+),
);
ret_chain_sim
}
@@ -190,9 +199,9 @@ fn add_block(
// Adds another n 'blocks' to the iterator, with difficulty calculated
fn add_block_repeated(
interval: u64,
-chain_sim: Vec<(HeaderInfo, DiffStats)>,
+chain_sim: Vec<(HeaderDifficultyInfo, DiffStats)>,
iterations: usize,
-) -> Vec<(HeaderInfo, DiffStats)> {
+) -> Vec<(HeaderDifficultyInfo, DiffStats)> {
let mut return_chain = chain_sim;
for _ in 0..iterations {
return_chain = add_block(interval, return_chain.clone());
@@ -202,7 +211,7 @@ fn add_block_repeated(

// Prints the contents of the iterator and its difficulties.. useful for
// tweaking
-fn print_chain_sim(chain_sim: Vec<(HeaderInfo, DiffStats)>) {
+fn print_chain_sim(chain_sim: Vec<(HeaderDifficultyInfo, DiffStats)>) {
let mut chain_sim = chain_sim;
chain_sim.reverse();
let mut last_time = 0;
@@ -361,7 +370,7 @@ fn test_secondary_pow_scale() {
global::set_local_chain_type(global::ChainTypes::Mainnet);

let window = DMA_WINDOW;
-let mut hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), 100);
+let mut hi = HeaderDifficultyInfo::from_diff_scaling(Difficulty::from_num(10), 100);

// all primary, factor should increase so it becomes easier to find a high
// difficulty block
@@ -385,7 +394,8 @@ fn test_secondary_pow_scale() {
50
);
// same as above, testing lowest bound
-let mut low_hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), MIN_AR_SCALE as u32);
+let mut low_hi =
+HeaderDifficultyInfo::from_diff_scaling(Difficulty::from_num(10), MIN_AR_SCALE as u32);
low_hi.is_secondary = true;
assert_eq!(
secondary_pow_scaling(
@@ -395,7 +405,7 @@ fn test_secondary_pow_scale() {
MIN_AR_SCALE as u32
);
// the right ratio of 95% secondary
-let mut primary_hi = HeaderInfo::from_diff_scaling(Difficulty::from_num(10), 50);
+let mut primary_hi = HeaderDifficultyInfo::from_diff_scaling(Difficulty::from_num(10), 50);
primary_hi.is_secondary = false;
assert_eq!(
secondary_pow_scaling(

@@ -24,7 +24,8 @@ use crate::core::core::{
};
use crate::core::pow::Difficulty;
use crate::core::ser::{
-self, ProtocolVersion, Readable, Reader, StreamingReader, Writeable, Writer,
+self, DeserializationMode, ProtocolVersion, Readable, Reader, StreamingReader, Writeable,
+Writer,
};
use crate::core::{consensus, global};
use crate::types::{
@@ -177,7 +178,8 @@ pub fn read_header<R: Read>(
) -> Result<MsgHeaderWrapper, Error> {
let mut head = vec![0u8; MsgHeader::LEN];
stream.read_exact(&mut head)?;
-let header: MsgHeaderWrapper = ser::deserialize(&mut &head[..], version)?;
+let header: MsgHeaderWrapper =
+ser::deserialize(&mut &head[..], version, DeserializationMode::default())?;
Ok(header)
}

@@ -202,7 +204,7 @@ pub fn read_body<T: Readable, R: Read>(
) -> Result<T, Error> {
let mut body = vec![0u8; h.msg_len as usize];
stream.read_exact(&mut body)?;
-ser::deserialize(&mut &body[..], version).map_err(From::from)
+ser::deserialize(&mut &body[..], version, DeserializationMode::default()).map_err(From::from)
}

/// Read (an unknown) message from the provided stream and discard it.

@@ -18,7 +18,7 @@ use chrono::Utc;
use num::FromPrimitive;
use rand::prelude::*;

-use crate::core::ser::{self, Readable, Reader, Writeable, Writer};
+use crate::core::ser::{self, DeserializationMode, Readable, Reader, Writeable, Writer};
use crate::types::{Capabilities, PeerAddr, ReasonForBan};
use grin_store::{self, option_to_not_found, to_key, Error};

@@ -137,7 +137,7 @@ impl PeerStore {
}

pub fn get_peer(&self, peer_addr: PeerAddr) -> Result<PeerData, Error> {
-option_to_not_found(self.db.get_ser(&peer_key(peer_addr)[..]), || {
+option_to_not_found(self.db.get_ser(&peer_key(peer_addr)[..], None), || {
format!("Peer at address: {}", peer_addr)
})
}
@@ -173,7 +173,8 @@ impl PeerStore {
let key = to_key(PEER_PREFIX, "");
let protocol_version = self.db.protocol_version();
self.db.iter(&key, move |_, mut v| {
-ser::deserialize(&mut v, protocol_version).map_err(From::from)
+ser::deserialize(&mut v, protocol_version, DeserializationMode::default())
+.map_err(From::from)
})
}

@@ -189,10 +190,10 @@ impl PeerStore {
pub fn update_state(&self, peer_addr: PeerAddr, new_state: State) -> Result<(), Error> {
let batch = self.db.batch()?;

-let mut peer =
-option_to_not_found(batch.get_ser::<PeerData>(&peer_key(peer_addr)[..]), || {
-format!("Peer at address: {}", peer_addr)
-})?;
+let mut peer = option_to_not_found(
+batch.get_ser::<PeerData>(&peer_key(peer_addr)[..], None),
+|| format!("Peer at address: {}", peer_addr),
+)?;
peer.flags = new_state;
if new_state == State::Banned {
peer.last_banned = Utc::now().timestamp();

@@ -40,7 +40,7 @@ use crate::common::stats::{
ChainStats, DiffBlock, DiffStats, PeerStats, ServerStateInfo, ServerStats, TxStats,
};
use crate::common::types::{Error, ServerConfig, StratumServerConfig};
-use crate::core::core::hash::Hashed;
+use crate::core::core::hash::{Hashed, ZERO_HASH};
use crate::core::ser::ProtocolVersion;
use crate::core::{consensus, genesis, global, pow};
use crate::grin::{dandelion_monitor, seed, sync};
@@ -435,7 +435,7 @@ impl Server {
// code clean. This may be handy for testing but not really needed
// for release
let diff_stats = {
-let last_blocks: Vec<consensus::HeaderInfo> =
+let last_blocks: Vec<consensus::HeaderDifficultyInfo> =
global::difficulty_data_to_vector(self.chain.difficulty_iter()?)
.into_iter()
.collect();
@@ -451,9 +451,17 @@ impl Server {

height += 1;

+// We need to query again for the actual block hash, as
+// the difficulty iterator doesn't contain enough info to
+// create a hash
+let block_hash = match self.chain.get_header_by_height(height as u64) {
+Ok(h) => h.hash(),
+Err(_) => ZERO_HASH,
+};
+
DiffBlock {
block_height: height,
-block_hash: next.block_hash,
+block_hash,
difficulty: next.difficulty.to_num(),
time: next.timestamp,
duration: next.timestamp - prev.timestamp,

@@ -22,7 +22,7 @@ use lmdb_zero::traits::CreateCursor;
use lmdb_zero::LmdbResultExt;

use crate::core::global;
-use crate::core::ser::{self, ProtocolVersion};
+use crate::core::ser::{self, DeserializationMode, ProtocolVersion};
use crate::util::RwLock;

/// number of bytes to grow the database by when needed
@@ -271,16 +271,23 @@ impl Store {

/// Gets a `Readable` value from the db, provided its key.
/// Note: Creates a new read transaction so will *not* see any uncommitted data.
-pub fn get_ser<T: ser::Readable>(&self, key: &[u8]) -> Result<Option<T>, Error> {
+pub fn get_ser<T: ser::Readable>(
+&self,
+key: &[u8],
+deser_mode: Option<DeserializationMode>,
+) -> Result<Option<T>, Error> {
let lock = self.db.read();
let db = lock
.as_ref()
.ok_or_else(|| Error::NotFoundErr("chain db is None".to_string()))?;
let txn = lmdb::ReadTransaction::new(self.env.clone())?;
let access = txn.access();
-
+let d = match deser_mode {
+Some(d) => d,
+_ => DeserializationMode::default(),
+};
self.get_with(key, &access, &db, |_, mut data| {
-ser::deserialize(&mut data, self.protocol_version()).map_err(From::from)
+ser::deserialize(&mut data, self.protocol_version(), d).map_err(From::from)
})
}

@@ -402,10 +409,18 @@ impl<'a> Batch<'a> {
self.store.iter(prefix, deserialize)
}

-/// Gets a `Readable` value from the db by provided key and default deserialization strategy.
-pub fn get_ser<T: ser::Readable>(&self, key: &[u8]) -> Result<Option<T>, Error> {
+/// Gets a `Readable` value from the db by provided key and provided deserialization strategy.
+pub fn get_ser<T: ser::Readable>(
+&self,
+key: &[u8],
+deser_mode: Option<DeserializationMode>,
+) -> Result<Option<T>, Error> {
+let d = match deser_mode {
+Some(d) => d,
+_ => DeserializationMode::default(),
+};
self.get_with(key, |_, mut data| {
-match ser::deserialize(&mut data, self.protocol_version()) {
+match ser::deserialize(&mut data, self.protocol_version(), d) {
Ok(res) => Ok(res),
Err(e) => Err(From::from(e)),
}

@@ -15,7 +15,8 @@
use tempfile::tempfile;

use crate::core::ser::{
-self, BinWriter, ProtocolVersion, Readable, Reader, StreamingReader, Writeable, Writer,
+self, BinWriter, DeserializationMode, ProtocolVersion, Readable, Reader, StreamingReader,
+Writeable, Writer,
};
use std::fmt::Debug;
use std::fs::{self, File, OpenOptions};
@@ -442,7 +443,7 @@ where

fn read_as_elmt(&self, pos: u64) -> io::Result<T> {
let data = self.read(pos)?;
-ser::deserialize(&mut &data[..], self.version)
+ser::deserialize(&mut &data[..], self.version, DeserializationMode::default())
.map_err(|e| io::Error::new(io::ErrorKind::Other, e))
}

@@ -17,7 +17,8 @@ use crate::core::core::pmmr;
use crate::core::core::pmmr::segment::{Segment, SegmentIdentifier};
use crate::core::core::pmmr::{Backend, ReadablePMMR, ReadonlyPMMR, PMMR};
use crate::core::ser::{
-BinReader, BinWriter, Error, PMMRable, ProtocolVersion, Readable, Reader, Writeable, Writer,
+BinReader, BinWriter, DeserializationMode, Error, PMMRable, ProtocolVersion, Readable, Reader,
+Writeable, Writer,
};
use crate::store::pmmr::PMMRBackend;
use chrono::Utc;
@@ -364,7 +365,11 @@ fn ser_round_trip() {
);
cursor.set_position(0);

-let mut reader = BinReader::new(&mut cursor, ProtocolVersion(1));
+let mut reader = BinReader::new(
+&mut cursor,
+ProtocolVersion(1),
+DeserializationMode::default(),
+);
let segment2: Segment<TestElem> = Readable::read(&mut reader).unwrap();
assert_eq!(segment, segment2);
