validate root of header MMR when processing headers (during sync and full blocks) (#1836)
parent e8f4c47178
commit 12be191ecd
26 changed files with 466 additions and 302 deletions
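In short: each block header commits, via its prev_root field, to the root of the header MMR as of its parent, and this commit makes both header sync and full-block processing validate that root before a header is applied. A conceptual sketch of the check (editorial, not part of the diff; validate_root, apply_header, root() and prev_root appear in the hunks below, while the function name and ErrorKind::InvalidRoot here are assumptions):

// Sketch only: what extension.validate_root(header) is expected to verify.
// With the header MMR rewound to the header's parent, the MMR root must
// match the prev_root the new header commits to.
fn check_header_root(ext: &txhashset::HeaderExtension, header: &BlockHeader) -> Result<(), Error> {
	// root of the header MMR up to and including the parent header
	let root = ext.root();
	if root != header.prev_root {
		return Err(ErrorKind::InvalidRoot.into()); // assumed error variant
	}
	Ok(())
}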
@@ -25,7 +25,7 @@ use hyper::{Body, Request, StatusCode};
 use auth::BasicAuthMiddleware;
 use chain;
 use core::core::hash::{Hash, Hashed};
-use core::core::{OutputFeatures, OutputIdentifier, Transaction};
+use core::core::{BlockHeader, OutputFeatures, OutputIdentifier, Transaction};
 use core::ser;
 use p2p;
 use p2p::types::{PeerInfoDisplay, ReasonForBan};
@@ -569,7 +569,9 @@ impl HeaderHandler {
 		}
 		if let Ok(height) = input.parse() {
 			match w(&self.chain).get_header_by_height(height) {
-				Ok(header) => return Ok(BlockHeaderPrintable::from_header(&header)),
+				Ok(header) => {
+					return self.convert_header(&header);
+				}
 				Err(_) => return Err(ErrorKind::NotFound)?,
 			}
 		}
@@ -580,13 +582,18 @@ impl HeaderHandler {
 		let header = w(&self.chain)
 			.get_block_header(&h)
 			.context(ErrorKind::NotFound)?;
-		Ok(BlockHeaderPrintable::from_header(&header))
+		self.convert_header(&header)
+	}
+
+	/// Convert a header into a "printable" version for json serialization.
+	fn convert_header(&self, header: &BlockHeader) -> Result<BlockHeaderPrintable, Error> {
+		return Ok(BlockHeaderPrintable::from_header(header));
 	}
 
 	fn get_header_for_output(&self, commit_id: String) -> Result<BlockHeaderPrintable, Error> {
 		let oid = get_output(&self.chain, &commit_id)?.1;
 		match w(&self.chain).get_header_for_output(&oid) {
-			Ok(header) => return Ok(BlockHeaderPrintable::from_header(&header)),
+			Ok(header) => return self.convert_header(&header),
 			Err(_) => return Err(ErrorKind::NotFound)?,
 		}
 	}
@@ -472,11 +472,11 @@ pub struct BlockHeaderInfo {
 }
 
 impl BlockHeaderInfo {
-	pub fn from_header(h: &core::BlockHeader) -> BlockHeaderInfo {
+	pub fn from_header(header: &core::BlockHeader) -> BlockHeaderInfo {
 		BlockHeaderInfo {
-			hash: util::to_hex(h.hash().to_vec()),
-			height: h.height,
-			previous: util::to_hex(h.previous.to_vec()),
+			hash: util::to_hex(header.hash().to_vec()),
+			height: header.height,
+			previous: util::to_hex(header.prev_hash.to_vec()),
 		}
 	}
 }

@@ -516,23 +516,23 @@ pub struct BlockHeaderPrintable {
 }
 
 impl BlockHeaderPrintable {
-	pub fn from_header(h: &core::BlockHeader) -> BlockHeaderPrintable {
+	pub fn from_header(header: &core::BlockHeader) -> BlockHeaderPrintable {
 		BlockHeaderPrintable {
-			hash: util::to_hex(h.hash().to_vec()),
-			version: h.version,
-			height: h.height,
-			previous: util::to_hex(h.previous.to_vec()),
-			prev_root: util::to_hex(h.prev_root.to_vec()),
-			timestamp: h.timestamp.to_rfc3339(),
-			output_root: util::to_hex(h.output_root.to_vec()),
-			range_proof_root: util::to_hex(h.range_proof_root.to_vec()),
-			kernel_root: util::to_hex(h.kernel_root.to_vec()),
-			nonce: h.pow.nonce,
-			edge_bits: h.pow.edge_bits(),
-			cuckoo_solution: h.pow.proof.nonces.clone(),
-			total_difficulty: h.pow.total_difficulty.to_num(),
-			secondary_scaling: h.pow.secondary_scaling,
-			total_kernel_offset: h.total_kernel_offset.to_hex(),
+			hash: util::to_hex(header.hash().to_vec()),
+			version: header.version,
+			height: header.height,
+			previous: util::to_hex(header.prev_hash.to_vec()),
+			prev_root: util::to_hex(header.prev_root.to_vec()),
+			timestamp: header.timestamp.to_rfc3339(),
+			output_root: util::to_hex(header.output_root.to_vec()),
+			range_proof_root: util::to_hex(header.range_proof_root.to_vec()),
+			kernel_root: util::to_hex(header.kernel_root.to_vec()),
+			nonce: header.pow.nonce,
+			edge_bits: header.pow.edge_bits(),
+			cuckoo_solution: header.pow.proof.nonces.clone(),
+			total_difficulty: header.pow.total_difficulty.to_num(),
+			secondary_scaling: header.pow.secondary_scaling,
+			total_kernel_offset: header.total_kernel_offset.to_hex(),
 		}
 	}
 }
@@ -181,13 +181,35 @@ impl Chain {
 
 		setup_head(genesis.clone(), store.clone(), &mut txhashset)?;
 
-		let head = store.head()?;
-		debug!(
-			"Chain init: {} @ {} [{}]",
-			head.total_difficulty.to_num(),
-			head.height,
-			head.last_block_h,
-		);
+		{
+			let head = store.head()?;
+			debug!(
+				"init: head: {} @ {} [{}]",
+				head.total_difficulty.to_num(),
+				head.height,
+				head.last_block_h,
+			);
+		}
+
+		{
+			let header_head = store.header_head()?;
+			debug!(
+				"init: header_head: {} @ {} [{}]",
+				header_head.total_difficulty.to_num(),
+				header_head.height,
+				header_head.last_block_h,
+			);
+		}
+
+		{
+			let sync_head = store.get_sync_head()?;
+			debug!(
+				"init: sync_head: {} @ {} [{}]",
+				sync_head.total_difficulty.to_num(),
+				sync_head.height,
+				sync_head.last_block_h,
+			);
+		}
 
 		Ok(Chain {
 			db_root: db_root,
@@ -475,15 +497,23 @@ impl Chain {
 		})
 	}
 
-	/// Sets the txhashset roots on a brand new block by applying the block on
-	/// the current txhashset state.
-	pub fn set_txhashset_roots(&self, b: &mut Block, is_fork: bool) -> Result<(), Error> {
+	/// *** Only used in tests. ***
+	/// Convenience for setting roots on a block header when
+	/// creating a chain fork during tests.
+	pub fn set_txhashset_roots_forked(
+		&self,
+		b: &mut Block,
+		prev: &BlockHeader,
+	) -> Result<(), Error> {
+		let prev_block = self.get_block(&prev.hash())?;
 		let mut txhashset = self.txhashset.write();
 		let (prev_root, roots, sizes) =
 			txhashset::extending_readonly(&mut txhashset, |extension| {
-				if is_fork {
-					pipe::rewind_and_apply_fork(b, extension)?;
-				}
+				// Put the txhashset in the correct state as of the previous block.
+				// We cannot use the new block to do this because we have no
+				// explicit previous linkage (and prev_root not yet setup).
+				pipe::rewind_and_apply_fork(&prev_block, extension)?;
+				extension.apply_block(&prev_block)?;
+
 				// Retrieve the header root before we apply the new block
 				let prev_root = extension.header_root();
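The fork-aware variant is now an explicit, test-only helper: callers pass the previous header, and the extension is rewound and the previous block re-applied before roots are computed. Usage, exactly as it appears in the updated test helpers later in this diff:

let mut b = _prepare_block_nosum(kc, prev, diff, vec![]);
chain.set_txhashset_roots_forked(&mut b, prev).unwrap();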
@@ -513,6 +543,40 @@ impl Chain {
 		Ok(())
 	}
 
+	/// Sets the txhashset roots on a brand new block by applying the block on
+	/// the current txhashset state.
+	pub fn set_txhashset_roots(&self, b: &mut Block) -> Result<(), Error> {
+		let mut txhashset = self.txhashset.write();
+		let (prev_root, roots, sizes) =
+			txhashset::extending_readonly(&mut txhashset, |extension| {
+				// Retrieve the header root before we apply the new block
+				let prev_root = extension.header_root();
+
+				// Apply the latest block to the chain state via the extension.
+				extension.apply_block(b)?;
+
+				Ok((prev_root, extension.roots(), extension.sizes()))
+			})?;
+
+		// Set the prev_root on the header.
+		b.header.prev_root = prev_root;
+
+		// Set the output, rangeproof and kernel MMR roots.
+		b.header.output_root = roots.output_root;
+		b.header.range_proof_root = roots.rproof_root;
+		b.header.kernel_root = roots.kernel_root;
+
+		// Set the output and kernel MMR sizes.
+		{
+			// Carefully destructure these correctly...
+			let (_, output_mmr_size, _, kernel_mmr_size) = sizes;
+			b.header.output_mmr_size = output_mmr_size;
+			b.header.kernel_mmr_size = kernel_mmr_size;
+		}
+
+		Ok(())
+	}
+
 	/// Return a pre-built Merkle proof for the given commitment from the store.
 	pub fn get_merkle_proof(
 		&self,
@@ -580,9 +644,7 @@ impl Chain {
 		header: &BlockHeader,
 		txhashset: &txhashset::TxHashSet,
 	) -> Result<(), Error> {
-		debug!(
-			"chain: validate_kernel_history: rewinding and validating kernel history (readonly)"
-		);
+		debug!("validate_kernel_history: rewinding and validating kernel history (readonly)");
 
 		let mut count = 0;
 		let mut current = header.clone();

@@ -590,14 +652,14 @@ impl Chain {
 			while current.height > 0 {
 				view.rewind(&current)?;
 				view.validate_root()?;
-				current = view.batch().get_block_header(&current.previous)?;
+				current = view.batch().get_previous_header(&current)?;
 				count += 1;
 			}
 			Ok(())
 		})?;
 
 		debug!(
-			"chain: validate_kernel_history: validated kernel root on {} headers",
+			"validate_kernel_history: validated kernel root on {} headers",
 			count,
 		);
 
@@ -667,13 +729,13 @@ impl Chain {
 
 		// The txhashset.zip contains the output, rangeproof and kernel MMRs.
 		// We must rebuild the header MMR ourselves based on the headers in our db.
-		self.rebuild_header_mmr(&Tip::from_block(&header), &mut txhashset)?;
+		self.rebuild_header_mmr(&Tip::from_header(&header), &mut txhashset)?;
 
 		// Validate the full kernel history (kernel MMR root for every block header).
 		self.validate_kernel_history(&header, &txhashset)?;
 
 		// all good, prepare a new batch and update all the required records
-		debug!("chain: txhashset_write: rewinding a 2nd time (writeable)");
+		debug!("txhashset_write: rewinding a 2nd time (writeable)");
 
 		let mut batch = self.store.batch()?;
 

@@ -697,13 +759,13 @@ impl Chain {
 			Ok(())
 		})?;
 
-		debug!("chain: txhashset_write: finished validating and rebuilding");
+		debug!("txhashset_write: finished validating and rebuilding");
 
 		status.on_save();
 
 		// Save the new head to the db and rebuild the header by height index.
 		{
-			let tip = Tip::from_block(&header);
+			let tip = Tip::from_header(&header);
 			batch.save_body_head(&tip)?;
 			batch.save_header_height(&header)?;
 			batch.build_by_height_index(&header, true)?;

@@ -712,7 +774,7 @@ impl Chain {
 		// Commit all the changes to the db.
 		batch.commit()?;
 
-		debug!("chain: txhashset_write: finished committing the batch (head etc.)");
+		debug!("txhashset_write: finished committing the batch (head etc.)");
 
 		// Replace the chain txhashset with the newly built one.
 		{

@@ -720,7 +782,7 @@ impl Chain {
 			*txhashset_ref = txhashset;
 		}
 
-		debug!("chain: txhashset_write: replaced our txhashset with the new one");
+		debug!("txhashset_write: replaced our txhashset with the new one");
 
 		// Check for any orphan blocks and process them based on the new chain state.
 		self.check_orphans(header.height + 1);

@@ -761,7 +823,7 @@ impl Chain {
 		let cutoff = head.height.saturating_sub(horizon);
 
 		debug!(
-			"chain: compact_blocks_db: head height: {}, horizon: {}, cutoff: {}",
+			"compact_blocks_db: head height: {}, horizon: {}, cutoff: {}",
 			head.height, horizon, cutoff,
 		);
 

@@ -791,14 +853,14 @@ impl Chain {
 			if current.height <= 1 {
 				break;
 			}
-			match batch.get_block_header(&current.previous) {
+			match batch.get_previous_header(&current) {
 				Ok(h) => current = h,
 				Err(NotFoundErr(_)) => break,
 				Err(e) => return Err(From::from(e)),
 			}
 		}
 		batch.commit()?;
-		debug!("chain: compact_blocks_db: removed {} blocks.", count);
+		debug!("compact_blocks_db: removed {} blocks.", count);
 		Ok(())
 	}
 
@@ -909,6 +971,13 @@ impl Chain {
 			.map_err(|e| ErrorKind::StoreErr(e, "chain get header".to_owned()).into())
 	}
 
+	/// Get previous block header.
+	pub fn get_previous_header(&self, header: &BlockHeader) -> Result<BlockHeader, Error> {
+		self.store
+			.get_previous_header(header)
+			.map_err(|e| ErrorKind::StoreErr(e, "chain get previous header".to_owned()).into())
+	}
+
 	/// Get block_sums by header hash.
 	pub fn get_block_sums(&self, h: &Hash) -> Result<BlockSums, Error> {
 		self.store
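The recurring mechanical change throughout this diff is that direct lookups via the header's previous hash are funneled through a get_previous_header helper. At the store level (added further down) it is a one-liner over the renamed prev_hash field, so lookups still go through the header cache:

// store-level helper (from this diff); call sites change from
// batch.get_block_header(&header.previous) to batch.get_previous_header(&header)
pub fn get_previous_header(&self, header: &BlockHeader) -> Result<BlockHeader, Error> {
	self.get_block_header(&header.prev_hash)
}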
@@ -928,12 +997,16 @@ impl Chain {
 		&self,
 		output_ref: &OutputIdentifier,
 	) -> Result<BlockHeader, Error> {
-		let mut txhashset = self.txhashset.write();
-		let (_, pos) = txhashset.is_unspent(output_ref)?;
+		let pos = {
+			let mut txhashset = self.txhashset.write();
+			let (_, pos) = txhashset.is_unspent(output_ref)?;
+			pos
+		};
 
 		let mut min = 1;
 		let mut max = {
-			let h = self.head()?;
-			h.height
+			let head = self.head()?;
+			head.height
 		};
 
 		loop {
@@ -1034,7 +1107,7 @@ fn setup_head(
 			if header.height > 0 && extension.batch.get_block_sums(&header.hash()).is_err()
 			{
 				debug!(
-					"chain: init: building (missing) block sums for {} @ {}",
+					"init: building (missing) block sums for {} @ {}",
 					header.height,
 					header.hash()
 				);

@@ -1054,7 +1127,7 @@ fn setup_head(
 			}
 
 			debug!(
-				"chain: init: rewinding and validating before we start... {} at {}",
+				"init: rewinding and validating before we start... {} at {}",
 				header.hash(),
 				header.height,
 			);

@@ -1070,27 +1143,33 @@ fn setup_head(
 					let prev_header = batch.get_block_header(&head.prev_block_h)?;
 					let _ = batch.delete_block(&header.hash());
 					let _ = batch.setup_height(&prev_header, &head)?;
-					head = Tip::from_block(&prev_header);
+					head = Tip::from_header(&prev_header);
 					batch.save_head(&head)?;
 				}
 			}
 		}
 		Err(NotFoundErr(_)) => {
+			// Save the genesis header with a "zero" header_root.
+			// We will update this later once we have the correct header_root.
+			batch.save_block_header(&genesis.header)?;
 			batch.save_block(&genesis)?;
-			let tip = Tip::from_block(&genesis.header);
+
+			let tip = Tip::from_header(&genesis.header);
 			batch.save_head(&tip)?;
 			batch.setup_height(&genesis.header, &tip)?;
 
-			// Apply the genesis block to our empty MMRs.
-			txhashset::extending(txhashset, &mut batch, |extension| {
-				extension.apply_block(&genesis)?;
+			// Initialize our header MMR with the genesis header.
+			txhashset::header_extending(txhashset, &mut batch, |extension| {
+				extension.apply_header(&genesis.header)?;
 				Ok(())
 			})?;
 
+			batch.save_block_header(&genesis.header)?;
+
 			// Save the block_sums to the db for use later.
 			batch.save_block_sums(&genesis.hash(), &BlockSums::default())?;
 
-			info!("chain: init: saved genesis: {:?}", genesis.hash());
+			info!("init: saved genesis: {:?}", genesis.hash());
 		}
 		Err(e) => return Err(ErrorKind::StoreErr(e, "chain init load head".to_owned()))?,
 	};
@@ -56,10 +56,35 @@ pub struct BlockContext<'a> {
 	pub orphans: Arc<OrphanBlockPool>,
 }
 
-// Check if this block is the next block *immediately*
-// after our current chain head.
-fn is_next_block(header: &BlockHeader, head: &Tip) -> bool {
-	header.previous == head.last_block_h
+/// Process a block header as part of processing a full block.
+/// We want to make sure the header is valid before we process the full block.
+fn process_header_for_block(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), Error> {
+	// If we do not have the previous header then treat the block for this header
+	// as an orphan.
+	if ctx.batch.get_previous_header(header).is_err() {
+		return Err(ErrorKind::Orphan.into());
+	}
+
+	txhashset::header_extending(&mut ctx.txhashset, &mut ctx.batch, |extension| {
+		extension.force_rollback();
+
+		// Optimize this if "next" header
+		rewind_and_apply_header_fork(header, extension)?;
+
+		// Check the current root is correct.
+		extension.validate_root(header)?;
+
+		// Apply the new header to our header extension.
+		extension.apply_header(header)?;
+
+		Ok(())
+	})?;
+
+	validate_header(header, ctx)?;
+	add_block_header(header, &ctx.batch)?;
+	update_header_head(header, ctx)?;
+
+	Ok(())
 }
 
 /// Runs the block processing pipeline, including validation and finding a
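Condensed, the new header-first flow inside process_header_for_block is: rewind/reapply the header fork, check the root, then apply. An annotated restatement of the closure above (the comments are editorial, the calls are from the diff):

txhashset::header_extending(&mut ctx.txhashset, &mut ctx.batch, |extension| {
	extension.force_rollback();                       // validation only: MMR changes are discarded
	rewind_and_apply_header_fork(header, extension)?; // put the header MMR at the header's parent
	extension.validate_root(header)?;                 // header.prev_root must match the MMR root
	extension.apply_header(header)?;                  // then apply the new header on top
	Ok(())
})?;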
@@ -70,7 +95,7 @@ pub fn process_block(b: &Block, ctx: &mut BlockContext) -> Result<Option<Tip>, E
 	// spend resources reading the full block when its header is invalid
 
 	debug!(
-		"pipe: process_block {} at {} with {} inputs, {} outputs, {} kernels",
+		"pipe: process_block {} at {}, in/out/kern: {}/{}/{}",
 		b.hash(),
 		b.header.height,
 		b.inputs().len(),

@@ -96,15 +121,12 @@ pub fn process_block(b: &Block, ctx: &mut BlockContext) -> Result<Option<Tip>, E
 	}
 
 	// Header specific processing.
-	{
-		validate_header(&b.header, ctx)?;
-		add_block_header(&b.header, ctx)?;
-		update_header_head(&b.header, ctx)?;
-	}
+	process_header_for_block(&b.header, ctx)?;
 
 	// Check if are processing the "next" block relative to the current chain head.
+	let prev_header = ctx.batch.get_previous_header(&b.header)?;
 	let head = ctx.batch.head()?;
-	if is_next_block(&b.header, &head) {
+	if prev_header.hash() == head.last_block_h {
 		// If this is the "next" block then either -
 		// * common case where we process blocks sequentially.
 		// * special case where this is the first fast sync full block

@@ -123,11 +145,9 @@ pub fn process_block(b: &Block, ctx: &mut BlockContext) -> Result<Option<Tip>, E
 	// Start a chain extension unit of work dependent on the success of the
 	// internal validation and saving operations
 	txhashset::extending(&mut ctx.txhashset, &mut ctx.batch, |mut extension| {
-		// First we rewind the txhashset extension if necessary
-		// to put it into a consistent state for validating the block.
-		// We can skip this step if the previous header is the latest header we saw.
-		if is_next_block(&b.header, &head) {
-			// No need to rewind if we are processing the next block.
+		let prev = extension.batch.get_previous_header(&b.header)?;
+		if prev.hash() == head.last_block_h {
+			// Not a fork so we do not need to rewind or reapply any blocks.
 		} else {
 			// Rewind and re-apply blocks on the forked chain to
 			// put the txhashset in the correct forked state

@@ -165,14 +185,10 @@ pub fn process_block(b: &Block, ctx: &mut BlockContext) -> Result<Option<Tip>, E
 		Ok(())
 	})?;
 
-	trace!(
-		"pipe: process_block: {} at {} is valid, save and append.",
-		b.hash(),
-		b.header.height,
-	);
-
-	// Add the newly accepted block and header to our index.
-	add_block(b, ctx)?;
+	// Add the validated block to the db.
+	// We do this even if we have not increased the total cumulative work
+	// so we can maintain multiple (in progress) forks.
+	add_block(b, &ctx.batch)?;
 
 	// Update the chain head if total work is increased.
 	let res = update_head(b, ctx)?;
@@ -203,24 +219,30 @@ pub fn sync_block_headers(
 	};
 
 	if !all_known {
-		for header in headers {
-			validate_header(header, ctx)?;
-			add_block_header(header, ctx)?;
-		}
-
 		let first_header = headers.first().unwrap();
-		let prev_header = ctx.batch.get_block_header(&first_header.previous)?;
+		let prev_header = ctx.batch.get_previous_header(&first_header)?;
 		txhashset::sync_extending(&mut ctx.txhashset, &mut ctx.batch, |extension| {
-			// Optimize this if "next" header
 			extension.rewind(&prev_header)?;
 
 			for header in headers {
+				// Check the current root is correct.
 				extension.validate_root(header)?;
+
+				// Apply the header to the header MMR.
 				extension.apply_header(header)?;
+
+				// Save the header to the db.
+				add_block_header(header, &extension.batch)?;
 			}
 
 			Ok(())
 		})?;
+
+		// Validate all our headers now that we have added each "previous"
+		// header to the db in this batch above.
+		for header in headers {
+			validate_header(header, ctx)?;
+		}
 	}
 
 	// Update header_head (if most work) and sync_head (regardless) in all cases,
|
@ -329,7 +351,8 @@ fn check_known_store(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(),
|
||||||
// We cannot assume we can use the chain head for this
|
// We cannot assume we can use the chain head for this
|
||||||
// as we may be dealing with a fork (with less work currently).
|
// as we may be dealing with a fork (with less work currently).
|
||||||
fn check_prev_store(header: &BlockHeader, batch: &mut store::Batch) -> Result<(), Error> {
|
fn check_prev_store(header: &BlockHeader, batch: &mut store::Batch) -> Result<(), Error> {
|
||||||
match batch.block_exists(&header.previous) {
|
let prev = batch.get_previous_header(&header)?;
|
||||||
|
match batch.block_exists(&prev.hash()) {
|
||||||
Ok(true) => {
|
Ok(true) => {
|
||||||
// We have the previous block in the store, so we can proceed.
|
// We have the previous block in the store, so we can proceed.
|
||||||
Ok(())
|
Ok(())
|
||||||
|
@@ -381,13 +404,14 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
 	}
 
 	// first I/O cost, better as late as possible
-	let prev = match ctx.batch.get_block_header(&header.previous) {
+	let prev = match ctx.batch.get_previous_header(&header) {
 		Ok(prev) => prev,
 		Err(grin_store::Error::NotFoundErr(_)) => return Err(ErrorKind::Orphan.into()),
 		Err(e) => {
-			return Err(
-				ErrorKind::StoreErr(e, format!("previous header {}", header.previous)).into(),
-			)
+			return Err(ErrorKind::StoreErr(
+				e,
+				format!("Failed to find previous header to {}", header.hash()),
+			).into())
 		}
 	};
 

@@ -424,8 +448,9 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
 		// explicit check to ensure total_difficulty has increased by exactly
 		// the _network_ difficulty of the previous block
 		// (during testnet1 we use _block_ difficulty here)
+		let prev = ctx.batch.get_previous_header(&header)?;
 		let child_batch = ctx.batch.child()?;
-		let diff_iter = store::DifficultyIter::from_batch(header.previous, child_batch);
+		let diff_iter = store::DifficultyIter::from_batch(prev.hash(), child_batch);
 		let next_header_info = consensus::next_difficulty(header.height, diff_iter);
 		if target_difficulty != next_header_info.difficulty {
 			info!(

@@ -439,8 +464,7 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
 		if header.pow.secondary_scaling != next_header_info.secondary_scaling {
 			info!(
 				"validate_header: header secondary scaling {} != {}",
-				header.pow.secondary_scaling,
-				next_header_info.secondary_scaling
+				header.pow.secondary_scaling, next_header_info.secondary_scaling
 			);
 			return Err(ErrorKind::InvalidScaling.into());
 		}

@@ -450,7 +474,7 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
 }
 
 fn validate_block(block: &Block, ctx: &mut BlockContext) -> Result<(), Error> {
-	let prev = ctx.batch.get_block_header(&block.header.previous)?;
+	let prev = ctx.batch.get_previous_header(&block.header)?;
 	block
 		.validate(&prev.total_kernel_offset, ctx.verifier_cache.clone())
 		.map_err(|e| ErrorKind::InvalidBlockProof(e))?;
@@ -472,8 +496,10 @@ fn verify_coinbase_maturity(block: &Block, ext: &mut txhashset::Extension) -> Re
 /// based on block_sums of previous block, accounting for the inputs|outputs|kernels
 /// of the new block.
 fn verify_block_sums(b: &Block, ext: &mut txhashset::Extension) -> Result<(), Error> {
+	// TODO - this is 2 db calls, can we optimize this?
 	// Retrieve the block_sums for the previous block.
-	let block_sums = ext.batch.get_block_sums(&b.header.previous)?;
+	let prev = ext.batch.get_previous_header(&b.header)?;
+	let block_sums = ext.batch.get_block_sums(&prev.hash())?;
 
 	// Overage is based purely on the new block.
 	// Previous block_sums have taken all previous overage into account.

@@ -509,22 +535,20 @@ fn apply_block_to_txhashset(block: &Block, ext: &mut txhashset::Extension) -> Re
 }
 
 /// Officially adds the block to our chain.
-fn add_block(b: &Block, ctx: &mut BlockContext) -> Result<(), Error> {
-	// Save the block itself to the db (via the batch).
-	ctx.batch
+/// Header must be added separately (assume this has been done previously).
+fn add_block(b: &Block, batch: &store::Batch) -> Result<(), Error> {
+	batch
 		.save_block(b)
 		.map_err(|e| ErrorKind::StoreErr(e, "pipe save block".to_owned()))?;
-
-	// Build the block_input_bitmap, save to the db (via the batch) and cache locally.
-	ctx.batch.build_and_cache_block_input_bitmap(&b)?;
 	Ok(())
 }
 
 /// Officially adds the block header to our header chain.
-fn add_block_header(bh: &BlockHeader, ctx: &mut BlockContext) -> Result<(), Error> {
-	ctx.batch
+fn add_block_header(bh: &BlockHeader, batch: &store::Batch) -> Result<(), Error> {
+	batch
 		.save_block_header(bh)
-		.map_err(|e| ErrorKind::StoreErr(e, "pipe save header".to_owned()).into())
+		.map_err(|e| ErrorKind::StoreErr(e, "pipe save header".to_owned()))?;
+	Ok(())
 }
 
 /// Directly updates the head if we've just appended a new block to it or handle
@@ -540,7 +564,7 @@ fn update_head(b: &Block, ctx: &BlockContext) -> Result<Option<Tip>, Error> {
 		.setup_height(&b.header, &head)
 		.map_err(|e| ErrorKind::StoreErr(e, "pipe setup height".to_owned()))?;
 
-	let tip = Tip::from_block(&b.header);
+	let tip = Tip::from_header(&b.header);
 
 	ctx.batch
 		.save_body_head(&tip)

@@ -564,7 +588,7 @@ fn has_more_work(header: &BlockHeader, head: &Tip) -> bool {
 
 /// Update the sync head so we can keep syncing from where we left off.
 fn update_sync_head(bh: &BlockHeader, batch: &mut store::Batch) -> Result<(), Error> {
-	let tip = Tip::from_block(bh);
+	let tip = Tip::from_header(bh);
 	batch
 		.save_sync_head(&tip)
 		.map_err(|e| ErrorKind::StoreErr(e, "pipe save sync head".to_owned()))?;

@@ -576,7 +600,7 @@ fn update_sync_head(bh: &BlockHeader, batch: &mut store::Batch) -> Result<(), Er
 fn update_header_head(bh: &BlockHeader, ctx: &mut BlockContext) -> Result<Option<Tip>, Error> {
 	let header_head = ctx.batch.header_head()?;
 	if has_more_work(&bh, &header_head) {
-		let tip = Tip::from_block(bh);
+		let tip = Tip::from_header(bh);
 		ctx.batch
 			.save_header_head(&tip)
 			.map_err(|e| ErrorKind::StoreErr(e, "pipe save header head".to_owned()))?;
@@ -592,6 +616,35 @@ fn update_header_head(bh: &BlockHeader, ctx: &mut BlockContext) -> Result<Option
 	}
 }
 
+/// Rewind the header chain and reapply headers on a fork.
+pub fn rewind_and_apply_header_fork(
+	header: &BlockHeader,
+	ext: &mut txhashset::HeaderExtension,
+) -> Result<(), Error> {
+	let mut fork_hashes = vec![];
+	let mut current = ext.batch.get_previous_header(header)?;
+	while current.height > 0 && !ext.batch.is_on_current_chain(&current).is_ok() {
+		fork_hashes.push(current.hash());
+		current = ext.batch.get_previous_header(&current)?;
+	}
+	fork_hashes.reverse();
+
+	let forked_header = current;
+
+	// Rewind the txhashset state back to the block where we forked from the most work chain.
+	ext.rewind(&forked_header)?;
+
+	// Re-apply all headers on this fork.
+	for h in fork_hashes {
+		let header = ext
+			.batch
+			.get_block_header(&h)
+			.map_err(|e| ErrorKind::StoreErr(e, format!("getting forked headers")))?;
+		ext.apply_header(&header)?;
+	}
+	Ok(())
+}
+
 /// Utility function to handle forks. From the forked block, jump backward
 /// to find to fork root. Rewind the txhashset to the root and apply all the
 /// forked blocks prior to the one being processed to set the txhashset in
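rewind_and_apply_header_fork deliberately mirrors rewind_and_apply_fork below: walk back via get_previous_header until a header on the current chain is reached, then rewind and re-apply headers instead of full blocks. One readability nit in both loops (editorial suggestion only, behavior is identical): !x.is_ok() reads more directly as x.is_err():

// equivalent loop condition, suggested phrasing only
while current.height > 0 && ext.batch.is_on_current_chain(&current).is_err() {
	fork_hashes.push(current.hash());
	current = ext.batch.get_previous_header(&current)?;
}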
@@ -599,36 +652,21 @@ fn update_header_head(bh: &BlockHeader, ctx: &mut BlockContext) -> Result<Option
 pub fn rewind_and_apply_fork(b: &Block, ext: &mut txhashset::Extension) -> Result<(), Error> {
 	// extending a fork, first identify the block where forking occurred
 	// keeping the hashes of blocks along the fork
-	let mut current = b.header.previous;
 	let mut fork_hashes = vec![];
-	loop {
-		let curr_header = ext.batch.get_block_header(&current)?;
-
-		if let Ok(_) = ext.batch.is_on_current_chain(&curr_header) {
-			break;
-		} else {
-			fork_hashes.insert(0, (curr_header.height, curr_header.hash()));
-			current = curr_header.previous;
-		}
+	let mut current = ext.batch.get_previous_header(&b.header)?;
+	while current.height > 0 && !ext.batch.is_on_current_chain(&current).is_ok() {
+		fork_hashes.push(current.hash());
+		current = ext.batch.get_previous_header(&current)?;
 	}
+	fork_hashes.reverse();
 
-	let forked_header = ext.batch.get_block_header(&current)?;
-
-	trace!(
-		"rewind_and_apply_fork @ {} [{}], was @ {} [{}]",
-		forked_header.height,
-		forked_header.hash(),
-		b.header.height,
-		b.header.hash()
-	);
+	let forked_header = current;
 
 	// Rewind the txhashset state back to the block where we forked from the most work chain.
 	ext.rewind(&forked_header)?;
 
-	trace!("rewind_and_apply_fork: blocks on fork: {:?}", fork_hashes,);
-
 	// Now re-apply all blocks on this fork.
-	for (_, h) in fork_hashes {
+	for h in fork_hashes {
 		let fb = ext
 			.batch
 			.get_block(&h)
@@ -122,12 +122,16 @@ impl ChainStore {
 		}
 	}
 
+	pub fn get_previous_header(&self, header: &BlockHeader) -> Result<BlockHeader, Error> {
+		self.get_block_header(&header.prev_hash)
+	}
+
 	pub fn get_block_header(&self, h: &Hash) -> Result<BlockHeader, Error> {
 		{
-			let mut header_cache = self.header_cache.write();
+			let mut cache = self.header_cache.write();
 
 			// cache hit - return the value from the cache
-			if let Some(header) = header_cache.get_mut(h) {
+			if let Some(header) = cache.get_mut(h) {
 				return Ok(header.clone());
 			}
 		}

@@ -141,8 +145,8 @@ impl ChainStore {
 		// cache miss - so adding to the cache for next time
 		if let Ok(header) = header {
 			{
-				let mut header_cache = self.header_cache.write();
-				header_cache.insert(*h, header.clone());
+				let mut cache = self.header_cache.write();
+				cache.insert(*h, header.clone());
 			}
 			Ok(header)
 		} else {

@@ -283,11 +287,16 @@ impl<'a> Batch<'a> {
 		self.db.exists(&to_key(BLOCK_PREFIX, &mut h.to_vec()))
 	}
 
-	/// Save the block and its header, caching the header.
+	/// Save the block and the associated input bitmap.
+	/// Note: the block header is not saved to the db here, assumes this has already been done.
 	pub fn save_block(&self, b: &Block) -> Result<(), Error> {
-		self.save_block_header(&b.header)?;
+		// Build the "input bitmap" for this new block and cache it locally.
+		self.build_and_cache_block_input_bitmap(&b)?;
+
+		// Save the block itself to the db.
 		self.db
 			.put_ser(&to_key(BLOCK_PREFIX, &mut b.hash().to_vec())[..], b)?;
 
 		Ok(())
 	}
 

@@ -310,13 +319,16 @@ impl<'a> Batch<'a> {
 	pub fn save_block_header(&self, header: &BlockHeader) -> Result<(), Error> {
 		let hash = header.hash();
 
+		// Cache the header.
 		{
 			let mut header_cache = self.header_cache.write();
 			header_cache.insert(hash, header.clone());
 		}
 
+		// Store the header itself indexed by hash.
 		self.db
 			.put_ser(&to_key(BLOCK_HEADER_PREFIX, &mut hash.to_vec())[..], header)?;
 
 		Ok(())
 	}
 

@@ -349,12 +361,16 @@ impl<'a> Batch<'a> {
 			.delete(&to_key(COMMIT_POS_PREFIX, &mut commit.to_vec()))
 	}
 
+	pub fn get_previous_header(&self, header: &BlockHeader) -> Result<BlockHeader, Error> {
+		self.get_block_header(&header.prev_hash)
+	}
+
 	pub fn get_block_header(&self, h: &Hash) -> Result<BlockHeader, Error> {
 		{
-			let mut header_cache = self.header_cache.write();
+			let mut cache = self.header_cache.write();
 
 			// cache hit - return the value from the cache
-			if let Some(header) = header_cache.get_mut(h) {
+			if let Some(header) = cache.get_mut(h) {
 				return Ok(header.clone());
 			}
 		}

@@ -368,8 +384,8 @@ impl<'a> Batch<'a> {
 		// cache miss - so adding to the cache for next time
 		if let Ok(header) = header {
 			{
-				let mut header_cache = self.header_cache.write();
-				header_cache.insert(*h, header.clone());
+				let mut cache = self.header_cache.write();
+				cache.insert(*h, header.clone());
 			}
 			Ok(header)
 		} else {
@@ -479,7 +495,7 @@ impl<'a> Batch<'a> {
 		self.save_header_height(&header)?;
 
 		if header.height > 0 {
-			let mut prev_header = self.get_block_header(&header.previous)?;
+			let mut prev_header = self.get_previous_header(&header)?;
 			while prev_header.height > 0 {
 				if !force {
 					if let Ok(_) = self.is_on_current_chain(&prev_header) {

@@ -488,7 +504,7 @@ impl<'a> Batch<'a> {
 				}
 				self.save_header_height(&prev_header)?;
 
-				prev_header = self.get_block_header(&prev_header.previous)?;
+				prev_header = self.get_previous_header(&prev_header)?;
 			}
 		}
 		Ok(())

@@ -504,7 +520,7 @@ impl<'a> Batch<'a> {
 		Ok(bitmap)
 	}
 
-	pub fn build_and_cache_block_input_bitmap(&self, block: &Block) -> Result<Bitmap, Error> {
+	fn build_and_cache_block_input_bitmap(&self, block: &Block) -> Result<Bitmap, Error> {
 		// Build the bitmap.
 		let bitmap = self.build_block_input_bitmap(block)?;
 

@@ -637,10 +653,10 @@ impl<'a> Iterator for DifficultyIter<'a> {
 		// Otherwise we are done.
 		if let Some(header) = self.header.clone() {
 			if let Some(ref batch) = self.batch {
-				self.prev_header = batch.get_block_header(&header.previous).ok();
+				self.prev_header = batch.get_previous_header(&header).ok();
 			} else {
 				if let Some(ref store) = self.store {
-					self.prev_header = store.get_block_header(&header.previous).ok();
+					self.prev_header = store.get_previous_header(&header).ok();
 				} else {
 					self.prev_header = None;
 				}
@@ -544,9 +544,9 @@ where
 	let res: Result<T, Error>;
 	let rollback: bool;
 
-	// We want to use the current head of the header chain unless
+	// We want to use the current head of the most work chain unless
 	// we explicitly rewind the extension.
-	let head = batch.header_head()?;
+	let head = batch.head()?;
 	let header = batch.get_block_header(&head.last_block_h)?;
 
 	// create a child transaction so if the state is rolled back by itself, all

@@ -620,13 +620,18 @@ impl<'a> HeaderExtension<'a> {
 		}
 	}
 
+	/// Force the rollback of this extension, no matter the result.
+	pub fn force_rollback(&mut self) {
+		self.rollback = true;
+	}
+
 	/// Apply a new header to the header MMR extension.
 	/// This may be either the header MMR or the sync MMR depending on the
 	/// extension.
-	pub fn apply_header(&mut self, header: &BlockHeader) -> Result<(), Error> {
-		self.pmmr.push(&header).map_err(&ErrorKind::TxHashSetErr)?;
+	pub fn apply_header(&mut self, header: &BlockHeader) -> Result<Hash, Error> {
+		self.pmmr.push(header).map_err(&ErrorKind::TxHashSetErr)?;
 		self.header = header.clone();
-		Ok(())
+		Ok(self.root())
 	}
 
 	/// Rewind the header extension to the specified header.

@@ -676,7 +681,7 @@ impl<'a> HeaderExtension<'a> {
 		let mut current = self.batch.get_block_header(&head.last_block_h)?;
 		while current.height > 0 {
 			header_hashes.push(current.hash());
-			current = self.batch.get_block_header(&current.previous)?;
+			current = self.batch.get_previous_header(&current)?;
 		}
 
 		header_hashes.reverse();
@@ -14,7 +14,7 @@
 
 //! Base types that the block chain pipeline requires.
 
-use core::core::hash::{Hash, Hashed};
+use core::core::hash::{Hash, Hashed, ZERO_HASH};
 use core::core::{Block, BlockHeader};
 use core::pow::Difficulty;
 use core::ser;

@@ -57,30 +57,32 @@ pub struct Tip {
 	pub height: u64,
 	/// Last block pushed to the fork
 	pub last_block_h: Hash,
-	/// Block previous to last
+	/// Previous block
 	pub prev_block_h: Hash,
 	/// Total difficulty accumulated on that fork
 	pub total_difficulty: Difficulty,
 }
 
 impl Tip {
-	/// Creates a new tip at height zero and the provided genesis hash.
-	pub fn new(gbh: Hash) -> Tip {
+	/// TODO - why do we have Tip when we could just use a block header?
+	/// Creates a new tip based on header.
+	pub fn from_header(header: &BlockHeader) -> Tip {
 		Tip {
-			height: 0,
-			last_block_h: gbh,
-			prev_block_h: gbh,
-			total_difficulty: Difficulty::min(),
+			height: header.height,
+			last_block_h: header.hash(),
+			prev_block_h: header.prev_hash,
+			total_difficulty: header.total_difficulty(),
 		}
 	}
+}
 
-	/// Append a new block to this tip, returning a new updated tip.
-	pub fn from_block(bh: &BlockHeader) -> Tip {
+impl Default for Tip {
+	fn default() -> Self {
 		Tip {
-			height: bh.height,
-			last_block_h: bh.hash(),
-			prev_block_h: bh.previous,
-			total_difficulty: bh.total_difficulty(),
+			height: 0,
+			last_block_h: ZERO_HASH,
+			prev_block_h: ZERO_HASH,
+			total_difficulty: Difficulty::min(),
 		}
 	}
 }
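Tip::new(genesis_hash) and Tip::from_block(&header) collapse into a single Tip::from_header(&header), with a Default impl covering the empty state. Usage sketch (hedged; mirrors the call sites updated throughout this diff):

let tip = Tip::from_header(&genesis.header); // replaces Tip::new(genesis.hash())
let tip = Tip::from_header(&b.header);       // replaces Tip::from_block(&b.header)
let zero = Tip::default();                   // ZERO_HASH placeholders, Difficulty::min()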
@@ -92,7 +92,7 @@ fn data_files() {
 		b.header.timestamp = prev.timestamp + Duration::seconds(60);
 		b.header.pow.secondary_scaling = next_header_info.secondary_scaling;
 
-		chain.set_txhashset_roots(&mut b, false).unwrap();
+		chain.set_txhashset_roots(&mut b).unwrap();
 
 		pow::pow_size(
 			&mut b.header,

@@ -101,7 +101,6 @@ fn data_files() {
 			global::min_edge_bits(),
 		).unwrap();
 
-		let _bhash = b.hash();
 		chain
 			.process_block(b.clone(), chain::Options::MINE)
 			.unwrap();

@@ -118,7 +117,7 @@ fn data_files() {
 
 fn _prepare_block(kc: &ExtKeychain, prev: &BlockHeader, chain: &Chain, diff: u64) -> Block {
 	let mut b = _prepare_block_nosum(kc, prev, diff, vec![]);
-	chain.set_txhashset_roots(&mut b, false).unwrap();
+	chain.set_txhashset_roots(&mut b).unwrap();
 	b
 }
 
@@ -130,13 +129,13 @@ fn _prepare_block_tx(
 	txs: Vec<&Transaction>,
 ) -> Block {
 	let mut b = _prepare_block_nosum(kc, prev, diff, txs);
-	chain.set_txhashset_roots(&mut b, false).unwrap();
+	chain.set_txhashset_roots(&mut b).unwrap();
 	b
 }
 
 fn _prepare_fork_block(kc: &ExtKeychain, prev: &BlockHeader, chain: &Chain, diff: u64) -> Block {
 	let mut b = _prepare_block_nosum(kc, prev, diff, vec![]);
-	chain.set_txhashset_roots(&mut b, true).unwrap();
+	chain.set_txhashset_roots_forked(&mut b, prev).unwrap();
 	b
 }
 

@@ -148,7 +147,7 @@ fn _prepare_fork_block_tx(
 	txs: Vec<&Transaction>,
 ) -> Block {
 	let mut b = _prepare_block_nosum(kc, prev, diff, txs);
-	chain.set_txhashset_roots(&mut b, true).unwrap();
+	chain.set_txhashset_roots_forked(&mut b, prev).unwrap();
 	b
 }
 
@ -74,7 +74,7 @@ fn mine_empty_chain() {
|
||||||
b.header.timestamp = prev.timestamp + Duration::seconds(60);
|
b.header.timestamp = prev.timestamp + Duration::seconds(60);
|
||||||
b.header.pow.secondary_scaling = next_header_info.secondary_scaling;
|
b.header.pow.secondary_scaling = next_header_info.secondary_scaling;
|
||||||
|
|
||||||
chain.set_txhashset_roots(&mut b, false).unwrap();
|
chain.set_txhashset_roots(&mut b).unwrap();
|
||||||
|
|
||||||
let edge_bits = if n == 2 {
|
let edge_bits = if n == 2 {
|
||||||
global::min_edge_bits() + 1
|
global::min_edge_bits() + 1
|
||||||
|
@@ -201,45 +201,35 @@ fn longer_fork() {
     // then send back on the 1st
     let genesis = pow::mine_genesis_block().unwrap();
     let chain = setup(".grin4", genesis.clone());
-    let chain_fork = setup(".grin5", genesis);
 
     // add blocks to both chains, 20 on the main one, only the first 5
     // for the forked chain
     let mut prev = chain.head_header().unwrap();
     for n in 0..10 {
         let b = prepare_block(&kc, &prev, &chain, 2 * n + 2);
-        let bh = b.header.clone();
+        prev = b.header.clone();
 
-        if n < 5 {
-            chain_fork
-                .process_block(b.clone(), chain::Options::SKIP_POW)
-                .unwrap();
-        }
-
         chain.process_block(b, chain::Options::SKIP_POW).unwrap();
-        prev = bh;
     }
 
-    // check both chains are in the expected state
+    let forked_block = chain.get_header_by_height(5).unwrap();
 
     let head = chain.head_header().unwrap();
     assert_eq!(head.height, 10);
     assert_eq!(head.hash(), prev.hash());
-    let head_fork = chain_fork.head_header().unwrap();
-    assert_eq!(head_fork.height, 5);
 
-    let mut prev_fork = head_fork.clone();
+    let mut prev = forked_block;
     for n in 0..7 {
-        let b_fork = prepare_block(&kc, &prev_fork, &chain_fork, 2 * n + 11);
-        let bh_fork = b_fork.header.clone();
+        let b = prepare_fork_block(&kc, &prev, &chain, 2 * n + 11);
+        prev = b.header.clone();
 
-        let b = b_fork.clone();
         chain.process_block(b, chain::Options::SKIP_POW).unwrap();
-
-        chain_fork
-            .process_block(b_fork, chain::Options::SKIP_POW)
-            .unwrap();
-        prev_fork = bh_fork;
     }
 
+    let new_head = prev;
 
+    // After all this the chain should have switched to the fork.
+    let head = chain.head_header().unwrap();
+    assert_eq!(head.height, 12);
+    assert_eq!(head.hash(), new_head.hash());
 }
 
 #[test]
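The rewritten test now exercises the fork within a single chain instance rather than two stores: it mines 10 blocks, then builds 7 fork blocks from the header at height 5 via prepare_fork_block. A self-contained sanity check of the heights involved:

    // Main chain: 10 blocks on top of genesis -> head at height 10.
    // Fork: starts from the header at height 5, adds 7 blocks.
    let fork_base = 5u64;
    let fork_blocks = 7u64;
    assert_eq!(fork_base + fork_blocks, 12); // 12 > 10, so the fork wins the reorg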
@@ -398,7 +388,7 @@ fn output_header_mappings() {
         b.header.timestamp = prev.timestamp + Duration::seconds(60);
         b.header.pow.secondary_scaling = next_header_info.secondary_scaling;
 
-        chain.set_txhashset_roots(&mut b, false).unwrap();
+        chain.set_txhashset_roots(&mut b).unwrap();
 
         let edge_bits = if n == 2 {
             global::min_edge_bits() + 1
@@ -432,12 +422,13 @@ fn output_header_mappings() {
         assert_eq!(header_for_output.height, n as u64);
     }
 }
 
 fn prepare_block<K>(kc: &K, prev: &BlockHeader, chain: &Chain, diff: u64) -> Block
 where
     K: Keychain,
 {
     let mut b = prepare_block_nosum(kc, prev, diff, vec![]);
-    chain.set_txhashset_roots(&mut b, false).unwrap();
+    chain.set_txhashset_roots(&mut b).unwrap();
     b
 }
@@ -452,7 +443,7 @@ where
     K: Keychain,
 {
     let mut b = prepare_block_nosum(kc, prev, diff, txs);
-    chain.set_txhashset_roots(&mut b, false).unwrap();
+    chain.set_txhashset_roots(&mut b).unwrap();
     b
 }
@@ -461,7 +452,7 @@ where
     K: Keychain,
 {
     let mut b = prepare_block_nosum(kc, prev, diff, vec![]);
-    chain.set_txhashset_roots(&mut b, true).unwrap();
+    chain.set_txhashset_roots_forked(&mut b, prev).unwrap();
     b
 }
@@ -476,7 +467,7 @@ where
     K: Keychain,
 {
     let mut b = prepare_block_nosum(kc, prev, diff, txs);
-    chain.set_txhashset_roots(&mut b, true).unwrap();
+    chain.set_txhashset_roots_forked(&mut b, prev).unwrap();
     b
 }
@@ -23,7 +23,7 @@ extern crate rand;
 use std::fs;
 use std::sync::Arc;
 
-use chain::Tip;
+use chain::{Error, Tip};
 use core::core::hash::Hashed;
 use core::core::{Block, BlockHeader};
 use core::global::{self, ChainTypes};
@@ -35,6 +35,18 @@ fn clean_output_dir(dir_name: &str) {
     let _ = fs::remove_dir_all(dir_name);
 }
 
+fn setup_chain(genesis: &Block, chain_store: Arc<chain::store::ChainStore>) -> Result<(), Error> {
+    let batch = chain_store.batch()?;
+    batch.save_block_header(&genesis.header)?;
+    batch.save_block(&genesis)?;
+    let head = Tip::from_header(&genesis.header);
+    batch.save_head(&head)?;
+    batch.setup_height(&genesis.header, &head)?;
+    batch.save_block_header(&genesis.header)?;
+    batch.commit()?;
+    Ok(())
+}
+
 #[test]
 fn test_various_store_indices() {
     match env_logger::try_init() {
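The new setup_chain helper gives each store test a consistently initialized genesis state (header, block, head and height index) before the test writes its own batches. Because it returns Result<(), Error>, the batched writes can use the ? operator internally and callers unwrap once. The call pattern shared by both updated tests below, shown as a short sketch:

    let chain_store = Arc::new(chain::store::ChainStore::new(db_env).unwrap());
    let genesis = pow::mine_genesis_block().unwrap();
    // One unwrap at the call site; errors inside the helper propagate via `?`.
    setup_chain(&genesis, chain_store.clone()).unwrap();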
@@ -48,29 +60,24 @@ fn test_various_store_indices() {
     let key_id = ExtKeychainPath::new(1, 1, 0, 0, 0).to_identifier();
     let db_env = Arc::new(store::new_env(chain_dir.to_string()));
 
-    let chain_store = chain::store::ChainStore::new(db_env).unwrap();
+    let chain_store = Arc::new(chain::store::ChainStore::new(db_env).unwrap());
 
     global::set_mining_mode(ChainTypes::AutomatedTesting);
     let genesis = pow::mine_genesis_block().unwrap();
-    let reward = libtx::reward::output(&keychain, &key_id, 0, 1).unwrap();
 
+    setup_chain(&genesis, chain_store.clone()).unwrap();
 
+    let reward = libtx::reward::output(&keychain, &key_id, 0, 1).unwrap();
     let block = Block::new(&genesis.header, vec![], Difficulty::min(), reward).unwrap();
     let block_hash = block.hash();
 
     {
         let batch = chain_store.batch().unwrap();
-        batch.save_block(&genesis).unwrap();
-        batch
-            .setup_height(&genesis.header, &Tip::new(genesis.hash()))
-            .unwrap();
-        batch.commit().unwrap();
-    }
-    {
-        let batch = chain_store.batch().unwrap();
+        batch.save_block_header(&block.header).unwrap();
         batch.save_block(&block).unwrap();
         batch
-            .setup_height(&block.header, &Tip::from_block(&block.header))
+            .setup_height(&block.header, &Tip::from_header(&block.header))
             .unwrap();
         batch.commit().unwrap();
     }
@@ -113,7 +120,12 @@ fn test_store_header_height() {
     clean_output_dir(chain_dir);
 
     let db_env = Arc::new(store::new_env(chain_dir.to_string()));
-    let chain_store = chain::store::ChainStore::new(db_env).unwrap();
+    let chain_store = Arc::new(chain::store::ChainStore::new(db_env).unwrap());
+
+    global::set_mining_mode(ChainTypes::AutomatedTesting);
+    let genesis = pow::mine_genesis_block().unwrap();
+
+    setup_chain(&genesis, chain_store.clone()).unwrap();
 
     let mut block_header = BlockHeader::default();
     block_header.height = 1;
@@ -76,7 +76,7 @@ fn test_coinbase_maturity() {
     block.header.timestamp = prev.timestamp + Duration::seconds(60);
     block.header.pow.secondary_scaling = next_header_info.secondary_scaling;
 
-    chain.set_txhashset_roots(&mut block, false).unwrap();
+    chain.set_txhashset_roots(&mut block).unwrap();
 
     pow::pow_size(
         &mut block.header,
@@ -123,7 +123,7 @@ fn test_coinbase_maturity() {
     block.header.timestamp = prev.timestamp + Duration::seconds(60);
     block.header.pow.secondary_scaling = next_header_info.secondary_scaling;
 
-    chain.set_txhashset_roots(&mut block, false).unwrap();
+    chain.set_txhashset_roots(&mut block).unwrap();
 
     // Confirm the tx attempting to spend the coinbase output
     // is not valid at the current block height given the current chain state.
@@ -156,7 +156,7 @@ fn test_coinbase_maturity() {
     block.header.timestamp = prev.timestamp + Duration::seconds(60);
     block.header.pow.secondary_scaling = next_header_info.secondary_scaling;
 
-    chain.set_txhashset_roots(&mut block, false).unwrap();
+    chain.set_txhashset_roots(&mut block).unwrap();
 
     pow::pow_size(
         &mut block.header,
@@ -183,7 +183,7 @@ fn test_coinbase_maturity() {
     block.header.timestamp = prev.timestamp + Duration::seconds(60);
     block.header.pow.secondary_scaling = next_header_info.secondary_scaling;
 
-    chain.set_txhashset_roots(&mut block, false).unwrap();
+    chain.set_txhashset_roots(&mut block).unwrap();
 
     pow::pow_size(
         &mut block.header,
@@ -26,10 +26,7 @@ use std::sync::Arc;
 
 use chain::store::ChainStore;
 use chain::txhashset;
-use chain::types::Tip;
-use core::core::{Block, BlockHeader};
-use core::pow::Difficulty;
-use keychain::{ExtKeychain, ExtKeychainPath, Keychain};
+use core::core::BlockHeader;
 use util::file;
 
 fn clean_output_dir(dir_name: &str) {
@@ -118,7 +118,7 @@ pub struct BlockHeader {
     /// Height of this block since the genesis block (height 0)
     pub height: u64,
     /// Hash of the block previous to this in the chain.
-    pub previous: Hash,
+    pub prev_hash: Hash,
     /// Root hash of the header MMR at the previous header.
     pub prev_root: Hash,
     /// Timestamp at which the block was built.
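The rename makes room for the two distinct backward commitments a header now carries: prev_hash commits to the previous header itself, while prev_root commits to the root of the header MMR over all headers before this one, which is what this change validates during sync and full-block processing. A hedged sketch of the resulting check (illustrative shape only; the actual pipeline wiring lives in the chain crate):

    // Sketch: `header_mmr_root` is assumed to be the root of a header MMR
    // containing all headers up to, but not including, `header`. For the
    // genesis header that MMR is empty and the expected root is ZERO_HASH.
    fn validate_prev_root(header: &BlockHeader, header_mmr_root: Hash) -> bool {
        header.prev_root == header_mmr_root
    }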
@@ -147,10 +147,10 @@ fn fixed_size_of_serialized_header(_version: u16) -> usize {
     size += mem::size_of::<u16>(); // version
     size += mem::size_of::<u64>(); // height
     size += mem::size_of::<i64>(); // timestamp
-    // previous, prev_root, output_root, range_proof_root, kernel_root
+    // prev_hash, prev_root, output_root, range_proof_root, kernel_root
     size += 5 * mem::size_of::<Hash>();
     size += mem::size_of::<BlindingFactor>(); // total_kernel_offset
     // output_mmr_size, kernel_mmr_size
     size += 2 * mem::size_of::<u64>();
     size += mem::size_of::<Difficulty>(); // total_difficulty
     size += mem::size_of::<u32>(); // secondary_scaling
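As a back-of-envelope check (not normative; it assumes 32-byte Hash and BlindingFactor types and an 8-byte Difficulty), the fixed portion of the serialized header works out as:

    // version (u16)                2
    // height (u64)                 8
    // timestamp (i64)              8
    // 5 hashes * 32 bytes        160
    // total_kernel_offset         32
    // 2 MMR sizes * 8 bytes       16
    // total_difficulty             8
    // secondary_scaling (u32)      4
    // total                      238 bytes, before the (variable) proof of work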
@@ -177,7 +177,7 @@ impl Default for BlockHeader {
             version: 1,
             height: 0,
             timestamp: DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(0, 0), Utc),
-            previous: ZERO_HASH,
+            prev_hash: ZERO_HASH,
             prev_root: ZERO_HASH,
             output_root: ZERO_HASH,
             range_proof_root: ZERO_HASH,
@@ -213,7 +213,7 @@ impl Writeable for BlockHeader {
 impl Readable for BlockHeader {
     fn read(reader: &mut Reader) -> Result<BlockHeader, ser::Error> {
         let (version, height, timestamp) = ser_multiread!(reader, read_u16, read_u64, read_i64);
-        let previous = Hash::read(reader)?;
+        let prev_hash = Hash::read(reader)?;
         let prev_root = Hash::read(reader)?;
         let output_root = Hash::read(reader)?;
         let range_proof_root = Hash::read(reader)?;
@@ -232,7 +232,7 @@ impl Readable for BlockHeader {
             version,
             height,
             timestamp: DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(timestamp, 0), Utc),
-            previous,
+            prev_hash,
             prev_root,
             output_root,
             range_proof_root,
@@ -253,7 +253,7 @@ impl BlockHeader {
             [write_u16, self.version],
             [write_u64, self.height],
             [write_i64, self.timestamp.timestamp()],
-            [write_fixed_bytes, &self.previous],
+            [write_fixed_bytes, &self.prev_hash],
             [write_fixed_bytes, &self.prev_root],
             [write_fixed_bytes, &self.output_root],
             [write_fixed_bytes, &self.range_proof_root],
@@ -494,7 +494,7 @@ impl Block {
             header: BlockHeader {
                 height: prev.height + 1,
                 timestamp,
-                previous: prev.hash(),
+                prev_hash: prev.hash(),
                 total_kernel_offset,
                 pow: ProofOfWork {
                     total_difficulty: difficulty + prev.pow.total_difficulty,
@@ -16,7 +16,7 @@
 
 use std::marker;
 
-use core::hash::Hash;
+use core::hash::{Hash, ZERO_HASH};
 use core::pmmr::{bintree_postorder_height, is_leaf, peak_map_height, peaks, HashOnlyBackend};
 use ser::{PMMRIndexHashable, PMMRable};
@@ -58,16 +58,17 @@ where
         }
     }
 
-    /// Get the unpruned size of the MMR.
-    pub fn unpruned_size(&self) -> u64 {
-        self.last_pos
-    }
-
     /// Is the MMR empty?
     pub fn is_empty(&self) -> bool {
         self.last_pos == 0
     }
 
+    /// Total size of the tree, including intermediary nodes and ignoring any
+    /// pruning.
+    pub fn unpruned_size(&self) -> u64 {
+        self.last_pos
+    }
+
     /// Rewind the MMR to the specified position.
     pub fn rewind(&mut self, position: u64) -> Result<(), String> {
         // Identify which actual position we should rewind to as the provided
@@ -140,6 +141,9 @@ where
 
     /// Return the overall root hash for this MMR.
     pub fn root(&self) -> Hash {
+        if self.is_empty() {
+            return ZERO_HASH;
+        }
         let mut res = None;
         for peak in self.peaks().iter().rev() {
             res = match res {
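Returning ZERO_HASH for an empty MMR gives the genesis header a well-defined prev_root: there are no earlier headers, so the header MMR it commits to is empty. The property this implies, written as a minimal test sketch (assuming a freshly created header MMR with no leaves pushed):

    assert!(mmr.is_empty());
    assert_eq!(mmr.unpruned_size(), 0);
    assert_eq!(mmr.root(), ZERO_HASH);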
@@ -353,9 +353,9 @@ where
         Ok(())
     }
 
-    /// Check if this PMMR is (unpruned_size == 0).
+    /// Is the MMR empty?
     pub fn is_empty(&self) -> bool {
-        self.unpruned_size() == 0
+        self.last_pos == 0
     }
 
     /// Total size of the tree, including intermediary nodes and ignoring any
@@ -27,7 +27,7 @@ use pow::{Difficulty, Proof, ProofOfWork};
 pub fn genesis_dev() -> core::Block {
     core::Block::with_header(core::BlockHeader {
         height: 0,
-        previous: core::hash::Hash([0xff; 32]),
+        // previous: core::hash::Hash([0xff; 32]),
         timestamp: Utc.ymd(1997, 8, 4).and_hms(0, 0, 0),
         pow: ProofOfWork {
             nonce: global::get_genesis_nonce(),
@@ -63,7 +63,7 @@ pub fn genesis_testnet1() -> core::Block {
 pub fn genesis_testnet2() -> core::Block {
     core::Block::with_header(core::BlockHeader {
         height: 0,
-        previous: core::hash::Hash([0xff; 32]),
+        // previous: core::hash::Hash([0xff; 32]),
         timestamp: Utc.ymd(2018, 3, 26).and_hms(16, 0, 0),
         pow: ProofOfWork {
             total_difficulty: Difficulty::from_num(global::initial_block_difficulty()),
@@ -86,7 +86,7 @@ pub fn genesis_testnet2() -> core::Block {
 pub fn genesis_testnet3() -> core::Block {
     core::Block::with_header(core::BlockHeader {
         height: 0,
-        previous: core::hash::Hash([0xff; 32]),
+        // previous: core::hash::Hash([0xff; 32]),
         timestamp: Utc.ymd(2018, 7, 8).and_hms(18, 0, 0),
         pow: ProofOfWork {
             total_difficulty: Difficulty::from_num(global::initial_block_difficulty()),
@@ -110,7 +110,7 @@ pub fn genesis_testnet3() -> core::Block {
 pub fn genesis_testnet4() -> core::Block {
     core::Block::with_header(core::BlockHeader {
         height: 0,
-        previous: core::hash::Hash([0xff; 32]),
+        // previous: core::hash::Hash([0xff; 32]),
         timestamp: Utc.ymd(2018, 10, 17).and_hms(20, 0, 0),
         pow: ProofOfWork {
             total_difficulty: Difficulty::from_num(global::initial_block_difficulty()),
@@ -133,7 +133,7 @@ pub fn genesis_testnet4() -> core::Block {
 pub fn genesis_main() -> core::Block {
     core::Block::with_header(core::BlockHeader {
         height: 0,
-        previous: core::hash::Hash([0xff; 32]),
+        // previous: core::hash::Hash([0xff; 32]),
         timestamp: Utc.ymd(2018, 8, 14).and_hms(0, 0, 0),
         pow: ProofOfWork {
             total_difficulty: Difficulty::from_num(global::initial_block_difficulty()),
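With the field renamed, the genesis constructors stop setting an explicit previous hash; the commented-out lines leave a breadcrumb while the headers fall back to the Default impl, which (per the hunk above) initializes prev_hash to ZERO_HASH. A sketch of the resulting shape, assuming the struct literal ends with the conventional ..Default::default() spread (an assumption; the trailing lines are not shown in this diff):

    core::Block::with_header(core::BlockHeader {
        height: 0,
        // prev_hash left to Default: ZERO_HASH
        timestamp: Utc.ymd(2018, 8, 14).and_hms(0, 0, 0),
        ..Default::default() // assumed closing spread
    })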
@@ -28,6 +28,7 @@ pub mod common;
 use std::sync::Arc;
 use util::RwLock;
 
+use core::core::hash::Hashed;
 use core::core::verifier_cache::LruVerifierCache;
 use core::core::{Block, BlockHeader, Transaction};
 use core::pow::Difficulty;
@@ -55,20 +56,25 @@ fn test_transaction_pool_block_building() {
         let key_id = ExtKeychain::derive_key_id(1, height as u32, 0, 0, 0);
         let fee = txs.iter().map(|x| x.fee()).sum();
         let reward = libtx::reward::output(&keychain, &key_id, fee, height).unwrap();
-        let block = Block::new(&prev_header, txs, Difficulty::min(), reward).unwrap();
+        let mut block = Block::new(&prev_header, txs, Difficulty::min(), reward).unwrap();
 
+        // Set the prev_root to the prev hash for testing purposes (no MMR to obtain a root from).
+        block.header.prev_root = prev_header.hash();
+
         chain.update_db_for_block(&block);
-        block.header
+        block
     };
 
-    let header = add_block(BlockHeader::default(), vec![], &mut chain);
+    let block = add_block(BlockHeader::default(), vec![], &mut chain);
+    let header = block.header;
 
     // Now create tx to spend that first coinbase (now matured).
     // Provides us with some useful outputs to test with.
     let initial_tx = test_transaction_spending_coinbase(&keychain, &header, vec![10, 20, 30, 40]);
 
     // Mine that initial tx so we can spend it with multiple txs
-    let header = add_block(header, vec![initial_tx], &mut chain);
+    let block = add_block(header, vec![initial_tx], &mut chain);
+    let header = block.header;
 
     // Initialize a new pool with our chain adapter.
     let pool = RwLock::new(test_setup(Arc::new(chain.clone()), verifier_cache));
@@ -112,14 +118,7 @@ fn test_transaction_pool_block_building() {
     // children should have been aggregated into parents
     assert_eq!(txs.len(), 3);
 
-    let block = {
-        let key_id = ExtKeychain::derive_key_id(1, 2, 0, 0, 0);
-        let fees = txs.iter().map(|tx| tx.fee()).sum();
-        let reward = libtx::reward::output(&keychain, &key_id, fees, 0).unwrap();
-        Block::new(&header, txs, Difficulty::min(), reward)
-    }.unwrap();
-
-    chain.update_db_for_block(&block);
+    let block = add_block(header, txs, &mut chain);
 
     // Now reconcile the transaction pool with the new block
     // and check the resulting contents of the pool are what we expect.
@@ -31,6 +31,7 @@ use util::RwLock;
 use core::core::{Block, BlockHeader};
 
 use common::*;
+use core::core::hash::Hashed;
 use core::core::verifier_cache::LruVerifierCache;
 use core::pow::Difficulty;
 use keychain::{ExtKeychain, Keychain};
@@ -53,7 +54,11 @@ fn test_transaction_pool_block_reconciliation() {
     let height = 1;
     let key_id = ExtKeychain::derive_key_id(1, height as u32, 0, 0, 0);
     let reward = libtx::reward::output(&keychain, &key_id, 0, height).unwrap();
-    let block = Block::new(&BlockHeader::default(), vec![], Difficulty::min(), reward).unwrap();
+    let genesis = BlockHeader::default();
+    let mut block = Block::new(&genesis, vec![], Difficulty::min(), reward).unwrap();
+
+    // Set the prev_root to the prev hash for testing purposes (no MMR to obtain a root from).
+    block.header.prev_root = genesis.hash();
 
     chain.update_db_for_block(&block);
@@ -68,7 +73,10 @@ fn test_transaction_pool_block_reconciliation() {
         let key_id = ExtKeychain::derive_key_id(1, 2, 0, 0, 0);
         let fees = initial_tx.fee();
         let reward = libtx::reward::output(&keychain, &key_id, fees, 0).unwrap();
-        let block = Block::new(&header, vec![initial_tx], Difficulty::min(), reward).unwrap();
+        let mut block = Block::new(&header, vec![initial_tx], Difficulty::min(), reward).unwrap();
+
+        // Set the prev_root to the prev hash for testing purposes (no MMR to obtain a root from).
+        block.header.prev_root = header.hash();
 
         chain.update_db_for_block(&block);
@@ -158,7 +166,10 @@ fn test_transaction_pool_block_reconciliation() {
         let key_id = ExtKeychain::derive_key_id(1, 3, 0, 0, 0);
         let fees = block_txs.iter().map(|tx| tx.fee()).sum();
         let reward = libtx::reward::output(&keychain, &key_id, fees, 0).unwrap();
-        let block = Block::new(&header, block_txs, Difficulty::min(), reward).unwrap();
+        let mut block = Block::new(&header, block_txs, Difficulty::min(), reward).unwrap();
+
+        // Set the prev_root to the prev hash for testing purposes (no MMR to obtain a root from).
+        block.header.prev_root = header.hash();
 
         chain.update_db_for_block(&block);
         block
@@ -66,13 +66,14 @@ impl ChainAdapter {
 
     pub fn update_db_for_block(&self, block: &Block) {
         let header = &block.header;
+        let tip = Tip::from_header(header);
         let batch = self.store.batch().unwrap();
-        let tip = Tip::from_block(&header);
-        batch.save_block_header(&header).unwrap();
+        batch.save_block_header(header).unwrap();
         batch.save_head(&tip).unwrap();
 
         // Retrieve previous block_sums from the db.
-        let prev_sums = if let Ok(prev_sums) = batch.get_block_sums(&header.previous) {
+        let prev_sums = if let Ok(prev_sums) = batch.get_block_sums(&tip.prev_block_h) {
             prev_sums
         } else {
             BlockSums::default()
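The test adapter now derives its Tip from the header before opening the batch, and looks up the previous block sums via tip.prev_block_h rather than reading the header field directly. A compact sketch of the dependency (assuming Tip exposes prev_block_h as used above; error handling simplified):

    let tip = Tip::from_header(&block.header);
    // tip.prev_block_h is the hash of the header this block builds on,
    // standing in for the old `header.previous` lookup:
    let prev_sums = batch
        .get_block_sums(&tip.prev_block_h)
        .unwrap_or_else(|_| BlockSums::default());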
@@ -159,7 +159,7 @@ impl p2p::ChainAdapter for NetToChainAdapter {
             }
         };
 
-        if let Ok(prev) = self.chain().get_block_header(&cb.header.previous) {
+        if let Ok(prev) = self.chain().get_previous_header(&cb.header) {
             if block
                 .validate(&prev.total_kernel_offset, self.verifier_cache.clone())
                 .is_ok()
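get_previous_header(&header) replaces the scattered get_block_header(&header.previous) calls throughout the sync and adapter code, keeping the prev_hash field an implementation detail of the chain. A hedged sketch of walking back n headers with the new helper, mirroring the StateSync loop later in this diff (walk_back itself is illustrative, not part of the codebase):

    fn walk_back(chain: &Chain, mut header: BlockHeader, n: u64) -> BlockHeader {
        // Each step asks the chain for the header that `header` builds on.
        for _ in 0..n {
            header = chain.get_previous_header(&header).unwrap();
        }
        header
    }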
@@ -441,8 +441,9 @@ impl NetToChainAdapter {
             }
         }
 
-        let prev_hash = b.header.previous;
         let bhash = b.hash();
+        let previous = self.chain().get_previous_header(&b.header);
+
         match self.chain().process_block(b, self.chain_opts()) {
             Ok(_) => {
                 self.validate_chain(bhash);
@@ -465,10 +466,14 @@
             Err(e) => {
                 match e.kind() {
                     chain::ErrorKind::Orphan => {
-                        // make sure we did not miss the parent block
-                        if !self.chain().is_orphan(&prev_hash) && !self.sync_state.is_syncing() {
-                            debug!("adapter: process_block: received an orphan block, checking the parent: {:}", prev_hash);
-                            self.request_block_by_hash(prev_hash, &addr)
+                        if let Ok(previous) = previous {
+                            // make sure we did not miss the parent block
+                            if !self.chain().is_orphan(&previous.hash())
+                                && !self.sync_state.is_syncing()
+                            {
+                                debug!("adapter: process_block: received an orphan block, checking the parent: {:}", previous.hash());
+                                self.request_block_by_hash(previous.hash(), &addr)
+                            }
                         }
                         true
                     }
@@ -105,7 +105,7 @@ impl BodySync {
 
                 hashes.push(header.hash());
                 oldest_height = header.height;
-                current = self.chain.get_block_header(&header.previous);
+                current = self.chain.get_previous_header(&header);
             }
         }
         //+ remove me after #1880 root cause found
@@ -67,7 +67,7 @@ impl HeaderSync {
                 // Reset sync_head to the same as current header_head.
                 self.chain.reset_sync_head(&header_head).unwrap();
 
-                // Rebuild the sync MMR to match our updates sync_head.
+                // Rebuild the sync MMR to match our updated sync_head.
                 self.chain.rebuild_sync_mmr(&header_head).unwrap();
 
                 self.history_locator.clear();
@@ -173,7 +173,7 @@ impl HeaderSync {
                     locator.push((header.height, header.hash()));
                     break;
                 }
-                header_cursor = self.chain.get_block_header(&header.previous);
+                header_cursor = self.chain.get_previous_header(&header);
             }
         }
     }
@@ -260,9 +260,7 @@ mod test {
 
         // just 1 locator in history
         let heights: Vec<u64> = vec![64, 62, 58, 50, 34, 2, 0];
-        let history_locator: Vec<(u64, Hash)> = vec![
-            (0, zh.clone()),
-        ];
+        let history_locator: Vec<(u64, Hash)> = vec![(0, zh.clone())];
         let mut locator: Vec<(u64, Hash)> = vec![];
         for h in heights {
             if let Some(l) = close_enough(&history_locator, h) {
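The height sequences in these tests follow the usual locator pattern of stepping back by doubling gaps (2, 4, 8, ...) until height 0. A self-contained sketch that reproduces the first test vector; this illustrates the pattern and is not necessarily the crate's actual locator-height function:

    fn locator_heights(height: u64) -> Vec<u64> {
        let mut current = height;
        let mut heights = vec![];
        let mut step = 2u64;
        loop {
            heights.push(current);
            if current == 0 {
                break;
            }
            // Step back by a doubling gap, clamping at the genesis height.
            current = current.saturating_sub(step);
            step *= 2;
        }
        heights
    }

    assert_eq!(locator_heights(64), vec![64, 62, 58, 50, 34, 2, 0]);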
@@ -288,7 +286,7 @@ mod test {
 
         // more realistic test with 11 history
         let heights: Vec<u64> = vec![
-            2554, 2552, 2548, 2540, 2524, 2492, 2428, 2300, 2044, 1532, 508, 0
+            2554, 2552, 2548, 2540, 2524, 2492, 2428, 2300, 2044, 1532, 508, 0,
         ];
         let history_locator: Vec<(u64, Hash)> = vec![
             (2043, zh.clone()),
@@ -310,15 +308,14 @@ mod test {
             }
         }
         locator.dedup_by(|a, b| a.0 == b.0);
-        assert_eq!(locator, vec![
-            (2043, zh.clone()),
-            (1532, zh.clone()),
-            (0, zh.clone()),
-        ]);
+        assert_eq!(
+            locator,
+            vec![(2043, zh.clone()), (1532, zh.clone()), (0, zh.clone()),]
+        );
 
         // more realistic test with 12 history
         let heights: Vec<u64> = vec![
-            4598, 4596, 4592, 4584, 4568, 4536, 4472, 4344, 4088, 3576, 2552, 504, 0
+            4598, 4596, 4592, 4584, 4568, 4536, 4472, 4344, 4088, 3576, 2552, 504, 0,
         ];
         let history_locator: Vec<(u64, Hash)> = vec![
             (4087, zh.clone()),
@@ -341,11 +338,14 @@ mod test {
             }
         }
         locator.dedup_by(|a, b| a.0 == b.0);
-        assert_eq!(locator, vec![
-            (4087, zh.clone()),
-            (3576, zh.clone()),
-            (3065, zh.clone()),
-            (0, zh.clone()),
-        ]);
+        assert_eq!(
+            locator,
+            vec![
+                (4087, zh.clone()),
+                (3576, zh.clone()),
+                (3065, zh.clone()),
+                (0, zh.clone()),
+            ]
+        );
     }
 }
@@ -151,10 +151,7 @@ impl StateSync {
                 .get_block_header(&header_head.prev_block_h)
                 .unwrap();
             for _ in 0..(horizon - horizon / 10) {
-                txhashset_head = self
-                    .chain
-                    .get_block_header(&txhashset_head.previous)
-                    .unwrap();
+                txhashset_head = self.chain.get_previous_header(&txhashset_head).unwrap();
             }
             let bhash = txhashset_head.hash();
             debug!(
@@ -139,7 +139,7 @@ fn build_block(
     );
 
     // Now set txhashset roots and sizes on the header of the block being built.
-    let roots_result = chain.set_txhashset_roots(&mut b, false);
+    let roots_result = chain.set_txhashset_roots(&mut b);
 
     match roots_result {
         Ok(_) => Ok((b, block_fees)),
@@ -157,9 +157,10 @@ impl Miner {
         // we found a solution, push our block through the chain processing pipeline
         if sol {
             info!(
-                "(Server ID: {}) Found valid proof of work, adding block {}.",
+                "(Server ID: {}) Found valid proof of work, adding block {} (prev_root {}).",
                 self.debug_output_id,
-                b.hash()
+                b.hash(),
+                b.header.prev_root,
             );
             let res = self.chain.process_block(b, chain::Options::MINE);
             if let Err(e) = res {
@@ -99,7 +99,7 @@ pub fn add_block_with_reward(chain: &Chain, txs: Vec<&Transaction>, reward: CbDa
     ).unwrap();
     b.header.timestamp = prev.timestamp + Duration::seconds(60);
     b.header.pow.secondary_scaling = next_header_info.secondary_scaling;
-    chain.set_txhashset_roots(&mut b, false).unwrap();
+    chain.set_txhashset_roots(&mut b).unwrap();
     pow::pow_size(
         &mut b.header,
         next_header_info.difficulty,