Mirror of https://github.com/mimblewimble/grin.git
[2.x.x] Generate txhashset archives on 720 block intervals. (#2951)
Generate txhashset archives on 720 block intervals.
This commit is contained in: parent 121e6c8c46, commit 4966dc04f7
13 changed files with 229 additions and 13 deletions
@@ -684,6 +684,27 @@ impl Chain {
 		))
 	}
 
+	/// To support the ability to download the txhashset from multiple peers in parallel,
+	/// the peers must all agree on the exact binary representation of the txhashset.
+	/// This means compacting and rewinding to the exact same header.
+	/// Since compaction is a heavy operation, peers can agree to compact every 12 hours,
+	/// and no longer support requesting arbitrary txhashsets.
+	/// Here we return the header of the txhashset we are currently offering to peers.
+	pub fn txhashset_archive_header(&self) -> Result<BlockHeader, Error> {
+		let sync_threshold = global::state_sync_threshold() as u64;
+		let body_head = self.head()?;
+		let archive_interval = global::txhashset_archive_interval();
+		let mut txhashset_height = body_head.height.saturating_sub(sync_threshold);
+		txhashset_height = txhashset_height.saturating_sub(txhashset_height % archive_interval);
+
+		debug!(
+			"txhashset_archive_header: body_head - {}, {}, txhashset height - {}",
+			body_head.last_block_h, body_head.height, txhashset_height,
+		);
+
+		self.get_header_by_height(txhashset_height)
+	}
+
 	// Special handling to make sure the whole kernel set matches each of its
 	// roots in each block header, without truncation. We go back header by
 	// header, rewind and check each root. This fixes a potential weakness in
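Note (illustrative, not part of the diff): the archive height above is derived by stepping back state_sync_threshold blocks from the chain tip and then rounding down to a multiple of the archive interval, so any two peers with the same tip offer the identical archive header. A minimal standalone sketch of that arithmetic, with made-up names and example numbers:

fn archive_height(tip_height: u64, sync_threshold: u64, archive_interval: u64) -> u64 {
	// mirrors the two saturating_sub steps in txhashset_archive_header()
	let h = tip_height.saturating_sub(sync_threshold);
	h.saturating_sub(h % archive_interval)
}

// archive_height(1_000, 20, 720) == 720  (980 - 980 % 720)
// archive_height(34, 20, 10)     == 10   (the automated-testing case exercised by the new test below)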
@@ -1431,10 +1431,10 @@ pub fn zip_read(root_dir: String, header: &BlockHeader) -> Result<File, Error> {
 	} else {
 		// clean up old zips.
 		// Theoretically, we only need clean-up those zip files older than STATE_SYNC_THRESHOLD.
-		// But practically, these zip files are not small ones, we just keep the zips in last one hour
+		// But practically, these zip files are not small ones, we just keep the zips in last 24 hours
 		let data_dir = Path::new(&root_dir);
 		let pattern = format!("{}_", TXHASHSET_ZIP);
-		if let Ok(n) = clean_files_by_prefix(data_dir.clone(), &pattern, 60 * 60) {
+		if let Ok(n) = clean_files_by_prefix(data_dir.clone(), &pattern, 24 * 60 * 60) {
			debug!(
				"{} zip files have been clean up in folder: {:?}",
				n, data_dir
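The retention window for old txhashset zips widens from one hour to 24 hours so that an archive regenerated only every 720 blocks (12 hours) stays on disk long enough to keep being served. The clean_files_by_prefix helper itself is not part of this diff; the sketch below only illustrates the kind of prefix-and-age sweep its call site implies, with the (dir, filename prefix, max age in seconds) shape inferred from that call:

use std::{fs, path::Path, time::SystemTime};

fn clean_files_by_prefix_sketch(dir: &Path, prefix: &str, max_age_secs: u64) -> std::io::Result<u32> {
	let mut removed = 0;
	for entry in fs::read_dir(dir)? {
		let entry = entry?;
		let meta = entry.metadata()?;
		let name = entry.file_name().to_string_lossy().into_owned();
		// files with a modification time in the future are treated as not old enough
		let old_enough = match SystemTime::now().duration_since(meta.modified()?) {
			Ok(age) => age.as_secs() > max_age_secs,
			Err(_) => false,
		};
		if meta.is_file() && name.starts_with(prefix) && old_enough {
			fs::remove_file(entry.path())?;
			removed += 1;
		}
	}
	Ok(removed)
}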
chain/tests/chain_test_helper.rs (new file, 127 lines)
@@ -0,0 +1,127 @@
+// Copyright 2018 The Grin Developers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+use self::chain::types::NoopAdapter;
+use self::chain::types::Options;
+use self::chain::Chain;
+use self::core::core::verifier_cache::LruVerifierCache;
+use self::core::core::Block;
+use self::core::genesis;
+use self::core::global::ChainTypes;
+use self::core::libtx::{self, reward};
+use self::core::pow::Difficulty;
+use self::core::{consensus, global, pow};
+use self::keychain::{ExtKeychainPath, Keychain};
+use self::util::RwLock;
+use chrono::Duration;
+use grin_chain as chain;
+use grin_core as core;
+use grin_keychain as keychain;
+use grin_util as util;
+use std::fs;
+use std::sync::Arc;
+
+pub fn clean_output_dir(dir_name: &str) {
+	let _ = fs::remove_dir_all(dir_name);
+}
+
+pub fn setup(dir_name: &str, genesis: Block) -> Chain {
+	util::init_test_logger();
+	clean_output_dir(dir_name);
+	let verifier_cache = Arc::new(RwLock::new(LruVerifierCache::new()));
+	Chain::init(
+		dir_name.to_string(),
+		Arc::new(NoopAdapter {}),
+		genesis,
+		pow::verify_size,
+		verifier_cache,
+		false,
+	)
+	.unwrap()
+}
+
+/// Mine a chain of specified length to assist with automated tests.
+/// Must call clean_output_dir at the end of your test.
+pub fn mine_chain(dir_name: &str, chain_length: u64) -> Chain {
+	global::set_mining_mode(ChainTypes::AutomatedTesting);
+
+	// add coinbase data from the dev genesis block
+	let mut genesis = genesis::genesis_dev();
+	let keychain = keychain::ExtKeychain::from_random_seed(false).unwrap();
+	let key_id = keychain::ExtKeychain::derive_key_id(0, 1, 0, 0, 0);
+	let reward = reward::output(
+		&keychain,
+		&libtx::ProofBuilder::new(&keychain),
+		&key_id,
+		0,
+		false,
+	)
+	.unwrap();
+	genesis = genesis.with_reward(reward.0, reward.1);
+
+	let mut chain = setup(dir_name, pow::mine_genesis_block().unwrap());
+	chain.set_txhashset_roots(&mut genesis).unwrap();
+	genesis.header.output_mmr_size = 1;
+	genesis.header.kernel_mmr_size = 1;
+
+	// get a valid PoW
+	pow::pow_size(
+		&mut genesis.header,
+		Difficulty::unit(),
+		global::proofsize(),
+		global::min_edge_bits(),
+	)
+	.unwrap();
+
+	mine_some_on_top(&mut chain, chain_length, &keychain);
+	chain
+}
+
+fn mine_some_on_top<K>(chain: &mut Chain, chain_length: u64, keychain: &K)
+where
+	K: Keychain,
+{
+	for n in 1..chain_length {
+		let prev = chain.head_header().unwrap();
+		let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter().unwrap());
+		let pk = ExtKeychainPath::new(1, n as u32, 0, 0, 0).to_identifier();
+		let reward =
+			libtx::reward::output(keychain, &libtx::ProofBuilder::new(keychain), &pk, 0, false)
+				.unwrap();
+		let mut b =
+			core::core::Block::new(&prev, vec![], next_header_info.clone().difficulty, reward)
+				.unwrap();
+		b.header.timestamp = prev.timestamp + Duration::seconds(160);
+		b.header.pow.secondary_scaling = next_header_info.secondary_scaling;
+
+		chain.set_txhashset_roots(&mut b).unwrap();
+
+		let edge_bits = if n == 2 {
+			global::min_edge_bits() + 1
+		} else {
+			global::min_edge_bits()
+		};
+		b.header.pow.proof.edge_bits = edge_bits;
+		pow::pow_size(
+			&mut b.header,
+			next_header_info.difficulty,
+			global::proofsize(),
+			edge_bits,
+		)
+		.unwrap();
+		b.header.pow.proof.edge_bits = edge_bits;
+
+		chain.process_block(b, Options::MINE).unwrap();
+	}
+}
chain/tests/test_txhashset_archive.rs (new file, 25 lines)
@@ -0,0 +1,25 @@
+// Copyright 2018 The Grin Developers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+mod chain_test_helper;
+
+use self::chain_test_helper::{clean_output_dir, mine_chain};
+
+#[test]
+fn test() {
+	let chain = mine_chain(".txhashset_archive_test", 35);
+	let header = chain.txhashset_archive_header().unwrap();
+	assert_eq!(10, header.height);
+	clean_output_dir(".txhashset_archive_test");
+}
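The expected height of 10 follows from the helper and the testing constants introduced in this commit: mine_chain(_, 35) mines blocks for n in 1..35, leaving the tip at height 34, TESTING_STATE_SYNC_THRESHOLD is 20, and AUTOMATED_TESTING_TXHASHSET_ARCHIVE_INTERVAL is 10. Worked out as a small illustrative test (not part of the diff):

#[test]
fn archive_height_arithmetic() {
	let tip_height: u64 = 34; // 34 blocks mined on top of the genesis block
	let threshold: u64 = 20;  // TESTING_STATE_SYNC_THRESHOLD
	let interval: u64 = 10;   // AUTOMATED_TESTING_TXHASHSET_ARCHIVE_INTERVAL
	let h = tip_height - threshold; // 14
	assert_eq!(h - h % interval, 10); // matches assert_eq!(10, header.height) above
}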
@@ -61,7 +61,10 @@ pub const AUTOMATED_TESTING_COINBASE_MATURITY: u64 = 3;
 pub const USER_TESTING_COINBASE_MATURITY: u64 = 3;
 
 /// Testing cut through horizon in blocks
-pub const TESTING_CUT_THROUGH_HORIZON: u32 = 70;
+pub const AUTOMATED_TESTING_CUT_THROUGH_HORIZON: u32 = 20;
+
+/// Testing cut through horizon in blocks
+pub const USER_TESTING_CUT_THROUGH_HORIZON: u32 = 70;
 
 /// Testing state sync threshold in blocks
 pub const TESTING_STATE_SYNC_THRESHOLD: u32 = 20;
@@ -92,6 +95,12 @@ pub const PEER_EXPIRATION_REMOVE_TIME: i64 = PEER_EXPIRATION_DAYS * 24 * 3600;
 /// For a node configured as "archival_mode = true" only the txhashset will be compacted.
 pub const COMPACTION_CHECK: u64 = DAY_HEIGHT;
 
+/// Automated testing number of blocks to reuse a txhashset zip for.
+pub const AUTOMATED_TESTING_TXHASHSET_ARCHIVE_INTERVAL: u64 = 10;
+
+/// Number of blocks to reuse a txhashset zip for.
+pub const TXHASHSET_ARCHIVE_INTERVAL: u64 = 12 * 60;
+
 /// Types of chain a server can run with, dictates the genesis block and
 /// and mining parameters used.
 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
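TXHASHSET_ARCHIVE_INTERVAL = 12 * 60 is the 720-block interval named in the commit title: at Grin's target of one block per minute, 720 blocks is 12 hours, so peers regenerate (and agree on) a fresh archive roughly twice a day. A trivial illustration with made-up names (not from the codebase):

fn blocks_per_archive_window(hours: u64) -> u64 {
	hours * 60 // one-minute target block time
}
// blocks_per_archive_window(12) == 720 == TXHASHSET_ARCHIVE_INTERVAL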
@@ -261,8 +270,8 @@ pub fn max_block_weight() -> usize {
 pub fn cut_through_horizon() -> u32 {
 	let param_ref = CHAIN_TYPE.read();
 	match *param_ref {
-		ChainTypes::AutomatedTesting => TESTING_CUT_THROUGH_HORIZON,
-		ChainTypes::UserTesting => TESTING_CUT_THROUGH_HORIZON,
+		ChainTypes::AutomatedTesting => AUTOMATED_TESTING_CUT_THROUGH_HORIZON,
+		ChainTypes::UserTesting => USER_TESTING_CUT_THROUGH_HORIZON,
 		_ => CUT_THROUGH_HORIZON,
 	}
 }
@@ -277,6 +286,15 @@ pub fn state_sync_threshold() -> u32 {
 	}
 }
 
+/// Number of blocks to reuse a txhashset zip for.
+pub fn txhashset_archive_interval() -> u64 {
+	let param_ref = CHAIN_TYPE.read();
+	match *param_ref {
+		ChainTypes::AutomatedTesting => AUTOMATED_TESTING_TXHASHSET_ARCHIVE_INTERVAL,
+		_ => TXHASHSET_ARCHIVE_INTERVAL,
+	}
+}
+
 /// Are we in automated testing mode?
 pub fn is_automated_testing_mode() -> bool {
 	let param_ref = CHAIN_TYPE.read();
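Illustrative usage of the new accessor, mirroring how the tests above switch into automated-testing mode; the paths assume the grin_core crate layout used elsewhere in this diff:

use grin_core::global::{self, ChainTypes};

fn main() {
	global::set_mining_mode(ChainTypes::AutomatedTesting);
	assert_eq!(global::txhashset_archive_interval(), 10);
	// Any other chain type falls through to TXHASHSET_ARCHIVE_INTERVAL (720).
}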
@@ -73,7 +73,6 @@ pub fn verify_size(bh: &BlockHeader) -> Result<(), Error> {
 pub fn mine_genesis_block() -> Result<Block, Error> {
 	let mut gen = genesis::genesis_dev();
 	if global::is_user_testing_mode() || global::is_automated_testing_mode() {
-		gen = genesis::genesis_dev();
 		gen.header.timestamp = Utc::now();
 	}
 
@@ -565,6 +565,10 @@ impl ChainAdapter for TrackingAdapter {
 		self.adapter.txhashset_read(h)
 	}
 
+	fn txhashset_archive_header(&self) -> Result<core::BlockHeader, chain::Error> {
+		self.adapter.txhashset_archive_header()
+	}
+
 	fn txhashset_receive_ready(&self) -> bool {
 		self.adapter.txhashset_receive_ready()
 	}
@@ -674,6 +674,10 @@ impl ChainAdapter for Peers {
 		self.adapter.txhashset_read(h)
 	}
 
+	fn txhashset_archive_header(&self) -> Result<core::BlockHeader, chain::Error> {
+		self.adapter.txhashset_archive_header()
+	}
+
 	fn txhashset_receive_ready(&self) -> bool {
 		self.adapter.txhashset_receive_ready()
 	}
@@ -12,8 +12,10 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
 use crate::conn::{Message, MessageHandler, Response, Tracker};
-use crate::core::core::{self, hash::Hash, CompactBlock};
+use crate::core::core::{self, hash::Hash, hash::Hashed, CompactBlock};
+
 use crate::msg::{
 	BanReason, GetPeerAddrs, Headers, KernelDataResponse, Locator, PeerAddrs, Ping, Pong,
 	TxHashSetArchive, TxHashSetRequest, Type,
@@ -320,7 +322,9 @@ impl MessageHandler for Protocol {
 					sm_req.hash, sm_req.height
 				);
 
-				let txhashset = self.adapter.txhashset_read(sm_req.hash);
+				let txhashset_header = self.adapter.txhashset_archive_header()?;
+				let txhashset_header_hash = txhashset_header.hash();
+				let txhashset = self.adapter.txhashset_read(txhashset_header_hash);
 
 				if let Some(txhashset) = txhashset {
 					let file_sz = txhashset.reader.metadata()?.len();
@@ -328,8 +332,8 @@ impl MessageHandler for Protocol {
 					Type::TxHashSetArchive,
 					self.peer_info.version,
 					&TxHashSetArchive {
-						height: sm_req.height as u64,
-						hash: sm_req.hash,
+						height: txhashset_header.height as u64,
+						hash: txhashset_header_hash,
 						bytes: file_sz,
 					},
 					writer,
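With this change the responder ignores the hash and height in the request (they are still logged) and always streams the archive pinned to txhashset_archive_header(), reporting that header's height and hash back in the TxHashSetArchive message. Peers whose tips fall in the same interval bucket therefore offer byte-identical zips, which is what makes downloading from multiple peers in parallel feasible. Reusing the archive_height sketch from earlier, with a threshold of 720 chosen purely as an example value:

fn peers_agree_example() {
	assert_eq!(archive_height(100_000, 720, 720), 98_640);
	assert_eq!(archive_height(100_050, 720, 720), 98_640); // same 720-block bucket, same archive
}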
@@ -302,6 +302,10 @@ impl ChainAdapter for DummyAdapter {
 		unimplemented!()
 	}
 
+	fn txhashset_archive_header(&self) -> Result<core::BlockHeader, chain::Error> {
+		unimplemented!()
+	}
+
 	fn txhashset_receive_ready(&self) -> bool {
 		false
 	}
@@ -535,6 +535,9 @@ pub trait ChainAdapter: Sync + Send {
 	/// at the provided block hash.
 	fn txhashset_read(&self, h: Hash) -> Option<TxHashSetRead>;
 
+	/// Header of the txhashset archive currently being served to peers.
+	fn txhashset_archive_header(&self) -> Result<core::BlockHeader, chain::Error>;
+
 	/// Whether the node is ready to accept a new txhashset. If this isn't the
 	/// case, the archive is provided without being requested and likely an
 	/// attack attempt. This should be checked *before* downloading the whole
@@ -368,6 +368,10 @@ impl p2p::ChainAdapter for NetToChainAdapter {
 		}
 	}
 
+	fn txhashset_archive_header(&self) -> Result<core::BlockHeader, chain::Error> {
+		self.chain().txhashset_archive_header()
+	}
+
 	fn txhashset_receive_ready(&self) -> bool {
 		match self.sync_state.status() {
 			SyncStatus::TxHashsetDownload { .. } => true,
@@ -160,6 +160,9 @@ impl StateSync {
 
 	fn request_state(&self, header_head: &chain::Tip) -> Result<Arc<Peer>, p2p::Error> {
 		let threshold = global::state_sync_threshold() as u64;
+		let archive_interval = global::txhashset_archive_interval();
+		let mut txhashset_height = header_head.height.saturating_sub(threshold);
+		txhashset_height = txhashset_height.saturating_sub(txhashset_height % archive_interval);
 
 		if let Some(peer) = self.peers.most_work_peer() {
 			// ask for txhashset at state_sync_threshold
|
|||
.get_block_header(&header_head.prev_block_h)
|
||||
.map_err(|e| {
|
||||
error!(
|
||||
"chain error dirung getting a block header {}: {:?}",
|
||||
"chain error during getting a block header {}: {:?}",
|
||||
&header_head.prev_block_h, e
|
||||
);
|
||||
p2p::Error::Internal
|
||||
})?;
|
||||
for _ in 0..threshold {
|
||||
while txhashset_head.height > txhashset_height {
|
||||
txhashset_head = self
|
||||
.chain
|
||||
.get_previous_header(&txhashset_head)
|
||||
.map_err(|e| {
|
||||
error!(
|
||||
"chain error dirung getting a previous block header {}: {:?}",
|
||||
"chain error during getting a previous block header {}: {:?}",
|
||||
txhashset_head.hash(),
|
||||
e
|
||||
);
|
||||
|
|
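On the requesting side the same txhashset_height is computed as on the serving side, and the header walk-back now runs until that height is reached rather than for a fixed threshold number of steps, because rounding down to the archive interval can place the target more than threshold blocks below the tip. A simplified, heights-only sketch of the loop shape (the real code follows previous-header links via chain.get_previous_header):

fn walk_back_to(mut height: u64, target: u64) -> u64 {
	while height > target {
		height -= 1; // stand-in for stepping to the previous header
	}
	height
}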