use sandbox folder for txhashset validation on state sync (#2685)
* use sandbox folder for txhashset validation on state sync
* rustfmt
* use a temp directory as the sandbox instead of the actual db_root txhashset dir
* rustfmt
* move txhashset overwrite to the end of full validation
* fix travis-ci test
* rustfmt
* fix: hashset has 2 folders, txhashset and header
* rustfmt
* (1) switch to rebuild_header_mmr instead of copying the sandbox header MMR; (2) lock txhashset when overwriting, opening and rebuilding
* minor improvement on sandbox_dir
Parent: ba6f12c70b
Commit: 37b3a72c2f
4 changed files with 96 additions and 26 deletions
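As a rough overview before the diff: the commit moves txhashset unzipping and validation into a sandbox under the OS temp directory, and only promotes it into db_root once full validation passes. Below is a minimal standalone sketch of that flow; the helper bodies mirror the diff but are illustrative stand-ins, not grin's actual chain API, and the validation step is elided.

```rust
use std::env;
use std::fs;
use std::io;
use std::path::PathBuf;

const TXHASHSET_SUBDIR: &str = "txhashset";

// Mirrors clean_txhashset_folder in the diff: best-effort removal of the
// txhashset folder under `root_dir`.
fn clean_txhashset_folder(root_dir: &PathBuf) {
    let txhashset_path = root_dir.join(TXHASHSET_SUBDIR);
    if txhashset_path.exists() {
        if let Err(e) = fs::remove_dir_all(&txhashset_path) {
            eprintln!("clean_txhashset_folder: fail on {:?}. err: {}", txhashset_path, e);
        }
    }
}

// Mirrors txhashset_replace in the diff: clean the target, then move the
// sandbox folder into place.
fn txhashset_replace(from: PathBuf, to: PathBuf) -> io::Result<()> {
    clean_txhashset_folder(&to);
    fs::rename(from.join(TXHASHSET_SUBDIR), to.join(TXHASHSET_SUBDIR))
}

fn main() -> io::Result<()> {
    let db_root = PathBuf::from("chain_data");
    fs::create_dir_all(&db_root)?;

    // 1. Build the candidate state in a sandbox under the OS temp dir,
    //    so a bad archive never touches the live db_root.
    let sandbox_dir = env::temp_dir();
    clean_txhashset_folder(&sandbox_dir);
    fs::create_dir_all(sandbox_dir.join(TXHASHSET_SUBDIR))?; // zip_write unzips here

    // 2. ... open and fully validate the sandboxed txhashset here ...

    // 3. Only after validation succeeds, swap the sandbox into db_root.
    txhashset_replace(sandbox_dir, db_root)?;
    Ok(())
}
```

One caveat worth keeping in mind while reading the sketch: `std::fs::rename` fails across filesystems, so the swap step assumes the temp directory and db_root live on the same mount.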
@@ -36,7 +36,9 @@ use crate::util::secp::pedersen::{Commitment, RangeProof};
 use crate::util::{Mutex, RwLock, StopState};
 use grin_store::Error::NotFoundErr;
 use std::collections::HashMap;
+use std::env;
 use std::fs::File;
+use std::path::PathBuf;
 use std::sync::atomic::{AtomicUsize, Ordering};
 use std::sync::Arc;
 use std::time::{Duration, Instant};
@@ -848,6 +850,11 @@ impl Chain {
 		}
 	}
 
+	/// Clean the temporary sandbox folder
+	pub fn clean_txhashset_sandbox(&self) {
+		txhashset::clean_txhashset_folder(&env::temp_dir());
+	}
+
 	/// Writes a reading view on a txhashset state that's been provided to us.
 	/// If we're willing to accept that new state, the data stream will be
 	/// read as a zip file, unzipped and the resulting state files should be
@@ -869,17 +876,19 @@ impl Chain {
 
 		let header = self.get_block_header(&h)?;
 
-		// Rewrite hashset
-		txhashset::zip_write(self.db_root.clone(), txhashset_data, &header)?;
-
-		let mut txhashset =
-			txhashset::TxHashSet::open(self.db_root.clone(), self.store.clone(), Some(&header))?;
+		{
+			let mut txhashset_ref = self.txhashset.write();
+			// Drop file handles in underlying txhashset
+			txhashset_ref.release_backend_files();
+		}
+
+		// Write txhashset to sandbox (in the os temporary directory)
+		let sandbox_dir = env::temp_dir();
+		txhashset::clean_txhashset_folder(&sandbox_dir);
+		txhashset::zip_write(sandbox_dir.clone(), txhashset_data.try_clone()?, &header)?;
+
+		let mut txhashset = txhashset::TxHashSet::open(
+			sandbox_dir
+				.to_str()
+				.expect("invalid sandbox folder")
+				.to_owned(),
+			self.store.clone(),
+			Some(&header),
+		)?;
 
 		// The txhashset.zip contains the output, rangeproof and kernel MMRs.
 		// We must rebuild the header MMR ourselves based on the headers in our db.
@@ -931,9 +940,27 @@ impl Chain {
 
 		debug!("txhashset_write: finished committing the batch (head etc.)");
 
-		// Replace the chain txhashset with the newly built one.
+		// Sandbox full validation ok, go to overwrite txhashset on db root
 		{
 			let mut txhashset_ref = self.txhashset.write();
+
+			// Before overwriting, drop file handles in the underlying txhashset
+			txhashset_ref.release_backend_files();
+
+			// Move sandbox to overwrite
+			txhashset.release_backend_files();
+			txhashset::txhashset_replace(sandbox_dir, PathBuf::from(self.db_root.clone()))?;
+
+			// Re-open on db root dir
+			txhashset = txhashset::TxHashSet::open(
+				self.db_root.clone(),
+				self.store.clone(),
+				Some(&header),
+			)?;
+
+			self.rebuild_header_mmr(&Tip::from_header(&header), &mut txhashset)?;
+
+			// Replace the chain txhashset with the newly built one.
 			*txhashset_ref = txhashset;
 		}
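The block above is the heart of the change: the chain's txhashset is swapped only while holding the write lock, with file handles released on both sides before the directory move, so no reader ever observes a half-replaced state. A compact sketch of that lock-then-swap pattern, with a stand-in `TxHashSet` type (not grin's real one) and `std::sync::RwLock` in place of grin's re-exported lock:

```rust
use std::sync::RwLock;

// Stand-in for grin's TxHashSet; only the parts the pattern needs.
struct TxHashSet {
    root: String,
}

impl TxHashSet {
    fn open(root: String) -> TxHashSet {
        TxHashSet { root }
    }
    fn release_backend_files(&mut self) {
        // the real code drops memory maps / file handles here
    }
}

// Swap a validated sandbox txhashset into the live slot under the write lock.
fn swap_in(mut validated: TxHashSet, live: &RwLock<TxHashSet>, db_root: &str) {
    let mut live_ref = live.write().unwrap();
    live_ref.release_backend_files(); // close handles on the old files
    validated.release_backend_files(); // close handles on the sandbox files
    // ... txhashset_replace(sandbox_dir, db_root) moves the directory here ...
    // ... then re-open on db_root and rebuild the header MMR ...
    *live_ref = TxHashSet::open(db_root.to_owned());
}

fn main() {
    let live = RwLock::new(TxHashSet::open("chain_data".to_owned()));
    let sandbox = TxHashSet::open("/tmp/txhashset".to_owned());
    swap_in(sandbox, &live, "chain_data");
}
```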
@@ -1450,17 +1450,50 @@ pub fn zip_read(root_dir: String, header: &BlockHeader, rand: Option<u32>) -> Re
 /// Extracts the txhashset data from a zip file and writes the content into the
 /// txhashset storage dir
 pub fn zip_write(
-	root_dir: String,
+	root_dir: PathBuf,
 	txhashset_data: File,
 	header: &BlockHeader,
 ) -> Result<(), Error> {
-	let txhashset_path = Path::new(&root_dir).join(TXHASHSET_SUBDIR);
+	debug!("zip_write on path: {:?}", root_dir);
+	let txhashset_path = root_dir.clone().join(TXHASHSET_SUBDIR);
 	fs::create_dir_all(txhashset_path.clone())?;
 	zip::decompress(txhashset_data, &txhashset_path, expected_file)
 		.map_err(|ze| ErrorKind::Other(ze.to_string()))?;
 	check_and_remove_files(&txhashset_path, header)
 }
 
+/// Overwrite the txhashset folder in the "to" folder with the one from the "from" folder
+pub fn txhashset_replace(from: PathBuf, to: PathBuf) -> Result<(), Error> {
+	debug!("txhashset_replace: move from {:?} to {:?}", from, to);
+
+	// clean the 'to' folder first
+	clean_txhashset_folder(&to);
+
+	// rename the 'from' folder to the 'to' folder
+	if let Err(e) = fs::rename(
+		from.clone().join(TXHASHSET_SUBDIR),
+		to.clone().join(TXHASHSET_SUBDIR),
+	) {
+		error!("hashset_replace fail on {}. err: {}", TXHASHSET_SUBDIR, e);
+		Err(ErrorKind::TxHashSetErr(format!("txhashset replacing fail")).into())
+	} else {
+		Ok(())
+	}
+}
+
+/// Clean the txhashset folder
+pub fn clean_txhashset_folder(root_dir: &PathBuf) {
+	let txhashset_path = root_dir.clone().join(TXHASHSET_SUBDIR);
+	if txhashset_path.exists() {
+		if let Err(e) = fs::remove_dir_all(txhashset_path.clone()) {
+			warn!(
+				"clean_txhashset_folder: fail on {:?}. err: {}",
+				txhashset_path, e
+			);
+		}
+	}
+}
+
 fn expected_file(path: &Path) -> bool {
 	use lazy_static::lazy_static;
 	use regex::Regex;
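The `expected_file` whitelist that `zip::decompress` consults is cut off above. As an illustration of the technique only (the regex below is a simplified assumption, not grin's actual pattern), such a filter accepts only known txhashset file paths and rejects anything else smuggled into the archive:

```rust
// Hypothetical whitelist filter in the style of `expected_file` above.
use lazy_static::lazy_static;
use regex::Regex;
use std::path::Path;

fn expected_file(path: &Path) -> bool {
    lazy_static! {
        // Assumed layout: pmmr data files under txhashset/{kernel,output,rangeproof}/.
        static ref RE: Regex = Regex::new(
            r"^txhashset(/|\\)(kernel|output|rangeproof)((/|\\)pmmr_(hash|data|leaf|prun)\.bin(\.\w*)?)?$"
        )
        .unwrap();
    }
    RE.is_match(&path.to_string_lossy())
}

fn main() {
    assert!(expected_file(Path::new("txhashset/kernel/pmmr_hash.bin")));
    assert!(!expected_file(Path::new("txhashset/kernel/strange_file.txt")));
}
```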
@@ -49,7 +49,12 @@ fn test_unexpected_zip() {
 	assert!(txhashset::zip_read(db_root.clone(), &BlockHeader::default(), Some(rand)).is_ok());
 	let zip_path = Path::new(&db_root).join(format!("txhashset_snapshot_{}.zip", rand));
 	let zip_file = File::open(&zip_path).unwrap();
-	assert!(txhashset::zip_write(db_root.clone(), zip_file, &BlockHeader::default()).is_ok());
+	assert!(txhashset::zip_write(
+		PathBuf::from(db_root.clone()),
+		zip_file,
+		&BlockHeader::default()
+	)
+	.is_ok());
 	// Remove temp txhashset dir
 	fs::remove_dir_all(Path::new(&db_root).join(format!("txhashset_zip_{}", rand))).unwrap();
 	// Then add strange files in the original txhashset folder
@@ -64,7 +69,12 @@ fn test_unexpected_zip() {
 	fs::remove_dir_all(Path::new(&db_root).join(format!("txhashset_zip_{}", rand))).unwrap();
 
 	let zip_file = File::open(zip_path).unwrap();
-	assert!(txhashset::zip_write(db_root.clone(), zip_file, &BlockHeader::default()).is_ok());
+	assert!(txhashset::zip_write(
+		PathBuf::from(db_root.clone()),
+		zip_file,
+		&BlockHeader::default()
+	)
+	.is_ok());
 	// Check that the txhashset dir does not contain the strange files
 	let txhashset_path = Path::new(&db_root).join("txhashset");
 	assert!(txhashset_contains_expected_files(
@@ -330,17 +330,16 @@ impl p2p::ChainAdapter for NetToChainAdapter {
 			update_time: old_update_time,
 			downloaded_size: old_downloaded_size,
 			..
-		} => {
-			self.sync_state
-				.update_txhashset_download(SyncStatus::TxHashsetDownload {
-					start_time,
-					prev_update_time: old_update_time,
-					update_time: Utc::now(),
-					prev_downloaded_size: old_downloaded_size,
-					downloaded_size,
-					total_size,
-				})
-		}
+		} => self
+			.sync_state
+			.update_txhashset_download(SyncStatus::TxHashsetDownload {
+				start_time,
+				prev_update_time: old_update_time,
+				update_time: Utc::now(),
+				prev_downloaded_size: old_downloaded_size,
+				downloaded_size,
+				total_size,
+			}),
 		_ => false,
 	}
 }
@@ -360,6 +359,7 @@ impl p2p::ChainAdapter for NetToChainAdapter {
 			.chain()
 			.txhashset_write(h, txhashset_data, self.sync_state.as_ref())
 		{
+			self.chain().clean_txhashset_sandbox();
 			error!("Failed to save txhashset archive: {}", e);
 			let is_good_data = !e.is_bad_data();
 			self.sync_state.set_sync_error(types::Error::Chain(e));