Check content before zip/unzip the txhashset (#1174)
* Check txhashset content before zip/unzip
* Add header in txhashset verification
* Add copy function and test
* Add file util
* Now check and remove unexpected files instead of just crashing
parent 3ee01055ed
commit 3df050cc93
11 changed files with 297 additions and 24 deletions
@@ -529,7 +529,7 @@ impl Chain {
         }

         // prepares the zip and return the corresponding Read
-        let txhashset_reader = txhashset::zip_read(self.db_root.clone())?;
+        let txhashset_reader = txhashset::zip_read(self.db_root.clone(), &header)?;
         Ok((
             header.output_mmr_size,
             header.kernel_mmr_size,

@@ -558,7 +558,7 @@ impl Chain {
         }

         let header = self.store.get_block_header(&h)?;
-        txhashset::zip_write(self.db_root.clone(), txhashset_data)?;
+        txhashset::zip_write(self.db_root.clone(), txhashset_data, &header)?;

         let mut txhashset =
             txhashset::TxHashSet::open(self.db_root.clone(), self.store.clone(), Some(&header))?;

@@ -15,9 +15,8 @@
 //! Utility structs to handle the 3 hashtrees (output, range proof,
 //! kernel) more conveniently and transactionally.

-use std::collections::HashMap;
-use std::fs;
-use std::fs::File;
+use std::collections::{HashMap, HashSet};
+use std::fs::{self, File};
 use std::path::{Path, PathBuf};
 use std::sync::Arc;
 use std::time::Instant;
@@ -38,11 +37,11 @@ use core::ser::{PMMRIndexHashable, PMMRable};

 use error::{Error, ErrorKind};
 use grin_store;
-use grin_store::pmmr::PMMRBackend;
+use grin_store::pmmr::{PMMRBackend, PMMR_FILES};
 use grin_store::types::prune_noop;
 use store::{Batch, ChainStore};
 use types::{TxHashSetRoots, TxHashsetWriteStatus};
-use util::{secp_static, zip, LOGGER};
+use util::{file, secp_static, zip, LOGGER};

 const TXHASHSET_SUBDIR: &'static str = "txhashset";
 const OUTPUT_SUBDIR: &'static str = "output";
@@ -1055,13 +1054,23 @@ impl<'a> Extension<'a> {

 /// Packages the txhashset data files into a zip and returns a Read to the
 /// resulting file
-pub fn zip_read(root_dir: String) -> Result<File, Error> {
+pub fn zip_read(root_dir: String, header: &BlockHeader) -> Result<File, Error> {
     let txhashset_path = Path::new(&root_dir).join(TXHASHSET_SUBDIR);
     let zip_path = Path::new(&root_dir).join(TXHASHSET_ZIP);

     // create the zip archive
     {
-        zip::compress(&txhashset_path, &File::create(zip_path.clone())?)
+        // Temp txhashset directory
+        let temp_txhashset_path = Path::new(&root_dir).join(TXHASHSET_SUBDIR.to_string() + "_zip");
+        // Remove temp dir if it exists
+        if temp_txhashset_path.exists() {
+            fs::remove_dir_all(&temp_txhashset_path)?;
+        }
+        // Copy files to the temp dir
+        file::copy_dir_to(&txhashset_path, &temp_txhashset_path)?;
+        // Check for and remove any files that are not supposed to be there
+        check_and_remove_files(&temp_txhashset_path, header)?;
+        // Compress zip
+        zip::compress(&temp_txhashset_path, &File::create(zip_path.clone())?)
             .map_err(|ze| ErrorKind::Other(ze.to_string()))?;
     }

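The new `zip_read` above stages a copy of the txhashset into a sibling `txhashset_zip` directory, sanitizes that copy, and compresses it, so the live data directory is never modified while an archive is prepared for a peer. Below is a minimal, std-only sketch of the staging step; the directory names are illustrative, and the sanitize/compress calls are only indicated in comments rather than grin's actual functions.

```rust
use std::fs;
use std::io;
use std::path::Path;

/// Stage a flat (non-recursive) copy of `src` into `staging`, replacing any
/// leftover staging directory from a previous run.
fn stage_copy(src: &Path, staging: &Path) -> io::Result<()> {
    if staging.exists() {
        fs::remove_dir_all(staging)?;
    }
    fs::create_dir_all(staging)?;
    for entry in fs::read_dir(src)? {
        let entry = entry?;
        if entry.file_type()?.is_file() {
            fs::copy(entry.path(), staging.join(entry.file_name()))?;
        }
    }
    // In the real code the staged copy is then checked against the block
    // header and compressed; both steps are omitted in this sketch.
    Ok(())
}

fn main() -> io::Result<()> {
    let src = Path::new("./target/stage_demo/txhashset");
    fs::create_dir_all(src)?;
    fs::File::create(src.join("pmmr_data.bin"))?;
    stage_copy(src, Path::new("./target/stage_demo/txhashset_zip"))
}
```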
@@ -1072,12 +1081,82 @@ pub fn zip_read(root_dir: String) -> Result<File, Error> {

 /// Extract the txhashset data from a zip file and writes the content into the
 /// txhashset storage dir
-pub fn zip_write(root_dir: String, txhashset_data: File) -> Result<(), Error> {
+pub fn zip_write(root_dir: String, txhashset_data: File, header: &BlockHeader) -> Result<(), Error> {
     let txhashset_path = Path::new(&root_dir).join(TXHASHSET_SUBDIR);

     fs::create_dir_all(txhashset_path.clone())?;
     zip::decompress(txhashset_data, &txhashset_path)
-        .map_err(|ze| ErrorKind::Other(ze.to_string()).into())
+        .map_err(|ze| ErrorKind::Other(ze.to_string()))?;
+    check_and_remove_files(&txhashset_path, header)
 }
+
+/// Check a txhashset directory and remove any unexpected file
+fn check_and_remove_files(txhashset_path: &PathBuf, header: &BlockHeader) -> Result<(), Error> {
+    // First compare the subdirectories
+    let subdirectories_expected: HashSet<_> = [OUTPUT_SUBDIR, KERNEL_SUBDIR, RANGE_PROOF_SUBDIR]
+        .iter()
+        .cloned()
+        .map(|s| String::from(s))
+        .collect();
+
+    let subdirectories_found: HashSet<_> = fs::read_dir(txhashset_path)?
+        .filter_map(|entry| {
+            entry.ok().and_then(|e| {
+                e.path()
+                    .file_name()
+                    .and_then(|n| n.to_str().map(|s| String::from(s)))
+            })
+        })
+        .collect();
+
+    let dir_difference: Vec<String> = subdirectories_found
+        .difference(&subdirectories_expected)
+        .cloned()
+        .collect();
+
+    // Remove unexpected directories if needed
+    if !dir_difference.is_empty() {
+        debug!(LOGGER, "Unexpected folder(s) found in txhashset folder, removing.");
+        for diff in dir_difference {
+            let diff_path = txhashset_path.join(diff);
+            file::delete(diff_path)?;
+        }
+    }
+
+    // Then compare the files found in the subdirectories
+    let pmmr_files_expected: HashSet<_> = PMMR_FILES
+        .iter()
+        .cloned()
+        .map(|s| if s.contains("pmmr_leaf.bin") {
+            format!("{}.{}", s, header.hash())
+        } else {
+            String::from(s)
+        })
+        .collect();
+
+    let subdirectories = fs::read_dir(txhashset_path)?;
+    for subdirectory in subdirectories {
+        let subdirectory_path = subdirectory?.path();
+        let pmmr_files = fs::read_dir(&subdirectory_path)?;
+        let pmmr_files_found: HashSet<_> = pmmr_files
+            .filter_map(|entry| {
+                entry.ok().and_then(|e| {
+                    e.path()
+                        .file_name()
+                        .and_then(|n| n.to_str().map(|s| String::from(s)))
+                })
+            })
+            .collect();
+        let difference: Vec<String> = pmmr_files_found
+            .difference(&pmmr_files_expected)
+            .cloned()
+            .collect();
+        if !difference.is_empty() {
+            debug!(LOGGER, "Unexpected file(s) found in txhashset subfolder {:?}, removing.", &subdirectory_path);
+            for diff in difference {
+                let diff_path = subdirectory_path.join(diff);
+                file::delete(diff_path)?;
+            }
+        }
+    }
+    Ok(())
+}

 /// Given a block header to rewind to and the block header at the

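The core of `check_and_remove_files` is an expected-versus-found set comparison: build a `HashSet` of names that are allowed to be present, list what is actually on disk, and delete the difference. A stripped-down, std-only sketch of that technique, using illustrative directory and file names rather than grin's constants:

```rust
use std::collections::HashSet;
use std::fs;
use std::io;
use std::path::Path;

// Build the set of entry names actually present in `dir`.
fn entry_names(dir: &Path) -> io::Result<HashSet<String>> {
    let mut found = HashSet::new();
    for entry in fs::read_dir(dir)? {
        if let Some(name) = entry?.file_name().to_str() {
            found.insert(name.to_string());
        }
    }
    Ok(found)
}

// Delete every entry in `dir` whose name is not in `expected`.
fn remove_unexpected(dir: &Path, expected: &HashSet<String>) -> io::Result<()> {
    for name in entry_names(dir)?.difference(expected) {
        let path = dir.join(name);
        if path.is_dir() {
            fs::remove_dir_all(&path)?;
        } else {
            fs::remove_file(&path)?;
        }
    }
    Ok(())
}

fn main() -> io::Result<()> {
    let dir = Path::new("./target/sanitize_demo");
    fs::create_dir_all(dir.join("kernel"))?;
    fs::create_dir_all(dir.join("strange_dir"))?;
    fs::File::create(dir.join("strange0"))?;

    let expected: HashSet<String> = ["output", "rangeproof", "kernel"]
        .iter()
        .map(|s| s.to_string())
        .collect();
    remove_unexpected(dir, &expected)?;

    // Only the expected "kernel" subdirectory survives.
    assert!(dir.join("kernel").exists());
    assert!(!dir.join("strange_dir").exists());
    assert!(!dir.join("strange0").exists());
    Ok(())
}
```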
@@ -17,17 +17,21 @@ extern crate grin_core as core;
 extern crate grin_keychain as keychain;
 extern crate grin_store as store;
 extern crate grin_wallet as wallet;
+extern crate grin_util as util;

-use std::fs;
+use std::collections::HashSet;
+use std::iter::FromIterator;
+use std::fs::{self, File, OpenOptions};
 use std::path::{Path, PathBuf};
 use std::sync::Arc;

 use chain::store::ChainStore;
 use chain::txhashset;
 use chain::types::Tip;
 use core::core::merkle_proof::MerkleProof;
 use core::core::target::Difficulty;
 use core::core::{Block, BlockHeader};
 use keychain::{ExtKeychain, Keychain};
+use util::file;
 use wallet::libtx::{build, reward};

 fn clean_output_dir(dir_name: &str) {

@@ -135,3 +139,68 @@ fn test_some_raw_txs() {
         Ok(())
     });
 }
+
+#[test]
+fn test_unexpected_zip() {
+    let db_root = format!(".grin_txhashset_zip");
+    clean_output_dir(&db_root);
+    let db_env = Arc::new(store::new_env(db_root.clone()));
+    let chain_store = ChainStore::new(db_env).unwrap();
+    let store = Arc::new(chain_store);
+    txhashset::TxHashSet::open(db_root.clone(), store.clone(), None).unwrap();
+    // First check if everything works out of the box
+    assert!(txhashset::zip_read(db_root.clone(), &BlockHeader::default()).is_ok());
+    let zip_path = Path::new(&db_root).join("txhashset_snapshot.zip");
+    let zip_file = File::open(&zip_path).unwrap();
+    assert!(txhashset::zip_write(db_root.clone(), zip_file, &BlockHeader::default()).is_ok());
+    // Remove temp txhashset dir
+    fs::remove_dir_all(Path::new(&db_root).join("txhashset_zip")).unwrap();
+    // Then add strange files in the original txhashset folder
+    write_file(db_root.clone());
+    assert!(txhashset::zip_read(db_root.clone(), &BlockHeader::default()).is_ok());
+    // Check that the temp dir does not contain the strange files
+    let txhashset_zip_path = Path::new(&db_root).join("txhashset_zip");
+    assert!(txhashset_contains_expected_files("txhashset_zip".to_string(), txhashset_zip_path.clone()));
+    fs::remove_dir_all(Path::new(&db_root).join("txhashset_zip")).unwrap();
+
+    let zip_file = File::open(zip_path).unwrap();
+    assert!(txhashset::zip_write(db_root.clone(), zip_file, &BlockHeader::default()).is_ok());
+    // Check that the txhashset dir does not contain the strange files
+    let txhashset_path = Path::new(&db_root).join("txhashset");
+    assert!(txhashset_contains_expected_files("txhashset".to_string(), txhashset_path.clone()));
+    fs::remove_dir_all(Path::new(&db_root).join("txhashset")).unwrap();
+}
+
+fn write_file(db_root: String) {
+    OpenOptions::new()
+        .create(true)
+        .write(true)
+        .open(Path::new(&db_root).join("txhashset").join("kernel").join("strange0")).unwrap();
+    OpenOptions::new()
+        .create(true)
+        .write(true)
+        .open(Path::new(&db_root).join("txhashset").join("strange1")).unwrap();
+    fs::create_dir(Path::new(&db_root).join("txhashset").join("strange_dir")).unwrap();
+    OpenOptions::new()
+        .create(true)
+        .write(true)
+        .open(Path::new(&db_root).join("txhashset").join("strange_dir").join("strange2")).unwrap();
+    fs::create_dir(Path::new(&db_root).join("txhashset").join("strange_dir").join("strange_subdir")).unwrap();
+    OpenOptions::new()
+        .create(true)
+        .write(true)
+        .open(Path::new(&db_root).join("txhashset").join("strange_dir").join("strange_subdir").join("strange3")).unwrap();
+}
+
+fn txhashset_contains_expected_files(dirname: String, path_buf: PathBuf) -> bool {
+    let list_zip_files = file::list_files(path_buf.into_os_string().into_string().unwrap());
+    let zip_files_hashset: HashSet<_> = HashSet::from_iter(list_zip_files.iter().cloned());
+    let expected_files = vec![dirname, "output".to_string(), "rangeproof".to_string(), "kernel".to_string(), "pmmr_hash.bin".to_string(), "pmmr_data.bin".to_string()];
+    let expected_files_hashset = HashSet::from_iter(expected_files.iter().cloned());
+    let difference: HashSet<_> = zip_files_hashset.difference(&expected_files_hashset).collect();
+    difference.is_empty()
+}

@@ -285,7 +285,7 @@ where
         Ok(elmt_pos)
     }

-    /// Saves a snaphost of the MMR tagged with the block hash.
+    /// Saves a snapshot of the MMR tagged with the block hash.
     /// Specifically - snapshots the utxo file as we need this rewound before
     /// sending the txhashset zip file to another node for fast-sync.
     pub fn snapshot(&mut self, header: &BlockHeader) -> Result<(), String> {

@@ -31,6 +31,14 @@ const PMMR_DATA_FILE: &'static str = "pmmr_data.bin";
 const PMMR_LEAF_FILE: &'static str = "pmmr_leaf.bin";
 const PMMR_PRUN_FILE: &'static str = "pmmr_prun.bin";

+/// The list of PMMR data files, for internal purposes
+pub const PMMR_FILES: [&str; 4] = [
+    PMMR_HASH_FILE,
+    PMMR_DATA_FILE,
+    PMMR_LEAF_FILE,
+    PMMR_PRUN_FILE,
+];
+
 /// PMMR persistent backend implementation. Relies on multiple facilities to
 /// handle writing, reading and pruning.
 ///

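Since the leaf set is snapshotted per block (see the `snapshot` doc fix above), the expected name for `pmmr_leaf.bin` in a txhashset archive carries the hash of the corresponding block header as a suffix, which is why `check_and_remove_files` special-cases it. A small sketch of building that expected-name set, with a made-up hash string standing in for `header.hash()`:

```rust
use std::collections::HashSet;

const PMMR_FILES: [&str; 4] = ["pmmr_hash.bin", "pmmr_data.bin", "pmmr_leaf.bin", "pmmr_prun.bin"];

fn expected_names(header_hash: &str) -> HashSet<String> {
    PMMR_FILES
        .iter()
        .map(|s| {
            if s.contains("pmmr_leaf.bin") {
                // leaf files are tagged with the hash of the snapshot header
                format!("{}.{}", s, header_hash)
            } else {
                s.to_string()
            }
        })
        .collect()
}

fn main() {
    let expected = expected_names("0123abcd");
    assert!(expected.contains("pmmr_hash.bin"));
    assert!(expected.contains("pmmr_leaf.bin.0123abcd"));
}
```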
util/src/file.rs (new file)

@@ -0,0 +1,64 @@
+// Copyright 2018 The Grin Developers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+use std::fs;
+use std::io;
+use std::path::{Path, PathBuf};
+use walkdir::WalkDir;
+
+pub fn delete(path_buf: PathBuf) -> io::Result<()> {
+    if path_buf.is_dir() {
+        fs::remove_dir_all(path_buf)
+    } else if path_buf.is_file() {
+        fs::remove_file(path_buf)
+    } else {
+        Ok(())
+    }
+}
+
+pub fn copy_dir_to(src: &Path, dst: &Path) -> io::Result<u64> {
+    let mut counter = 0u64;
+    if !dst.is_dir() {
+        fs::create_dir(dst)?
+    }
+
+    for entry_result in src.read_dir()? {
+        let entry = entry_result?;
+        let file_type = entry.file_type()?;
+        let count = copy_to(&entry.path(), &file_type, &dst.join(entry.file_name()))?;
+        counter += count;
+    }
+    Ok(counter)
+}
+
+pub fn list_files(path: String) -> Vec<String> {
+    let mut files_vec: Vec<String> = vec![];
+    for entry in WalkDir::new(Path::new(&path)).into_iter().filter_map(|e| e.ok()) {
+        match entry.file_name().to_str() {
+            Some(path_str) => files_vec.push(path_str.to_string()),
+            None => println!("Could not read optional type"),
+        }
+    }
+    return files_vec;
+}
+
+fn copy_to(src: &Path, src_type: &fs::FileType, dst: &Path) -> io::Result<u64> {
+    if src_type.is_file() {
+        fs::copy(src, dst)
+    } else if src_type.is_dir() {
+        copy_dir_to(src, dst)
+    } else {
+        return Err(io::Error::new(io::ErrorKind::Other, format!("Could not copy: {}", src.display())))
+    }
+}

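For reference, here is a short usage sketch of the new helpers, assuming the `grin_util` crate as introduced in this commit (signatures as shown above: `copy_dir_to(&Path, &Path) -> io::Result<u64>`, `list_files(String) -> Vec<String>`, `delete(PathBuf) -> io::Result<()>`); the paths below are illustrative.

```rust
extern crate grin_util as util;

use std::fs;
use std::io;
use std::path::Path;
use util::file;

fn main() -> io::Result<()> {
    let src = Path::new("./target/file_demo/src");
    let dst = Path::new("./target/file_demo/dst");
    fs::create_dir_all(src)?;
    fs::File::create(src.join("a.bin"))?;

    // Recursively copy `src` into `dst`; the returned u64 accumulates the
    // values returned by `fs::copy` for the copied files.
    let copied = file::copy_dir_to(src, dst)?;
    println!("copy_dir_to returned {}", copied);

    // List the file names found under `dst` (walkdir-based, names only).
    for name in file::list_files(dst.to_string_lossy().into_owned()) {
        println!("{}", name);
    }

    // Remove a file or a directory, whichever `dst` happens to be.
    file::delete(dst.to_path_buf())
}
```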
@@ -64,6 +64,8 @@ pub use hex::*;

 /// Compress and decompress zip bz2 archives
 pub mod zip;
+/// File util
+pub mod file;

 /// Encapsulation of a RefCell<Option<T>> for one-time initialization after
 /// construction. This implementation will purposefully fail hard if not used

@@ -88,7 +88,7 @@ lazy_static! {
     };
 }

-/// Initialises the logger with the given configuration
+/// Initialize the logger with the given configuration
 pub fn init_logger(config: Option<LoggingConfig>) {
     if let Some(c) = config {
         let mut config_ref = LOGGING_CONFIG.lock().unwrap();

@@ -97,4 +97,4 @@ where
             }
         }
         Ok(())
     }
 }

util/tests/file.rs (new file)

@@ -0,0 +1,49 @@
+// Copyright 2018 The Grin Developers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+extern crate grin_util as util;
+extern crate walkdir;
+
+use std::fs::{self, File};
+use std::io::{self, Write};
+use std::path::Path;
+use util::file;
+use walkdir::WalkDir;
+
+#[test]
+fn copy_dir() {
+    let root = Path::new("./target/tmp2");
+    fs::create_dir_all(root.join("./original/sub")).unwrap();
+    fs::create_dir_all(root.join("./original/sub2")).unwrap();
+    write_files("original".to_string(), &root).unwrap();
+    let original_path = Path::new("./target/tmp2/original");
+    let copy_path = Path::new("./target/tmp2/copy");
+    file::copy_dir_to(original_path, copy_path).unwrap();
+    let original_files = file::list_files("./target/tmp2/original".to_string());
+    let copied_files = file::list_files("./target/tmp2/copy".to_string());
+    for i in 1..5 {
+        assert_eq!(copied_files[i], original_files[i]);
+    }
+    fs::remove_dir_all(root).unwrap();
+}
+
+fn write_files(dir_name: String, root: &Path) -> io::Result<()> {
+    let mut file = File::create(root.join(dir_name.clone() + "/foo.txt"))?;
+    file.write_all(b"Hello, world!")?;
+    let mut file = File::create(root.join(dir_name.clone() + "/bar.txt"))?;
+    file.write_all(b"Goodbye, world!")?;
+    let mut file = File::create(root.join(dir_name.clone() + "/sub/lorem"))?;
+    file.write_all(b"Lorem ipsum dolor sit amet, consectetur adipiscing elit")?;
+    Ok(())
+}

@@ -13,11 +13,13 @@
 // limitations under the License.

 extern crate grin_util as util;
+extern crate walkdir;

 use std::fs::{self, File};
 use std::io::{self, Write};
 use std::path::Path;
 use util::zip;
+use walkdir::WalkDir;

 #[test]
 fn zip_unzip() {

@@ -25,7 +27,7 @@ fn zip_unzip() {
     let zip_name = "./target/tmp/zipped.zip";

     fs::create_dir_all(root.join("./to_zip/sub")).unwrap();
-    write_files(&root).unwrap();
+    write_files("to_zip".to_string(), &root).unwrap();

     let zip_file = File::create(zip_name).unwrap();
     zip::compress(&root.join("./to_zip"), &zip_file).unwrap();

@@ -48,12 +50,12 @@ fn zip_unzip() {
     assert!(lorem.metadata().unwrap().len() == 55);
 }

-fn write_files(root: &Path) -> io::Result<()> {
-    let mut file = File::create(root.join("to_zip/foo.txt"))?;
+fn write_files(dir_name: String, root: &Path) -> io::Result<()> {
+    let mut file = File::create(root.join(dir_name.clone() + "/foo.txt"))?;
     file.write_all(b"Hello, world!")?;
-    let mut file = File::create(root.join("to_zip/bar.txt"))?;
+    let mut file = File::create(root.join(dir_name.clone() + "/bar.txt"))?;
     file.write_all(b"Goodbye, world!")?;
-    let mut file = File::create(root.join("to_zip/sub/lorem"))?;
+    let mut file = File::create(root.join(dir_name.clone() + "/sub/lorem"))?;
     file.write_all(b"Lorem ipsum dolor sit amet, consectetur adipiscing elit")?;
     Ok(())
 }