[2.x.x] simplify txhashset zip creation and extraction (#2908)

* simplify how we extract files from the txhashset zip archive

* bump version of zip-rs
cleanup create_zip (was compress)
use explicit list of files when creating zip archive

* rework zip tests for our simplified impl

* periodic debug during txhashset download

* rework list_files to return something usable
This commit is contained in:
Antioch Peverell 2019-07-12 10:50:27 +01:00 committed by GitHub
parent 3a4752635a
commit 1395074a25
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
9 changed files with 223 additions and 391 deletions

19
Cargo.lock generated
View file

@ -926,7 +926,7 @@ dependencies = [
"serde_derive 1.0.90 (registry+https://github.com/rust-lang/crates.io-index)",
"walkdir 2.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
"zeroize 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
"zip 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"zip 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -1314,15 +1314,6 @@ dependencies = [
"ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "msdos_time"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "ncurses"
version = "5.99.0"
@ -2650,12 +2641,11 @@ dependencies = [
[[package]]
name = "zip"
version = "0.4.2"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"msdos_time 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
"crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"podio 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
"time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)",
]
[metadata]
@ -2779,7 +2769,6 @@ dependencies = [
"checksum mio 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)" = "71646331f2619b1026cc302f87a2b8b648d5c6dd6937846a16cc8ce0f347f432"
"checksum mio-uds 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)" = "966257a94e196b11bb43aca423754d87429960a768de9414f3691d6957abf125"
"checksum miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f2f3b1cf331de6896aabf6e9d55dca90356cc9960cca7eaaf408a355ae919"
"checksum msdos_time 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "aad9dfe950c057b1bfe9c1f2aa51583a8468ef2a5baba2ebbe06d775efeb7729"
"checksum ncurses 5.99.0 (registry+https://github.com/rust-lang/crates.io-index)" = "15699bee2f37e9f8828c7b35b2bc70d13846db453f2d507713b758fabe536b82"
"checksum net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)" = "42550d9fb7b6684a6d404d9fa7250c2eb2646df731d1c06afc06dcee9e1bcf88"
"checksum nix 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d37e713a259ff641624b6cb20e3b12b2952313ba36b6823c0f16e6cfd9e5de17"
@ -2934,4 +2923,4 @@ dependencies = [
"checksum yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "65923dd1784f44da1d2c3dbbc5e822045628c590ba72123e1c73d3c230c4434d"
"checksum zeroize 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5e2ea4afc22e9497e26b42bf047083c30f7e3ca566f3bcd7187f83d18b327043"
"checksum zeroize_derive 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "afd1469e4bbca3b96606d26ba6e9bd6d3aed3b1299c82b92ec94377d22d78dbc"
"checksum zip 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "36b9e08fb518a65cf7e08a1e482573eb87a2f4f8c6619316612a3c1f162fe822"
"checksum zip 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c18fc320faf909036e46ac785ea827f72e485304877faf1a3a39538d3714dbc3"

View file

@ -32,8 +32,7 @@ use crate::util::secp::pedersen::{Commitment, RangeProof};
use crate::util::{file, secp_static, zip};
use croaring::Bitmap;
use grin_store;
use grin_store::pmmr::{clean_files_by_prefix, PMMRBackend, PMMR_FILES};
use std::collections::HashSet;
use grin_store::pmmr::{clean_files_by_prefix, PMMRBackend};
use std::fs::{self, File};
use std::path::{Path, PathBuf};
use std::sync::Arc;
@ -1457,11 +1456,13 @@ pub fn zip_read(root_dir: String, header: &BlockHeader) -> Result<File, Error> {
}
// Copy file to another dir
file::copy_dir_to(&txhashset_path, &temp_txhashset_path)?;
// Check and remove file that are not supposed to be there
check_and_remove_files(&temp_txhashset_path, header)?;
// Compress zip
zip::compress(&temp_txhashset_path, &File::create(zip_path.clone())?)
.map_err(|ze| ErrorKind::Other(ze.to_string()))?;
let zip_file = File::create(zip_path.clone())?;
// Explicit list of files to add to our zip archive.
let files = file_list(header);
zip::create_zip(&zip_file, &temp_txhashset_path, files)?;
temp_txhashset_path
};
@ -1480,6 +1481,30 @@ pub fn zip_read(root_dir: String, header: &BlockHeader) -> Result<File, Error> {
Ok(zip_file)
}
// Explicit list of files expected in our txhashset zip archive.
// We include *only* these files when building the txhashset zip.
// We extract *only* these files when receiving a txhashset zip.
// Everything else will be safely ignored.
// Return Vec<PathBuf> as some of these are dynamic (specifically the "rewound" leaf files).
/// Build the explicit list of files that make up a txhashset zip archive
/// for the given block header. The MMR backend files are fixed; the two
/// "pmmr_leaf" entries are dynamic as they are suffixed with the header hash
/// (the header-specific "rewound" leaf sets for the output and rangeproof MMRs).
fn file_list(header: &BlockHeader) -> Vec<PathBuf> {
	// Fixed backend files, identical for every archive.
	let mut files: Vec<PathBuf> = [
		// kernel MMR
		"kernel/pmmr_data.bin",
		"kernel/pmmr_hash.bin",
		// output MMR
		"output/pmmr_data.bin",
		"output/pmmr_hash.bin",
		"output/pmmr_prun.bin",
		// rangeproof MMR
		"rangeproof/pmmr_data.bin",
		"rangeproof/pmmr_hash.bin",
		"rangeproof/pmmr_prun.bin",
	]
	.iter()
	.map(|f| PathBuf::from(*f))
	.collect();

	// Header specific "rewound" leaf files for output and rangeproof MMR.
	let header_hash = header.hash();
	files.push(PathBuf::from(format!("output/pmmr_leaf.bin.{}", header_hash)));
	files.push(PathBuf::from(format!(
		"rangeproof/pmmr_leaf.bin.{}",
		header_hash
	)));

	files
}
/// Extract the txhashset data from a zip file and writes the content into the
/// txhashset storage dir
pub fn zip_write(
@ -1489,10 +1514,17 @@ pub fn zip_write(
) -> Result<(), Error> {
debug!("zip_write on path: {:?}", root_dir);
let txhashset_path = root_dir.clone().join(TXHASHSET_SUBDIR);
fs::create_dir_all(txhashset_path.clone())?;
zip::decompress(txhashset_data, &txhashset_path, expected_file)
.map_err(|ze| ErrorKind::Other(ze.to_string()))?;
check_and_remove_files(&txhashset_path, header)
fs::create_dir_all(&txhashset_path)?;
// Explicit list of files to extract from our zip archive.
let files = file_list(header);
// We expect to see *exactly* the paths listed above.
// No attempt is made to be permissive or forgiving with "alternative" paths.
// These are the *only* files we will attempt to extract from the zip file.
// If any of these are missing we will attempt to continue as some are potentially optional.
zip::extract_files(txhashset_data, &txhashset_path, files)?;
Ok(())
}
/// Overwrite txhashset folders in "to" folder with "from" folder
@ -1537,112 +1569,6 @@ pub fn clean_header_folder(root_dir: &PathBuf) {
}
}
fn expected_file(path: &Path) -> bool {
use lazy_static::lazy_static;
use regex::Regex;
let s_path = path.to_str().unwrap_or_else(|| "");
lazy_static! {
static ref RE: Regex = Regex::new(
format!(
r#"^({}|{}|{})((/|\\)pmmr_(hash|data|leaf|prun)\.bin(\.\w*)?)?$"#,
OUTPUT_SUBDIR, KERNEL_SUBDIR, RANGE_PROOF_SUBDIR
)
.as_str()
)
.expect("invalid txhashset regular expression");
}
RE.is_match(&s_path)
}
/// Check a txhashset directory and remove any unexpected
fn check_and_remove_files(txhashset_path: &PathBuf, header: &BlockHeader) -> Result<(), Error> {
// First compare the subdirectories
let subdirectories_expected: HashSet<_> = [OUTPUT_SUBDIR, KERNEL_SUBDIR, RANGE_PROOF_SUBDIR]
.iter()
.cloned()
.map(|s| String::from(s))
.collect();
let subdirectories_found: HashSet<_> = fs::read_dir(txhashset_path)?
.filter_map(|entry| {
entry.ok().and_then(|e| {
e.path()
.file_name()
.and_then(|n| n.to_str().map(|s| String::from(s)))
})
})
.collect();
let dir_difference: Vec<String> = subdirectories_found
.difference(&subdirectories_expected)
.cloned()
.collect();
// Removing unexpected directories if needed
if !dir_difference.is_empty() {
debug!("Unexpected folder(s) found in txhashset folder, removing.");
for diff in dir_difference {
let diff_path = txhashset_path.join(diff);
file::delete(diff_path)?;
}
}
// Then compare the files found in the subdirectories
let pmmr_files_expected: HashSet<_> = PMMR_FILES
.iter()
.cloned()
.map(|s| {
if s.contains("pmmr_leaf.bin") {
format!("{}.{}", s, header.hash())
} else {
String::from(s)
}
})
.collect();
let subdirectories = fs::read_dir(txhashset_path)?;
for subdirectory in subdirectories {
let subdirectory_path = subdirectory?.path();
let pmmr_files = fs::read_dir(&subdirectory_path)?;
let pmmr_files_found: HashSet<_> = pmmr_files
.filter_map(|entry| {
entry.ok().and_then(|e| {
e.path()
.file_name()
.and_then(|n| n.to_str().map(|s| String::from(s)))
})
})
.collect();
let difference: Vec<String> = pmmr_files_found
.difference(&pmmr_files_expected)
.cloned()
.collect();
let mut removed = 0;
if !difference.is_empty() {
for diff in &difference {
let diff_path = subdirectory_path.join(diff);
match file::delete(diff_path.clone()) {
Err(e) => error!(
"check_and_remove_files: fail to remove file '{:?}', Err: {:?}",
diff_path, e,
),
Ok(_) => {
removed += 1;
trace!("check_and_remove_files: file '{:?}' removed", diff_path);
}
}
}
debug!(
"{} tmp file(s) found in txhashset subfolder {:?}, {} removed.",
difference.len(),
&subdirectory_path,
removed,
);
}
}
Ok(())
}
/// Given a block header to rewind to and the block header at the
/// head of the current chain state, we need to calculate the positions
/// of all inputs (spent outputs) we need to "undo" during a rewind.
@ -1694,23 +1620,3 @@ pub fn input_pos_to_rewind(
bitmap_fast_or(None, &mut block_input_bitmaps).ok_or_else(|| ErrorKind::Bitmap.into())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_expected_files() {
assert!(!expected_file(Path::new("kernels")));
assert!(!expected_file(Path::new("xkernel")));
assert!(expected_file(Path::new("kernel")));
assert!(expected_file(Path::new("kernel\\pmmr_data.bin")));
assert!(expected_file(Path::new("kernel/pmmr_hash.bin")));
assert!(expected_file(Path::new("kernel/pmmr_leaf.bin")));
assert!(expected_file(Path::new("kernel/pmmr_prun.bin")));
assert!(expected_file(Path::new("kernel/pmmr_leaf.bin.deadbeef")));
assert!(!expected_file(Path::new("xkernel/pmmr_data.bin")));
assert!(!expected_file(Path::new("kernel/pmmrx_data.bin")));
assert!(!expected_file(Path::new("kernel/pmmr_data.binx")));
}
}

View file

@ -55,94 +55,65 @@ fn test_unexpected_zip() {
Path::new(&db_root).join(format!("txhashset_zip_{}", head.hash().to_string())),
);
// Then add strange files in the original txhashset folder
write_file(db_root.clone());
File::create(&Path::new(&db_root).join("txhashset").join("badfile"))
.expect("problem creating a file");
File::create(
&Path::new(&db_root)
.join("txhashset")
.join("output")
.join("badfile"),
)
.expect("problem creating a file");
let files = file::list_files(&Path::new(&db_root).join("txhashset"));
let expected_files: Vec<_> = vec![
"badfile",
"kernel/pmmr_data.bin",
"kernel/pmmr_hash.bin",
"kernel/pmmr_size.bin",
"output/badfile",
"output/pmmr_data.bin",
"output/pmmr_hash.bin",
"rangeproof/pmmr_data.bin",
"rangeproof/pmmr_hash.bin",
];
assert_eq!(
files,
expected_files
.iter()
.map(|x| PathBuf::from(x))
.collect::<Vec<_>>()
);
assert!(txhashset::zip_read(db_root.clone(), &head).is_ok());
// Check that the temp dir does not contain the strange files
let txhashset_zip_path =
Path::new(&db_root).join(format!("txhashset_zip_{}", head.hash().to_string()));
assert!(txhashset_contains_expected_files(
format!("txhashset_zip_{}", head.hash().to_string()),
txhashset_zip_path.clone()
));
let _ = fs::remove_dir_all(
Path::new(&db_root).join(format!("txhashset_zip_{}", head.hash().to_string())),
);
let zip_file = File::open(zip_path).unwrap();
assert!(txhashset::zip_write(PathBuf::from(db_root.clone()), zip_file, &head).is_ok());
// Check that the txhashset dir does not contain the strange files
let txhashset_path = Path::new(&db_root).join("txhashset");
assert!(txhashset_contains_expected_files(
"txhashset".to_string(),
txhashset_path.clone()
));
let _ = fs::remove_dir_all(Path::new(&db_root).join("txhashset"));
assert!(txhashset::zip_write(PathBuf::from(db_root.clone()), zip_file, &head).is_ok());
// Check that the new txhashset dir contains *only* the expected files
// No "badfiles" and no "size" file.
let files = file::list_files(&Path::new(&db_root).join("txhashset"));
let expected_files: Vec<_> = vec![
"kernel/pmmr_data.bin",
"kernel/pmmr_hash.bin",
"output/pmmr_data.bin",
"output/pmmr_hash.bin",
"rangeproof/pmmr_data.bin",
"rangeproof/pmmr_hash.bin",
];
assert_eq!(
files,
expected_files
.iter()
.map(|x| PathBuf::from(x))
.collect::<Vec<_>>()
);
}
// Cleanup chain directory
clean_output_dir(&db_root);
}
fn write_file(db_root: String) {
OpenOptions::new()
.create(true)
.write(true)
.open(
Path::new(&db_root)
.join("txhashset")
.join("kernel")
.join("strange0"),
)
.unwrap();
OpenOptions::new()
.create(true)
.write(true)
.open(Path::new(&db_root).join("txhashset").join("strange1"))
.unwrap();
fs::create_dir(Path::new(&db_root).join("txhashset").join("strange_dir")).unwrap();
OpenOptions::new()
.create(true)
.write(true)
.open(
Path::new(&db_root)
.join("txhashset")
.join("strange_dir")
.join("strange2"),
)
.unwrap();
fs::create_dir(
Path::new(&db_root)
.join("txhashset")
.join("strange_dir")
.join("strange_subdir"),
)
.unwrap();
OpenOptions::new()
.create(true)
.write(true)
.open(
Path::new(&db_root)
.join("txhashset")
.join("strange_dir")
.join("strange_subdir")
.join("strange3"),
)
.unwrap();
}
fn txhashset_contains_expected_files(dirname: String, path_buf: PathBuf) -> bool {
let list_zip_files = file::list_files(path_buf.into_os_string().into_string().unwrap());
let zip_files_hashset: HashSet<_> = HashSet::from_iter(list_zip_files.iter().cloned());
let expected_files = vec![
dirname,
"output".to_string(),
"rangeproof".to_string(),
"kernel".to_string(),
"pmmr_hash.bin".to_string(),
"pmmr_data.bin".to_string(),
];
let expected_files_hashset = HashSet::from_iter(expected_files.iter().cloned());
let intersection: HashSet<_> = zip_files_hashset
.difference(&expected_files_hashset)
.collect();
intersection.is_empty()
}

View file

@ -20,7 +20,7 @@ serde_derive = "1"
log4rs = { version = "0.8.1", features = ["rolling_file_appender", "compound_policy", "size_trigger", "fixed_window_roller"] }
log = "0.4"
walkdir = "2"
zip = { version = "0.4", default-features = false }
zip = { version = "0.5", default-features = false }
parking_lot = {version = "0.6"}
zeroize = "0.9"

View file

@ -44,18 +44,15 @@ pub fn copy_dir_to(src: &Path, dst: &Path) -> io::Result<u64> {
}
/// List directory
pub fn list_files(path: String) -> Vec<String> {
let mut files_vec: Vec<String> = vec![];
for entry in WalkDir::new(Path::new(&path))
pub fn list_files(path: &Path) -> Vec<PathBuf> {
WalkDir::new(path)
.sort_by(|a, b| a.path().cmp(b.path()))
.min_depth(1)
.into_iter()
.filter_map(|e| e.ok())
{
match entry.file_name().to_str() {
Some(path_str) => files_vec.push(path_str.to_string()),
None => println!("Could not read optional type"),
}
}
return files_vec;
.filter_map(|x| x.ok())
.filter(|x| x.file_type().is_file())
.filter_map(|x| x.path().strip_prefix(path).map(|x| x.to_path_buf()).ok())
.collect()
}
fn copy_to(src: &Path, src_type: &fs::FileType, dst: &Path) -> io::Result<u64> {

View file

@ -12,133 +12,76 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use std::fs::{self, File};
/// Wrappers around the `zip-rs` library to compress and decompress zip archives.
use std::io;
use std::panic;
use std::path::Path;
use walkdir::WalkDir;
use std::fs::{self, File};
use std::io::{self, BufReader, BufWriter, Write};
use std::path::{Path, PathBuf};
use std::thread;
use self::zip_rs::result::{ZipError, ZipResult};
use self::zip_rs::write::FileOptions;
use zip as zip_rs;
/// Compress a source directory recursively into a zip file.
/// Permissions are set to 644 by default to avoid any
/// unwanted execution bits.
pub fn compress(src_dir: &Path, dst_file: &File) -> ZipResult<()> {
if !Path::new(src_dir).is_dir() {
return Err(ZipError::Io(io::Error::new(
io::ErrorKind::Other,
"Source must be a directory.",
)));
}
/// Create a zip archive from source dir and list of relative file paths.
/// Permissions are set to 644 by default.
pub fn create_zip(dst_file: &File, src_dir: &Path, files: Vec<PathBuf>) -> io::Result<()> {
let mut writer = {
let zip = zip_rs::ZipWriter::new(dst_file);
BufWriter::new(zip)
};
let options = FileOptions::default()
.compression_method(zip_rs::CompressionMethod::Stored)
.unix_permissions(0o644);
let mut zip = zip_rs::ZipWriter::new(dst_file);
let walkdir = WalkDir::new(src_dir.to_str().unwrap());
let it = walkdir.into_iter();
for dent in it.filter_map(|e| e.ok()) {
let path = dent.path();
let name = path
.strip_prefix(Path::new(src_dir))
.unwrap()
.to_str()
.unwrap();
if path.is_file() {
zip.start_file(name, options)?;
let mut f = File::open(path)?;
io::copy(&mut f, &mut zip)?;
for x in &files {
let file_path = src_dir.join(x);
if let Ok(file) = File::open(file_path.clone()) {
info!("compress: {:?} -> {:?}", file_path, x);
writer.get_mut().start_file_from_path(x, options)?;
io::copy(&mut BufReader::new(file), &mut writer)?;
// Flush the BufWriter after each file so we start the next one correctly.
writer.flush()?;
}
}
zip.finish()?;
writer.get_mut().finish()?;
dst_file.sync_all()?;
Ok(())
}
/// Decompress a source file into the provided destination path.
pub fn decompress<R, F>(src_file: R, dest: &Path, expected: F) -> ZipResult<usize>
where
R: io::Read + io::Seek + panic::UnwindSafe,
F: Fn(&Path) -> bool + panic::UnwindSafe,
{
let mut decompressed = 0;
/// Extract a set of files from the provided zip archive.
pub fn extract_files(from_archive: File, dest: &Path, files: Vec<PathBuf>) -> io::Result<()> {
let dest: PathBuf = PathBuf::from(dest);
let files: Vec<_> = files.iter().cloned().collect();
let res = thread::spawn(move || {
let mut archive = zip_rs::ZipArchive::new(from_archive).expect("archive file exists");
for x in files {
if let Ok(file) = archive.by_name(x.to_str().expect("valid path")) {
let path = dest.join(file.sanitized_name());
let parent_dir = path.parent().expect("valid parent dir");
fs::create_dir_all(&parent_dir).expect("create parent dir");
let outfile = fs::File::create(&path).expect("file created");
io::copy(&mut BufReader::new(file), &mut BufWriter::new(outfile))
.expect("write to file");
// catch the panic to avoid the thread quit
panic::set_hook(Box::new(|panic_info| {
error!(
"panic occurred: {:?}",
panic_info.payload().downcast_ref::<&str>().unwrap()
);
}));
let result = panic::catch_unwind(move || {
let mut archive = zip_rs::ZipArchive::new(src_file)?;
info!("extract_files: {:?} -> {:?}", x, path);
for i in 0..archive.len() {
let mut file = archive.by_index(i)?;
let san_name = file.sanitized_name();
if san_name.to_str().unwrap_or("").replace("\\", "/") != file.name().replace("\\", "/")
|| !expected(&san_name)
{
info!(
"ignoring a suspicious file: {}, got {:?}",
file.name(),
san_name.to_str()
);
continue;
}
let file_path = dest.join(san_name);
if (&*file.name()).ends_with('/') {
fs::create_dir_all(&file_path)?;
} else {
if let Some(p) = file_path.parent() {
if !p.exists() {
fs::create_dir_all(&p)?;
}
}
let res = fs::File::create(&file_path);
let mut outfile = match res {
Err(e) => {
error!("{:?}", e);
return Err(zip::result::ZipError::Io(e));
}
Ok(r) => r,
};
io::copy(&mut file, &mut outfile)?;
decompressed += 1;
}
// Get and Set permissions
#[cfg(unix)]
{
use std::os::unix::fs::PermissionsExt;
if let Some(mode) = file.unix_mode() {
fs::set_permissions(
&file_path.to_str().unwrap(),
PermissionsExt::from_mode(mode),
)?;
// Set file permissions to "644" (Unix only).
#[cfg(unix)]
{
use std::os::unix::fs::PermissionsExt;
let mode = PermissionsExt::from_mode(0o644);
fs::set_permissions(&path, mode).expect("set file permissions");
}
}
}
Ok(decompressed)
});
match result {
Ok(res) => match res {
Err(e) => Err(e.into()),
Ok(_) => res,
},
Err(_) => {
error!("panic occurred on zip::decompress!");
Err(zip::result::ZipError::InvalidArchive(
"panic occurred on zip::decompress",
))
}
}
})
.join();
// If join() above is Ok then we successfully extracted the files.
// If the result is Err then we failed to extract the files.
res.map_err(|e| {
error!("failed to extract files from zip: {:?}", e);
io::Error::new(io::ErrorKind::Other, "failed to extract files from zip")
})
}

View file

@ -28,11 +28,9 @@ fn copy_dir() {
let original_path = Path::new("./target/tmp2/original");
let copy_path = Path::new("./target/tmp2/copy");
file::copy_dir_to(original_path, copy_path).unwrap();
let original_files = file::list_files("./target/tmp2/original".to_string());
let copied_files = file::list_files("./target/tmp2/copy".to_string());
for i in 1..5 {
assert_eq!(copied_files[i], original_files[i]);
}
let original_files = file::list_files(&Path::new("./target/tmp2/original"));
let copied_files = file::list_files(&Path::new("./target/tmp2/copy"));
assert_eq!(original_files, copied_files);
fs::remove_dir_all(root).unwrap();
}

Binary file not shown.

View file

@ -16,52 +16,80 @@ use grin_util as util;
use crate::util::zip;
use std::fs::{self, File};
use std::io::{self, Write};
use std::path::Path;
use std::io::Write;
use std::path::{Path, PathBuf};
#[test]
fn zip_unzip() {
let root = Path::new("./target/tmp");
let zip_name = "./target/tmp/zipped.zip";
let root = Path::new("target/tmp");
let zip_path = root.join("zipped.zip");
let path = root.join("to_zip");
fs::create_dir_all(root.join("./to_zip/sub")).unwrap();
write_files("to_zip".to_string(), &root).unwrap();
// Some files we want to use for testing our zip file.
{
fs::create_dir_all(&path).unwrap();
let zip_file = File::create(zip_name).unwrap();
zip::compress(&root.join("./to_zip"), &zip_file).unwrap();
zip_file.sync_all().unwrap();
let mut file = File::create(path.join("foo.txt")).unwrap();
file.write_all(b"Hello, world!").unwrap();
let mut file = File::create(path.join("bar.txt")).unwrap();
file.write_all(b"This, was unexpected!").unwrap();
let mut file = File::create(path.join("wat.txt")).unwrap();
file.write_all(b"Goodbye, world!").unwrap();
let sub_path = path.join("sub");
fs::create_dir_all(&sub_path).unwrap();
let mut file = File::create(sub_path.join("lorem.txt")).unwrap();
file.write_all(b"Lorem ipsum dolor sit amet, consectetur adipiscing elit")
.unwrap();
}
// Create our zip file using an explicit (sub)set of the above files.
{
// List of files to be accepted when creating the zip and extracting from the zip.
// Note: "wat.txt" is not included in the list of files (hence it is excluded).
let files = vec![
PathBuf::from("foo.txt"),
PathBuf::from("bar.txt"),
PathBuf::from("sub/lorem.txt"),
];
let zip_file = File::create(&zip_path).unwrap();
zip::create_zip(&zip_file, &path, files).unwrap();
zip_file.sync_all().unwrap();
}
let zip_path = Path::new(zip_name);
assert!(zip_path.exists());
assert!(zip_path.is_file());
assert!(zip_path.metadata().unwrap().len() > 300);
fs::create_dir_all(root.join("./dezipped")).unwrap();
let zip_file = File::open(zip_name).unwrap();
zip::decompress(zip_file, &root.join("./dezipped"), |_| true).unwrap();
let zip_file = File::open(zip_path).unwrap();
assert!(root.join("to_zip/foo.txt").is_file());
assert!(root.join("to_zip/bar.txt").is_file());
assert!(root.join("to_zip/sub").is_dir());
let lorem = root.join("to_zip/sub/lorem");
assert!(lorem.is_file());
assert!(lorem.metadata().unwrap().len() == 55);
{
let dest_dir = root.join("unzipped");
fs::create_dir_all(&dest_dir).unwrap();
let decompressed = zip::decompress(
File::open("tests/test.zip").unwrap(),
&root.join("./dezipped"),
|_| true,
)
.unwrap();
assert_eq!(decompressed, 1);
}
fn write_files(dir_name: String, root: &Path) -> io::Result<()> {
let mut file = File::create(root.join(dir_name.clone() + "/foo.txt"))?;
file.write_all(b"Hello, world!")?;
let mut file = File::create(root.join(dir_name.clone() + "/bar.txt"))?;
file.write_all(b"Goodbye, world!")?;
let mut file = File::create(root.join(dir_name.clone() + "/sub/lorem"))?;
file.write_all(b"Lorem ipsum dolor sit amet, consectetur adipiscing elit")?;
Ok(())
// List of files to extract from the zip.
// Note: we do not extract "wat.txt" here, even if present in the zip.
let files = vec![PathBuf::from("foo.txt"), PathBuf::from("sub/lorem.txt")];
zip::extract_files(zip_file, &dest_dir, files).unwrap();
assert!(dest_dir.join("foo.txt").is_file());
// Check we did not extract "bar.txt" from the zip file.
// We should *only* extract the files explicitly listed.
assert!(!dest_dir.join("bar.txt").exists());
let sub_path = dest_dir.join("sub");
assert!(sub_path.is_dir());
let lorem = sub_path.join("lorem.txt");
assert!(lorem.is_file());
assert_eq!(
fs::read_to_string(lorem).unwrap(),
"Lorem ipsum dolor sit amet, consectetur adipiscing elit"
);
}
}