[T4 ONLY] BIP32 Wallet Compliance - Aggsig Updates - Bulletproof Updates ()

* change keychain to use bip32 paths

* convert keychain to use bip32

* change identifier to be serialisation of 4-level bip32 path

* wallet changes compiling, pass parent key into all wallet functions

* rustfmt

* fix tests in chain

* rustfmt

* core tests passing

* rustfmt

* pool tests

* rustfmt

* fixing wallet tests

* rustfmt

* remove file wallet

* wallet tests compiling

* rustfmt

* remove db_migrate

* successful tx exchange test using BIP32 paths

* rustfmt

* fix wallet derivation paths to m/0/0/0

* wallet test fixed again, working with default path

* rustfmt

* fix server tests

* rustfmt

* make parent_id a trait on walletbackend

* rustfmt

* add ability for wallet to switch between multiple named accounts, and tests (not complete)

* rustfmt

* account switching tests in place and passing

* rustfmt

* compile and test with latest libsecp changes

* added public key sum to calculated e for aggsig

* rustfmt

* Update secp to 26

* bulletproof bip32 path integration

* rustfmt

* wallet restore updated with bip32 paths, also restores accounts

* rustfmt

* rustfmt

* remove old extkey

* remove old extkey

* rustfmt

* add wallet account commands

* rustfmt

* update wallet documentation

* rustfmt

* merge from master

* update libsecp tag

* merge from upstream and fix server test

* rustfmt

* rustfmt

* merge from master

* update latest libsecp merge

* fix commitment to zero value generation
Yeastplume 2018-10-10 10:11:01 +01:00 committed by GitHub
parent 92f826a917
commit 6c8c483172
65 changed files with 2017 additions and 1454 deletions
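The headline change in the commit list above is that key identifiers are now the serialisation of a 4-level BIP32 path rather than an opaque id searched for under a single root key. A minimal orientation sketch in Rust, using only names that appear in the diffs below (not code from the commit itself):

```rust
use keychain::{ExtKeychain, ExtKeychainPath, Keychain};

fn path_identifier_sketch() {
    // Before this change, tests asked the keychain instance for a child by index:
    //     let key_id = keychain.derive_key_id(1).unwrap();

    // Now an identifier *is* the serialised path (1 depth byte + four u32 indices),
    // so it can be built without touching the seed at all:
    let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
    let same_id = ExtKeychainPath::new(1, 1, 0, 0, 0).to_identifier();
    assert_eq!(key_id, same_id);
}
```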

Cargo.lock generated

File diff suppressed because it is too large

View file

@ -246,54 +246,52 @@ impl Chain {
Ok(head)
}
Err(e) => {
match e.kind() {
ErrorKind::Orphan => {
let block_hash = b.hash();
let orphan = Orphan {
block: b,
opts: opts,
added: Instant::now(),
};
Err(e) => match e.kind() {
ErrorKind::Orphan => {
let block_hash = b.hash();
let orphan = Orphan {
block: b,
opts: opts,
added: Instant::now(),
};
&self.orphans.add(orphan);
&self.orphans.add(orphan);
debug!(
LOGGER,
"process_block: orphan: {:?}, # orphans {}{}",
block_hash,
self.orphans.len(),
if self.orphans.len_evicted() > 0 {
format!(", # evicted {}", self.orphans.len_evicted())
} else {
String::new()
},
);
Err(ErrorKind::Orphan.into())
}
ErrorKind::Unfit(ref msg) => {
debug!(
LOGGER,
"Block {} at {} is unfit at this time: {}",
b.hash(),
b.header.height,
msg
);
Err(ErrorKind::Unfit(msg.clone()).into())
}
_ => {
info!(
LOGGER,
"Rejected block {} at {}: {:?}",
b.hash(),
b.header.height,
e
);
add_to_hash_cache(b.hash());
Err(ErrorKind::Other(format!("{:?}", e).to_owned()).into())
}
debug!(
LOGGER,
"process_block: orphan: {:?}, # orphans {}{}",
block_hash,
self.orphans.len(),
if self.orphans.len_evicted() > 0 {
format!(", # evicted {}", self.orphans.len_evicted())
} else {
String::new()
},
);
Err(ErrorKind::Orphan.into())
}
}
ErrorKind::Unfit(ref msg) => {
debug!(
LOGGER,
"Block {} at {} is unfit at this time: {}",
b.hash(),
b.header.height,
msg
);
Err(ErrorKind::Unfit(msg.clone()).into())
}
_ => {
info!(
LOGGER,
"Rejected block {} at {}: {:?}",
b.hash(),
b.header.height,
e
);
add_to_hash_cache(b.hash());
Err(ErrorKind::Other(format!("{:?}", e).to_owned()).into())
}
},
}
}

View file

@ -33,7 +33,7 @@ use core::core::{Block, BlockHeader, Transaction};
use core::global::{self, ChainTypes};
use core::pow::{self, Difficulty};
use core::{consensus, genesis};
use keychain::{ExtKeychain, Keychain};
use keychain::{ExtKeychain, ExtKeychainPath, Keychain};
use wallet::libtx;
fn clean_output_dir(dir_name: &str) {
@ -83,7 +83,7 @@ fn data_files() {
for n in 1..4 {
let prev = chain.head_header().unwrap();
let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
let pk = keychain.derive_key_id(n as u32).unwrap();
let pk = ExtKeychainPath::new(1, n as u32, 0, 0, 0).to_identifier();
let reward = libtx::reward::output(&keychain, &pk, 0, prev.height).unwrap();
let mut b = core::core::Block::new(&prev, vec![], difficulty.clone(), reward).unwrap();
b.header.timestamp = prev.timestamp + Duration::seconds(60);
@ -154,7 +154,7 @@ fn _prepare_block_nosum(
diff: u64,
txs: Vec<&Transaction>,
) -> Block {
let key_id = kc.derive_key_id(diff as u32).unwrap();
let key_id = ExtKeychainPath::new(1, diff as u32, 0, 0, 0).to_identifier();
let fees = txs.iter().map(|tx| tx.fee()).sum();
let reward = libtx::reward::output(kc, &key_id, fees, prev.height).unwrap();

View file

@ -33,7 +33,7 @@ use core::core::{Block, BlockHeader, OutputFeatures, OutputIdentifier, Transacti
use core::global::ChainTypes;
use core::pow::Difficulty;
use core::{consensus, global, pow};
use keychain::{ExtKeychain, Keychain};
use keychain::{ExtKeychain, ExtKeychainPath, Keychain};
use wallet::libtx::{self, build};
fn clean_output_dir(dir_name: &str) {
@ -65,7 +65,7 @@ fn mine_empty_chain() {
for n in 1..4 {
let prev = chain.head_header().unwrap();
let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
let pk = keychain.derive_key_id(n as u32).unwrap();
let pk = ExtKeychainPath::new(1, n as u32, 0, 0, 0).to_identifier();
let reward = libtx::reward::output(&keychain, &pk, 0, prev.height).unwrap();
let mut b = core::core::Block::new(&prev, vec![], difficulty.clone(), reward).unwrap();
b.header.timestamp = prev.timestamp + Duration::seconds(60);
@ -262,11 +262,14 @@ fn spend_in_fork_and_compact() {
// Check the height of the "fork block".
assert_eq!(fork_head.height, 4);
let key_id2 = ExtKeychainPath::new(1, 2, 0, 0, 0).to_identifier();
let key_id30 = ExtKeychainPath::new(1, 30, 0, 0, 0).to_identifier();
let key_id31 = ExtKeychainPath::new(1, 31, 0, 0, 0).to_identifier();
let tx1 = build::transaction(
vec![
build::coinbase_input(consensus::REWARD, kc.derive_key_id(2).unwrap()),
build::output(consensus::REWARD - 20000, kc.derive_key_id(30).unwrap()),
build::coinbase_input(consensus::REWARD, key_id2.clone()),
build::output(consensus::REWARD - 20000, key_id30.clone()),
build::with_fee(20000),
],
&kc,
@ -281,8 +284,8 @@ fn spend_in_fork_and_compact() {
let tx2 = build::transaction(
vec![
build::input(consensus::REWARD - 20000, kc.derive_key_id(30).unwrap()),
build::output(consensus::REWARD - 40000, kc.derive_key_id(31).unwrap()),
build::input(consensus::REWARD - 20000, key_id30.clone()),
build::output(consensus::REWARD - 40000, key_id31.clone()),
build::with_fee(20000),
],
&kc,
@ -377,7 +380,7 @@ fn output_header_mappings() {
for n in 1..15 {
let prev = chain.head_header().unwrap();
let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
let pk = keychain.derive_key_id(n as u32).unwrap();
let pk = ExtKeychainPath::new(1, n as u32, 0, 0, 0).to_identifier();
let reward = libtx::reward::output(&keychain, &pk, 0, prev.height).unwrap();
reward_outputs.push(reward.0.clone());
let mut b = core::core::Block::new(&prev, vec![], difficulty.clone(), reward).unwrap();
@ -465,7 +468,7 @@ where
K: Keychain,
{
let proof_size = global::proofsize();
let key_id = kc.derive_key_id(diff as u32).unwrap();
let key_id = ExtKeychainPath::new(1, diff as u32, 0, 0, 0).to_identifier();
let fees = txs.iter().map(|tx| tx.fee()).sum();
let reward = libtx::reward::output(kc, &key_id, fees, prev.height).unwrap();

View file

@ -28,7 +28,7 @@ use core::core::hash::Hashed;
use core::core::{Block, BlockHeader};
use core::global::{self, ChainTypes};
use core::pow::{self, Difficulty};
use keychain::{ExtKeychain, Keychain};
use keychain::{ExtKeychain, ExtKeychainPath, Keychain};
use wallet::libtx;
fn clean_output_dir(dir_name: &str) {
@ -45,7 +45,7 @@ fn test_various_store_indices() {
clean_output_dir(chain_dir);
let keychain = ExtKeychain::from_random_seed().unwrap();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychainPath::new(1, 1, 0, 0, 0).to_identifier();
let db_env = Arc::new(store::new_env(chain_dir.to_string()));
let chain_store = chain::store::ChainStore::new(db_env).unwrap();

View file

@ -32,7 +32,7 @@ use core::core::verifier_cache::LruVerifierCache;
use core::global::{self, ChainTypes};
use core::pow::Difficulty;
use core::{consensus, pow};
use keychain::{ExtKeychain, Keychain};
use keychain::{ExtKeychain, ExtKeychainPath, Keychain};
use wallet::libtx::{self, build};
fn clean_output_dir(dir_name: &str) {
@ -63,10 +63,10 @@ fn test_coinbase_maturity() {
let prev = chain.head_header().unwrap();
let keychain = ExtKeychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id3 = keychain.derive_key_id(3).unwrap();
let key_id4 = keychain.derive_key_id(4).unwrap();
let key_id1 = ExtKeychainPath::new(1, 1, 0, 0, 0).to_identifier();
let key_id2 = ExtKeychainPath::new(1, 2, 0, 0, 0).to_identifier();
let key_id3 = ExtKeychainPath::new(1, 3, 0, 0, 0).to_identifier();
let key_id4 = ExtKeychainPath::new(1, 4, 0, 0, 0).to_identifier();
let reward = libtx::reward::output(&keychain, &key_id1, 0, prev.height).unwrap();
let mut block = core::core::Block::new(&prev, vec![], Difficulty::one(), reward).unwrap();
@ -146,7 +146,7 @@ fn test_coinbase_maturity() {
let prev = chain.head_header().unwrap();
let keychain = ExtKeychain::from_random_seed().unwrap();
let pk = keychain.derive_key_id(1).unwrap();
let pk = ExtKeychainPath::new(1, 1, 0, 0, 0).to_identifier();
let reward = libtx::reward::output(&keychain, &pk, 0, prev.height).unwrap();
let mut block = core::core::Block::new(&prev, vec![], Difficulty::one(), reward).unwrap();

View file

@ -26,7 +26,10 @@ use std::sync::Arc;
use chain::store::ChainStore;
use chain::txhashset;
use core::core::BlockHeader;
use chain::types::Tip;
use core::core::{Block, BlockHeader};
use core::pow::Difficulty;
use keychain::{ExtKeychain, ExtKeychainPath, Keychain};
use util::file;
fn clean_output_dir(dir_name: &str) {
@ -79,7 +82,8 @@ fn write_file(db_root: String) {
.join("txhashset")
.join("kernel")
.join("strange0"),
).unwrap();
)
.unwrap();
OpenOptions::new()
.create(true)
.write(true)
@ -94,7 +98,8 @@ fn write_file(db_root: String) {
.join("txhashset")
.join("strange_dir")
.join("strange2"),
).unwrap();
)
.unwrap();
fs::create_dir(
Path::new(&db_root)
.join("txhashset")
@ -110,7 +115,8 @@ fn write_file(db_root: String) {
.join("strange_dir")
.join("strange_subdir")
.join("strange3"),
).unwrap();
)
.unwrap();
}
fn txhashset_contains_expected_files(dirname: String, path_buf: PathBuf) -> bool {

View file

@ -170,9 +170,9 @@ mod test {
let foo = Foo(0);
let expected_hash =
Hash::from_hex("81e47a19e6b29b0a65b9591762ce5143ed30d0261e5d24a3201752506b20f15c")
.unwrap();
let expected_hash = Hash::from_hex(
"81e47a19e6b29b0a65b9591762ce5143ed30d0261e5d24a3201752506b20f15c",
).unwrap();
assert_eq!(foo.hash(), expected_hash);
let other_hash = Hash::default();
@ -182,9 +182,9 @@ mod test {
);
let foo = Foo(5);
let expected_hash =
Hash::from_hex("3a42e66e46dd7633b57d1f921780a1ac715e6b93c19ee52ab714178eb3a9f673")
.unwrap();
let expected_hash = Hash::from_hex(
"3a42e66e46dd7633b57d1f921780a1ac715e6b93c19ee52ab714178eb3a9f673",
).unwrap();
assert_eq!(foo.hash(), expected_hash);
let other_hash = Hash::default();
@ -194,14 +194,14 @@ mod test {
);
let foo = Foo(5);
let expected_hash =
Hash::from_hex("3a42e66e46dd7633b57d1f921780a1ac715e6b93c19ee52ab714178eb3a9f673")
.unwrap();
let expected_hash = Hash::from_hex(
"3a42e66e46dd7633b57d1f921780a1ac715e6b93c19ee52ab714178eb3a9f673",
).unwrap();
assert_eq!(foo.hash(), expected_hash);
let other_hash =
Hash::from_hex("81e47a19e6b29b0a65b9591762ce5143ed30d0261e5d24a3201752506b20f15c")
.unwrap();
let other_hash = Hash::from_hex(
"81e47a19e6b29b0a65b9591762ce5143ed30d0261e5d24a3201752506b20f15c",
).unwrap();
assert_eq!(
foo.short_id(&other_hash, foo.0),
ShortId::from_hex("3e9cde72a687").unwrap()

View file

@ -84,7 +84,8 @@ where
// here we want to get from underlying hash file
// as the pos *may* have been "removed"
self.backend.get_from_file(pi)
}).collect()
})
.collect()
}
fn peak_path(&self, peak_pos: u64) -> Vec<Hash> {

View file

@ -19,7 +19,7 @@ use std::marker;
use core::hash::Hash;
use core::pmmr::{bintree_postorder_height, is_leaf, peaks, Backend};
use ser::{PMMRable, PMMRIndexHashable};
use ser::{PMMRIndexHashable, PMMRable};
/// Rewindable (but still readonly) view of a PMMR.
pub struct RewindablePMMR<'a, T, B>
@ -110,7 +110,8 @@ where
// here we want to get from underlying hash file
// as the pos *may* have been "removed"
self.backend.get_from_file(pi)
}).collect()
})
.collect()
}
/// Total size of the tree, including intermediary nodes and ignoring any

View file

@ -201,7 +201,16 @@ impl TxKernel {
let sig = &self.excess_sig;
// Verify aggsig directly in libsecp
let pubkey = &self.excess.to_pubkey(&secp)?;
if !secp::aggsig::verify_single(&secp, &sig, &msg, None, &pubkey, false) {
if !secp::aggsig::verify_single(
&secp,
&sig,
&msg,
None,
&pubkey,
Some(&pubkey),
None,
false,
) {
return Err(secp::Error::IncorrectSignature);
}
Ok(())
@ -1203,7 +1212,7 @@ mod test {
#[test]
fn test_kernel_ser_deser() {
let keychain = ExtKeychain::from_random_seed().unwrap();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let commit = keychain.commit(5, &key_id).unwrap();
// just some bytes for testing ser/deser
@ -1248,10 +1257,10 @@ mod test {
#[test]
fn commit_consistency() {
let keychain = ExtKeychain::from_seed(&[0; 32]).unwrap();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let commit = keychain.commit(1003, &key_id).unwrap();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let commit_2 = keychain.commit(1003, &key_id).unwrap();
@ -1261,7 +1270,7 @@ mod test {
#[test]
fn input_short_id() {
let keychain = ExtKeychain::from_seed(&[0; 32]).unwrap();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let commit = keychain.commit(5, &key_id).unwrap();
let input = Input {
@ -1269,14 +1278,14 @@ mod test {
commit: commit,
};
let block_hash =
Hash::from_hex("3a42e66e46dd7633b57d1f921780a1ac715e6b93c19ee52ab714178eb3a9f673")
.unwrap();
let block_hash = Hash::from_hex(
"3a42e66e46dd7633b57d1f921780a1ac715e6b93c19ee52ab714178eb3a9f673",
).unwrap();
let nonce = 0;
let short_id = input.short_id(&block_hash, nonce);
assert_eq!(short_id, ShortId::from_hex("28fea5a693af").unwrap());
assert_eq!(short_id, ShortId::from_hex("df31d96e3cdb").unwrap());
// now generate the short_id for a *very* similar output (single feature flag
// different) and check it generates a different short_id
@ -1286,6 +1295,6 @@ mod test {
};
let short_id = input.short_id(&block_hash, nonce);
assert_eq!(short_id, ShortId::from_hex("2df325971ab0").unwrap());
assert_eq!(short_id, ShortId::from_hex("784fc5afd5d9").unwrap());
}
}
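The aggsig change noted in the commit list ("added public key sum to calculated e for aggsig") is visible in the kernel verification above, where `verify_single` now also receives the public key sum. A hedged wrapper sketch; the parameter meanings are inferred from the call in this diff rather than from the library's documentation:

```rust
use util::secp::key::PublicKey;
use util::secp::{self, Message, Secp256k1, Signature};

/// Sketch: verify a completed kernel signature, binding the public key (sum)
/// into the challenge e, as the updated call above does.
fn verify_kernel_sig(
    secp: &Secp256k1,
    sig: &Signature,
    msg: &Message,
    pubkey: &PublicKey,
) -> Result<(), secp::Error> {
    if !secp::aggsig::verify_single(
        secp,
        sig,
        msg,
        None,         // no public nonce sum for a finished signature (as in the diff)
        pubkey,       // key the signature is verified against
        Some(pubkey), // public key sum, now folded into the challenge e
        None,         // extra argument left as None, matching the diff
        false,        // not a partial signature
    ) {
        return Err(secp::Error::IncorrectSignature);
    }
    Ok(())
}
```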

View file

@ -69,7 +69,8 @@ impl VerifierCache for LruVerifierCache {
.kernel_sig_verification_cache
.get_mut(&x.hash())
.unwrap_or(&mut false)
}).cloned()
})
.cloned()
.collect::<Vec<_>>();
debug!(
LOGGER,
@ -88,7 +89,8 @@ impl VerifierCache for LruVerifierCache {
.rangeproof_verification_cache
.get_mut(&x.proof.hash())
.unwrap_or(&mut false)
}).cloned()
})
.cloned()
.collect::<Vec<_>>();
debug!(
LOGGER,

View file

@ -52,7 +52,7 @@ fn too_large_block() {
let mut pks = vec![];
for n in 0..(max_out + 1) {
pks.push(keychain.derive_key_id(n as u32).unwrap());
pks.push(ExtKeychain::derive_key_id(1, n as u32, 0, 0, 0));
}
let mut parts = vec![];
@ -66,7 +66,7 @@ fn too_large_block() {
println!("Build tx: {}", now.elapsed().as_secs());
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let b = new_block(vec![&tx], &keychain, &prev, &key_id);
assert!(
b.validate(&BlindingFactor::zero(), &zero_commit, verifier_cache())
@ -90,9 +90,9 @@ fn very_empty_block() {
// builds a block with a tx spending another and check that cut_through occurred
fn block_with_cut_through() {
let keychain = ExtKeychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id3 = keychain.derive_key_id(3).unwrap();
let key_id1 = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let key_id2 = ExtKeychain::derive_key_id(1, 2, 0, 0, 0);
let key_id3 = ExtKeychain::derive_key_id(1, 3, 0, 0, 0);
let zero_commit = secp_static::commit_to_zero_value();
@ -106,7 +106,7 @@ fn block_with_cut_through() {
let mut btx3 = txspend1i1o(5, &keychain, key_id2.clone(), key_id3);
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let b = new_block(
vec![&mut btx1, &mut btx2, &mut btx3],
&keychain,
@ -129,7 +129,7 @@ fn empty_block_with_coinbase_is_valid() {
let keychain = ExtKeychain::from_random_seed().unwrap();
let zero_commit = secp_static::commit_to_zero_value();
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let b = new_block(vec![], &keychain, &prev, &key_id);
assert_eq!(b.inputs().len(), 0);
@ -168,7 +168,7 @@ fn remove_coinbase_output_flag() {
let keychain = ExtKeychain::from_random_seed().unwrap();
let zero_commit = secp_static::commit_to_zero_value();
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let mut b = new_block(vec![], &keychain, &prev, &key_id);
assert!(
@ -198,7 +198,7 @@ fn remove_coinbase_kernel_flag() {
let keychain = ExtKeychain::from_random_seed().unwrap();
let zero_commit = secp_static::commit_to_zero_value();
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let mut b = new_block(vec![], &keychain, &prev, &key_id);
assert!(
@ -225,7 +225,7 @@ fn remove_coinbase_kernel_flag() {
fn serialize_deserialize_block_header() {
let keychain = ExtKeychain::from_random_seed().unwrap();
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let b = new_block(vec![], &keychain, &prev, &key_id);
let header1 = b.header;
@ -242,7 +242,7 @@ fn serialize_deserialize_block() {
let tx1 = tx1i2o();
let keychain = ExtKeychain::from_random_seed().unwrap();
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let b = new_block(vec![&tx1], &keychain, &prev, &key_id);
let mut vec = Vec::new();
@ -260,7 +260,7 @@ fn serialize_deserialize_block() {
fn empty_block_serialized_size() {
let keychain = ExtKeychain::from_random_seed().unwrap();
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let b = new_block(vec![], &keychain, &prev, &key_id);
let mut vec = Vec::new();
ser::serialize(&mut vec, &b).expect("serialization failed");
@ -273,7 +273,7 @@ fn block_single_tx_serialized_size() {
let keychain = ExtKeychain::from_random_seed().unwrap();
let tx1 = tx1i2o();
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let b = new_block(vec![&tx1], &keychain, &prev, &key_id);
let mut vec = Vec::new();
ser::serialize(&mut vec, &b).expect("serialization failed");
@ -285,7 +285,7 @@ fn block_single_tx_serialized_size() {
fn empty_compact_block_serialized_size() {
let keychain = ExtKeychain::from_random_seed().unwrap();
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let b = new_block(vec![], &keychain, &prev, &key_id);
let cb: CompactBlock = b.into();
let mut vec = Vec::new();
@ -299,7 +299,7 @@ fn compact_block_single_tx_serialized_size() {
let keychain = ExtKeychain::from_random_seed().unwrap();
let tx1 = tx1i2o();
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let b = new_block(vec![&tx1], &keychain, &prev, &key_id);
let cb: CompactBlock = b.into();
let mut vec = Vec::new();
@ -319,7 +319,7 @@ fn block_10_tx_serialized_size() {
txs.push(tx);
}
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let b = new_block(txs.iter().collect(), &keychain, &prev, &key_id);
let mut vec = Vec::new();
ser::serialize(&mut vec, &b).expect("serialization failed");
@ -337,7 +337,7 @@ fn compact_block_10_tx_serialized_size() {
txs.push(tx);
}
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let b = new_block(txs.iter().collect(), &keychain, &prev, &key_id);
let cb: CompactBlock = b.into();
let mut vec = Vec::new();
@ -351,7 +351,7 @@ fn compact_block_hash_with_nonce() {
let keychain = ExtKeychain::from_random_seed().unwrap();
let tx = tx1i2o();
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let b = new_block(vec![&tx], &keychain, &prev, &key_id);
let cb1: CompactBlock = b.clone().into();
let cb2: CompactBlock = b.clone().into();
@ -381,7 +381,7 @@ fn convert_block_to_compact_block() {
let keychain = ExtKeychain::from_random_seed().unwrap();
let tx1 = tx1i2o();
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let b = new_block(vec![&tx1], &keychain, &prev, &key_id);
let cb: CompactBlock = b.clone().into();
@ -403,7 +403,7 @@ fn convert_block_to_compact_block() {
fn hydrate_empty_compact_block() {
let keychain = ExtKeychain::from_random_seed().unwrap();
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let b = new_block(vec![], &keychain, &prev, &key_id);
let cb: CompactBlock = b.clone().into();
let hb = Block::hydrate_from(cb, vec![]).unwrap();
@ -417,7 +417,7 @@ fn serialize_deserialize_compact_block() {
let keychain = ExtKeychain::from_random_seed().unwrap();
let tx1 = tx1i2o();
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let b = new_block(vec![&tx1], &keychain, &prev, &key_id);
let mut cb1: CompactBlock = b.into();
@ -442,7 +442,7 @@ fn empty_block_v2_switch() {
let keychain = ExtKeychain::from_random_seed().unwrap();
let mut prev = BlockHeader::default();
prev.height = consensus::HEADER_V2_HARD_FORK - 1;
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let b = new_block(vec![], &keychain, &prev, &key_id);
let mut vec = Vec::new();
ser::serialize(&mut vec, &b).expect("serialization failed");

View file

@ -29,9 +29,9 @@ use wallet::libtx::reward;
// utility producing a transaction with 2 inputs and a single outputs
pub fn tx2i1o() -> Transaction {
let keychain = keychain::ExtKeychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id3 = keychain.derive_key_id(3).unwrap();
let key_id1 = keychain::ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let key_id2 = keychain::ExtKeychain::derive_key_id(1, 2, 0, 0, 0);
let key_id3 = keychain::ExtKeychain::derive_key_id(1, 3, 0, 0, 0);
build::transaction(
vec![
@ -47,8 +47,8 @@ pub fn tx2i1o() -> Transaction {
// utility producing a transaction with a single input and output
pub fn tx1i1o() -> Transaction {
let keychain = keychain::ExtKeychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id1 = keychain::ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let key_id2 = keychain::ExtKeychain::derive_key_id(1, 2, 0, 0, 0);
build::transaction(
vec![input(5, key_id1), output(3, key_id2), with_fee(2)],
@ -61,9 +61,9 @@ pub fn tx1i1o() -> Transaction {
// Note: this tx has an "offset" kernel
pub fn tx1i2o() -> Transaction {
let keychain = keychain::ExtKeychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id3 = keychain.derive_key_id(3).unwrap();
let key_id1 = keychain::ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let key_id2 = keychain::ExtKeychain::derive_key_id(1, 2, 0, 0, 0);
let key_id3 = keychain::ExtKeychain::derive_key_id(1, 3, 0, 0, 0);
build::transaction(
vec![

View file

@ -77,7 +77,7 @@ fn tx_double_ser_deser() {
#[should_panic(expected = "InvalidSecretKey")]
fn test_zero_commit_fails() {
let keychain = ExtKeychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id1 = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
// blinding should fail as signing with a zero r*G shouldn't work
build::transaction(
@ -97,9 +97,9 @@ fn verifier_cache() -> Arc<RwLock<VerifierCache>> {
#[test]
fn build_tx_kernel() {
let keychain = ExtKeychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id3 = keychain.derive_key_id(3).unwrap();
let key_id1 = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let key_id2 = ExtKeychain::derive_key_id(1, 2, 0, 0, 0);
let key_id3 = ExtKeychain::derive_key_id(1, 3, 0, 0, 0);
// first build a valid tx with corresponding blinding factor
let tx = build::transaction(
@ -318,9 +318,9 @@ fn basic_transaction_deaggregation() {
#[test]
fn hash_output() {
let keychain = ExtKeychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id3 = keychain.derive_key_id(3).unwrap();
let key_id1 = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let key_id2 = ExtKeychain::derive_key_id(1, 2, 0, 0, 0);
let key_id3 = ExtKeychain::derive_key_id(1, 3, 0, 0, 0);
let tx = build::transaction(
vec![
@ -372,10 +372,10 @@ fn tx_hash_diff() {
#[test]
fn tx_build_exchange() {
let keychain = ExtKeychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id3 = keychain.derive_key_id(3).unwrap();
let key_id4 = keychain.derive_key_id(4).unwrap();
let key_id1 = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let key_id2 = ExtKeychain::derive_key_id(1, 2, 0, 0, 0);
let key_id3 = ExtKeychain::derive_key_id(1, 3, 0, 0, 0);
let key_id4 = ExtKeychain::derive_key_id(1, 4, 0, 0, 0);
let (tx_alice, blind_sum) = {
// Alice gets 2 of her pre-existing outputs to send 5 coins to Bob, they
@ -409,7 +409,7 @@ fn tx_build_exchange() {
#[test]
fn reward_empty_block() {
let keychain = keychain::ExtKeychain::from_random_seed().unwrap();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let zero_commit = secp_static::commit_to_zero_value();
@ -426,7 +426,7 @@ fn reward_empty_block() {
#[test]
fn reward_with_tx_block() {
let keychain = keychain::ExtKeychain::from_random_seed().unwrap();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let vc = verifier_cache();
@ -448,7 +448,7 @@ fn reward_with_tx_block() {
#[test]
fn simple_block() {
let keychain = keychain::ExtKeychain::from_random_seed().unwrap();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let vc = verifier_cache();
@ -473,9 +473,9 @@ fn simple_block() {
fn test_block_with_timelocked_tx() {
let keychain = keychain::ExtKeychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id3 = keychain.derive_key_id(3).unwrap();
let key_id1 = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let key_id2 = ExtKeychain::derive_key_id(1, 2, 0, 0, 0);
let key_id3 = ExtKeychain::derive_key_id(1, 3, 0, 0, 0);
let vc = verifier_cache();

View file

@ -28,7 +28,7 @@ use wallet::libtx::proof;
#[test]
fn test_output_ser_deser() {
let keychain = ExtKeychain::from_random_seed().unwrap();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let commit = keychain.commit(5, &key_id).unwrap();
let proof = proof::create(&keychain, 5, &key_id, commit, None).unwrap();

View file

@ -36,7 +36,7 @@ fn test_verifier_cache_rangeproofs() {
let cache = verifier_cache();
let keychain = ExtKeychain::from_random_seed().unwrap();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let commit = keychain.commit(5, &key_id).unwrap();
let proof = proof::create(&keychain, 5, &key_id, commit, None).unwrap();

View file

@ -33,11 +33,22 @@ Logging configuration for the wallet is read from `grin-wallet.toml`.
#### Switches common to all wallet commands
### Wallet Account
The wallet supports multiple accounts. To set the active account for a wallet command, use the '-a' switch, e.g.:
```
[host]$ grin wallet -a account_1 info
```
All output creation, transaction building, and querying are done against a particular account in the wallet.
If the '-a' switch is not provided for a command, the account named 'default' is used.
##### Grin Node Address
The wallet generally needs to talk to a running grin node in order to remain up-to-date and verify its contents. By default, the wallet
tries to contact a node at `127.0.0.1:13413`. To change this, modify the value in the wallet's `grin-wallet.toml` file. Alternatively,
you can provide the `-a` switch to the wallet command, e.g.:
you can provide the `-r` (seRver) switch to the wallet command, e.g.:
```sh
[host]$ grin wallet -a "http://192.168.0.2:1341" info
@ -79,6 +90,27 @@ This will create a `grin-wallet.toml` file in the current directory configured t
as well as all needed data files. When running any `grin wallet` command, grin will check the current directory to see if
a `grin-wallet.toml` file exists. If not it will use the default in `~/.grin`
### account
To create a new account, use the 'grin wallet account' command with the argument '-c', e.g.:
```
[host]$ grin wallet account -c my_account
```
This will create a new account called 'my_account'. To use this account in subsequent commands, provide the '-a' flag to
all wallet commands:
```
[host]$ grin wallet -a my_account info
```
To display a list of created accounts in the wallet, use the 'account' command with no flags:
```
[host]$ grin wallet account
```
### info
A summary of the wallet's contents can be retrieved from the wallet using the `info` command. Note that the `Total` sum may appear
@ -86,7 +118,7 @@ inflated if you have a lot of unconfirmed outputs in your wallet (especially one
who then never confirms it by posting to the chain). `Currently Spendable` is the most accurate field to look at here.
```sh
____ Wallet Summary Info as of 49 ____
____ Wallet Summary Info - Account 'default' as of 49 ____
Total | 3000.000000000
Awaiting Confirmation | 60.000000000
Simply displays all the outputs in your wallet, e.g.:
```sh
[host]$ grin wallet outputs
Wallet Outputs - Block Height: 49
Wallet Outputs - Account 'default' - Block Height: 49
------------------------------------------------------------------------------------------------------------------------------------------------
Key Id Child Key Index Block Height Locked Until Status Is Coinbase? Num. of Confirmations Value Transaction
================================================================================================================================================
@ -209,8 +241,7 @@ transaction log, use the `txs`
```sh
[host]$ grin wallet txs
Transaction Log - Block Height: 49
Transaction Log - Account 'default' - Block Height: 49
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
Id Type Shared Transaction Id Creation Time Confirmed? Confirmation Time Num. Inputs Num. Outputs Amount Credited Amount Debited Fee Net Difference
==========================================================================================================================================================================================================================================
@ -226,13 +257,13 @@ Transaction Log - Block Height: 49
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
6 Received Tx 03715cf6-f29b-4a3a-bda5-b02cba6bf0d9 2018-07-20 19:46:46.120244904 UTC false None 0 1 60.000000000 0.000000000 None 60.000000000
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
```
To see the inputs/outputs associated with a particular transaction, use the `-i` switch, providing the id of the given transaction, e.g.:
```sh
[host]$ grin wallet txs -i 6
Transaction Log - Block Height: 49
Transaction Log - Account 'default' - Block Height: 49
-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
Id Type Shared Transaction Id Creation Time Confirmed? Confirmation Time Num. Inputs Num. Outputs Amount Credited Amount Debited Fee Net Difference
===========================================================================================================================================================================================================
@ -263,7 +294,7 @@ Running against the data above:
```sh
[host]$ grin wallet cancel -i 6
[host]$ grin wallet txs -i 6
Transaction Log - Block Height: 49
Transaction Log - Account 'default' - Block Height: 49
-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
Id Type Shared Transaction Id Creation Time Confirmed? Confirmation Time Num. Inputs Num. Outputs Amount Credited Amount Debited Fee Net Difference
=======================================================================================================================================================================================================================
@ -326,4 +357,4 @@ grin wallet restore
```
Note this operation can potentially take a long time. Once it's done, your wallet outputs should be restored, and you can
transact with your restored wallet as before the backup.
transact with your restored wallet as before the backup.

View file

@ -1,192 +0,0 @@
// Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use blake2::blake2b::blake2b;
use byteorder::{BigEndian, ByteOrder};
use types::{Error, Identifier};
use util::secp::key::SecretKey;
use util::secp::Secp256k1;
#[derive(Debug, Clone)]
pub struct ChildKey {
/// Child number of the key (n derivations)
pub n_child: u32,
/// Root key id
pub root_key_id: Identifier,
/// Key id
pub key_id: Identifier,
/// The private key
pub key: SecretKey,
}
/// An ExtendedKey is a secret key which can be used to derive new
/// secret keys to blind the commitment of a transaction output.
/// To be usable, a secret key should have an amount assigned to it,
/// but when the key is derived, the amount is not known and must be
/// given.
#[derive(Debug, Clone)]
pub struct ExtendedKey {
/// Child number of the extended key
pub n_child: u32,
/// Root key id
pub root_key_id: Identifier,
/// Key id
pub key_id: Identifier,
/// The secret key
pub key: SecretKey,
/// The chain code for the key derivation chain
pub chain_code: [u8; 32],
}
impl ExtendedKey {
/// Creates a new extended master key from a seed
pub fn from_seed(secp: &Secp256k1, seed: &[u8]) -> Result<ExtendedKey, Error> {
match seed.len() {
16 | 32 | 64 => (),
_ => {
return Err(Error::KeyDerivation(
"seed size must be 128, 256 or 512".to_owned(),
))
}
}
let derived = blake2b(64, b"Grin/MW Seed", seed);
let slice = derived.as_bytes();
let key =
SecretKey::from_slice(&secp, &slice[0..32]).expect("Error deriving key (from_slice)");
let mut chain_code: [u8; 32] = Default::default();
(&mut chain_code).copy_from_slice(&slice[32..64]);
let key_id = Identifier::from_secret_key(secp, &key)?;
let ext_key = ExtendedKey {
n_child: 0,
root_key_id: key_id.clone(),
key_id: key_id.clone(),
// key and extended chain code for the key itself
key,
chain_code,
};
Ok(ext_key)
}
/// Derive a child key from this extended key
pub fn derive(&self, secp: &Secp256k1, n: u32) -> Result<ChildKey, Error> {
let mut n_bytes: [u8; 4] = [0; 4];
BigEndian::write_u32(&mut n_bytes, n);
let mut seed = self.key[..].to_vec();
seed.extend_from_slice(&n_bytes);
// only need a 32 byte digest here as we only need the bytes for the key itself
// we do not need additional bytes for a derived (and unused) chain code
let derived = blake2b(32, &self.chain_code[..], &seed[..]);
let mut key = SecretKey::from_slice(&secp, &derived.as_bytes()[..])
.expect("Error deriving key (from_slice)");
key.add_assign(secp, &self.key)
.expect("Error deriving key (add_assign)");
let key_id = Identifier::from_secret_key(secp, &key)?;
Ok(ChildKey {
n_child: n,
root_key_id: self.root_key_id.clone(),
key_id,
key,
})
}
}
#[cfg(test)]
mod test {
use serde_json;
use super::{ExtendedKey, Identifier};
use util;
use util::secp::key::SecretKey;
use util::secp::Secp256k1;
fn from_hex(hex_str: &str) -> Vec<u8> {
util::from_hex(hex_str.to_string()).unwrap()
}
#[test]
fn test_identifier_json_ser_deser() {
let hex = "942b6c0bd43bdcb24f3edfe7fadbc77054ecc4f2";
let identifier = Identifier::from_hex(hex).unwrap();
#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct HasAnIdentifier {
identifier: Identifier,
}
let has_an_identifier = HasAnIdentifier { identifier };
let json = serde_json::to_string(&has_an_identifier).unwrap();
assert_eq!(json, "{\"identifier\":\"942b6c0bd43bdcb24f3e\"}");
let deserialized: HasAnIdentifier = serde_json::from_str(&json).unwrap();
assert_eq!(deserialized, has_an_identifier);
}
#[test]
fn extkey_from_seed() {
// TODO More test vectors
let s = Secp256k1::new();
let seed = from_hex("000102030405060708090a0b0c0d0e0f");
let extk = ExtendedKey::from_seed(&s, &seed.as_slice()).unwrap();
let sec = from_hex("2878a92133b0a7c2fbfb0bd4520ed2e55ea3fa2913200f05c30077d30b193480");
let secret_key = SecretKey::from_slice(&s, sec.as_slice()).unwrap();
let chain_code =
from_hex("3ad40dd836c5ce25dfcbdee5044d92cf6b65bd5475717fa7a56dd4a032cca7c0");
let identifier = from_hex("6f7c1a053ca54592e783");
let n_child = 0;
assert_eq!(extk.key, secret_key);
assert_eq!(extk.key_id, Identifier::from_bytes(identifier.as_slice()));
assert_eq!(
extk.root_key_id,
Identifier::from_bytes(identifier.as_slice())
);
assert_eq!(extk.chain_code, chain_code.as_slice());
assert_eq!(extk.n_child, n_child);
}
#[test]
fn extkey_derivation() {
let s = Secp256k1::new();
let seed = from_hex("000102030405060708090a0b0c0d0e0f");
let extk = ExtendedKey::from_seed(&s, &seed.as_slice()).unwrap();
let derived = extk.derive(&s, 0).unwrap();
let sec = from_hex("55f1a2b67ec58933bf954fdc721327afe486e8989af923c3ae298e45a84ef597");
let secret_key = SecretKey::from_slice(&s, sec.as_slice()).unwrap();
let root_key_id = from_hex("6f7c1a053ca54592e783");
let identifier = from_hex("8fa188b56cefe66be154");
let n_child = 0;
assert_eq!(derived.key, secret_key);
assert_eq!(
derived.key_id,
Identifier::from_bytes(identifier.as_slice())
);
assert_eq!(
derived.root_key_id,
Identifier::from_bytes(root_key_id.as_slice())
);
assert_eq!(derived.n_child, n_child);
}
}

View file

@ -88,30 +88,30 @@ pub trait BIP32Hasher {
}
/// Implementation of the above that uses the standard BIP32 Hash algorithms
pub struct BIP32ReferenceHasher {
pub struct BIP32GrinHasher {
hmac_sha512: Hmac<Sha512>,
}
impl BIP32ReferenceHasher {
impl BIP32GrinHasher {
/// New empty hasher
pub fn new() -> BIP32ReferenceHasher {
BIP32ReferenceHasher {
pub fn new() -> BIP32GrinHasher {
BIP32GrinHasher {
hmac_sha512: HmacSha512::new(GenericArray::from_slice(&[0u8; 128])),
}
}
}
impl BIP32Hasher for BIP32ReferenceHasher {
impl BIP32Hasher for BIP32GrinHasher {
fn network_priv() -> [u8; 4] {
// bitcoin network (xprv) (for test vectors)
// xprv
[0x04, 0x88, 0xAD, 0xE4]
}
fn network_pub() -> [u8; 4] {
// bitcoin network (xpub) (for test vectors)
// xpub
[0x04, 0x88, 0xB2, 0x1E]
}
fn master_seed() -> [u8; 12] {
b"Bitcoin seed".to_owned()
b"IamVoldemort".to_owned()
}
fn init_sha512(&mut self, seed: &[u8]) {
self.hmac_sha512 = HmacSha512::new_varkey(seed).expect("HMAC can take key of any size");;
@ -175,7 +175,7 @@ pub struct ExtendedPubKey {
}
/// A child number for a derived key
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
#[derive(Copy, Clone, PartialEq, Eq, Debug, Serialize, Deserialize)]
pub enum ChildNumber {
/// Non-hardened key
Normal {
@ -409,8 +409,7 @@ impl ExtendedPrivKey {
hasher.append_sha512(&be_n);
let result = hasher.result_sha512();
let mut sk = SecretKey::from_slice(secp, &result[..32]).map_err(Error::Ecdsa)?;
sk.add_assign(secp, &self.secret_key)
.map_err(Error::Ecdsa)?;
sk.add_assign(secp, &self.secret_key).map_err(Error::Ecdsa)?;
Ok(ExtendedPrivKey {
network: self.network,
@ -653,11 +652,66 @@ mod tests {
use util::from_hex;
use util::secp::Secp256k1;
use super::ChildNumber::{Hardened, Normal};
use super::Error;
use super::{ChildNumber, ExtendedPrivKey, ExtendedPubKey};
use super::*;
use super::BIP32ReferenceHasher;
use digest::generic_array::GenericArray;
use digest::Digest;
use hmac::{Hmac, Mac};
use ripemd160::Ripemd160;
use sha2::{Sha256, Sha512};
/// Implementation of the above that uses the standard BIP32 Hash algorithms
pub struct BIP32ReferenceHasher {
hmac_sha512: Hmac<Sha512>,
}
impl BIP32ReferenceHasher {
/// New empty hasher
pub fn new() -> BIP32ReferenceHasher {
BIP32ReferenceHasher {
hmac_sha512: HmacSha512::new(GenericArray::from_slice(&[0u8; 128])),
}
}
}
impl BIP32Hasher for BIP32ReferenceHasher {
fn network_priv() -> [u8; 4] {
// bitcoin network (xprv) (for test vectors)
[0x04, 0x88, 0xAD, 0xE4]
}
fn network_pub() -> [u8; 4] {
// bitcoin network (xpub) (for test vectors)
[0x04, 0x88, 0xB2, 0x1E]
}
fn master_seed() -> [u8; 12] {
b"Bitcoin seed".to_owned()
}
fn init_sha512(&mut self, seed: &[u8]) {
self.hmac_sha512 = HmacSha512::new_varkey(seed).expect("HMAC can take key of any size");;
}
fn append_sha512(&mut self, value: &[u8]) {
self.hmac_sha512.input(value);
}
fn result_sha512(&mut self) -> [u8; 64] {
let mut result = [0; 64];
result.copy_from_slice(self.hmac_sha512.result().code().as_slice());
result
}
fn sha_256(&self, input: &[u8]) -> [u8; 32] {
let mut sha2_res = [0; 32];
let mut sha2 = Sha256::new();
sha2.input(input);
sha2_res.copy_from_slice(sha2.result().as_slice());
sha2_res
}
fn ripemd_160(&self, input: &[u8]) -> [u8; 20] {
let mut ripemd_res = [0; 20];
let mut ripemd = Ripemd160::new();
ripemd.input(input);
ripemd_res.copy_from_slice(ripemd.result().as_slice());
ripemd_res
}
}
fn test_path(
secp: &Secp256k1,
@ -694,12 +748,12 @@ mod tests {
for &num in path.iter() {
sk = sk.ckd_priv(secp, &mut h, num).unwrap();
match num {
Normal { .. } => {
ChildNumber::Normal { .. } => {
let pk2 = pk.ckd_pub(secp, &mut h, num).unwrap();
pk = ExtendedPubKey::from_private::<BIP32ReferenceHasher>(secp, &sk);
assert_eq!(pk, pk2);
}
Hardened { .. } => {
ChildNumber::Hardened { .. } => {
assert_eq!(
pk.ckd_pub(secp, &mut h, num),
Err(Error::CannotDeriveFromHardenedKey)
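For orientation, a sketch of how the new `BIP32GrinHasher` is used to create a master key and derive a child, based on the calls shown in this diff and in the keychain diff further down; the exact crate paths and error plumbing here are assumptions:

```rust
use keychain::extkey_bip32::{BIP32GrinHasher, ChildNumber, Error, ExtendedPrivKey};
use util::secp::{ContextFlag, Secp256k1};

fn grin_master_key_sketch(seed: &[u8]) -> Result<ExtendedPrivKey, Error> {
    let secp = Secp256k1::with_caps(ContextFlag::Commit);
    // The Grin hasher keys its HMAC-SHA512 with b"IamVoldemort" rather than the
    // BIP32 reference string b"Bitcoin seed" (kept only for the test vectors).
    let mut hasher = BIP32GrinHasher::new();
    let master = ExtendedPrivKey::new_master(&secp, &mut hasher, seed)?;
    // Derive the first non-hardened child, i.e. m/0.
    master.ckd_priv(&secp, &mut hasher, ChildNumber::from(0))
}
```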

View file

@ -16,14 +16,11 @@
/// scheme.
use rand::distributions::Alphanumeric;
use rand::{thread_rng, Rng};
use std::collections::HashMap;
use std::sync::{Arc, RwLock};
use blake2;
use extkey;
use types::{BlindSum, BlindingFactor, Error, Identifier, Keychain};
use util::logger::LOGGER;
use extkey_bip32::{BIP32GrinHasher, ExtendedPrivKey};
use types::{BlindSum, BlindingFactor, Error, ExtKeychainPath, Identifier, Keychain};
use util::secp::key::SecretKey;
use util::secp::pedersen::Commitment;
use util::secp::{self, Message, Secp256k1, Signature};
@ -31,20 +28,17 @@ use util::secp::{self, Message, Secp256k1, Signature};
#[derive(Clone, Debug)]
pub struct ExtKeychain {
secp: Secp256k1,
extkey: extkey::ExtendedKey,
key_overrides: HashMap<Identifier, SecretKey>,
key_derivation_cache: Arc<RwLock<HashMap<Identifier, u32>>>,
master: ExtendedPrivKey,
}
impl Keychain for ExtKeychain {
fn from_seed(seed: &[u8]) -> Result<ExtKeychain, Error> {
let mut h = BIP32GrinHasher::new();
let secp = secp::Secp256k1::with_caps(secp::ContextFlag::Commit);
let extkey = extkey::ExtendedKey::from_seed(&secp, seed)?;
let master = ExtendedPrivKey::new_master(&secp, &mut h, seed)?;
let keychain = ExtKeychain {
secp: secp,
extkey: extkey,
key_overrides: HashMap::new(),
key_derivation_cache: Arc::new(RwLock::new(HashMap::new())),
master: master,
};
Ok(keychain)
}
@ -56,39 +50,27 @@ impl Keychain for ExtKeychain {
ExtKeychain::from_seed(seed.as_bytes())
}
fn root_key_id(&self) -> Identifier {
self.extkey.root_key_id.clone()
fn root_key_id() -> Identifier {
ExtKeychainPath::new(0, 0, 0, 0, 0).to_identifier()
}
fn derive_key_id(&self, derivation: u32) -> Result<Identifier, Error> {
let child_key = self.extkey.derive(&self.secp, derivation)?;
Ok(child_key.key_id)
fn derive_key_id(depth: u8, d1: u32, d2: u32, d3: u32, d4: u32) -> Identifier {
ExtKeychainPath::new(depth, d1, d2, d3, d4).to_identifier()
}
fn derived_key(&self, key_id: &Identifier) -> Result<SecretKey, Error> {
// first check our overrides and just return the key if we have one in there
if let Some(key) = self.key_overrides.get(key_id) {
trace!(
LOGGER,
"... Derived Key (using override) key_id: {}",
key_id
);
return Ok(*key);
fn derive_key(&self, id: &Identifier) -> Result<ExtendedPrivKey, Error> {
let mut h = BIP32GrinHasher::new();
let p = id.to_path();
let mut sk = self.master;
for i in 0..p.depth {
sk = sk.ckd_priv(&self.secp, &mut h, p.path[i as usize])?;
}
let child_key = self.derived_child_key(key_id)?;
Ok(child_key.key)
Ok(sk)
}
fn commit(&self, amount: u64, key_id: &Identifier) -> Result<Commitment, Error> {
let skey = self.derived_key(key_id)?;
let commit = self.secp.commit(amount, skey)?;
Ok(commit)
}
fn commit_with_key_index(&self, amount: u64, derivation: u32) -> Result<Commitment, Error> {
let child_key = self.derived_key_from_index(derivation)?;
let commit = self.secp.commit(amount, child_key.key)?;
fn commit(&self, amount: u64, id: &Identifier) -> Result<Commitment, Error> {
let key = self.derive_key(id)?;
let commit = self.secp.commit(amount, key.secret_key)?;
Ok(commit)
}
@ -96,13 +78,27 @@ impl Keychain for ExtKeychain {
let mut pos_keys: Vec<SecretKey> = blind_sum
.positive_key_ids
.iter()
.filter_map(|k| self.derived_key(&k).ok())
.filter_map(|k| {
let res = self.derive_key(&Identifier::from_path(&k));
if let Ok(s) = res {
Some(s.secret_key)
} else {
None
}
})
.collect();
let mut neg_keys: Vec<SecretKey> = blind_sum
.negative_key_ids
.iter()
.filter_map(|k| self.derived_key(&k).ok())
.filter_map(|k| {
let res = self.derive_key(&Identifier::from_path(&k));
if let Ok(s) = res {
Some(s.secret_key)
} else {
None
}
})
.collect();
pos_keys.extend(
@ -125,9 +121,9 @@ impl Keychain for ExtKeychain {
Ok(BlindingFactor::from_secret_key(sum))
}
fn sign(&self, msg: &Message, key_id: &Identifier) -> Result<Signature, Error> {
let skey = self.derived_key(key_id)?;
let sig = self.secp.sign(msg, &skey)?;
fn sign(&self, msg: &Message, id: &Identifier) -> Result<Signature, Error> {
let skey = self.derive_key(id)?;
let sig = self.secp.sign(msg, &skey.secret_key)?;
Ok(sig)
}
@ -146,82 +142,10 @@ impl Keychain for ExtKeychain {
}
}
impl ExtKeychain {
// For tests and burn only, associate a key identifier with a known secret key.
pub fn burn_enabled(keychain: &ExtKeychain, burn_key_id: &Identifier) -> ExtKeychain {
let mut key_overrides = HashMap::new();
key_overrides.insert(
burn_key_id.clone(),
SecretKey::from_slice(&keychain.secp, &[1; 32]).unwrap(),
);
ExtKeychain {
key_overrides: key_overrides,
..keychain.clone()
}
}
fn derived_child_key(&self, key_id: &Identifier) -> Result<extkey::ChildKey, Error> {
trace!(LOGGER, "Derived Key by key_id: {}", key_id);
// then check the derivation cache to see if we have previously derived this key
// if so use the derivation from the cache to derive the key
{
let cache = self.key_derivation_cache.read().unwrap();
if let Some(derivation) = cache.get(key_id) {
trace!(
LOGGER,
"... Derived Key (cache hit) key_id: {}, derivation: {}",
key_id,
derivation
);
return Ok(self.derived_key_from_index(*derivation)?);
}
}
// otherwise iterate over a large number of derivations looking for our key
// cache the resulting derivations by key_id for faster lookup later
// TODO - remove hard limit (within reason)
// TODO - do we benefit here if we track our max known n_child?
{
let mut cache = self.key_derivation_cache.write().unwrap();
for i in 1..100_000 {
let child_key = self.extkey.derive(&self.secp, i)?;
// let child_key_id = extkey.identifier(&self.secp)?;
if !cache.contains_key(&child_key.key_id) {
trace!(
LOGGER,
"... Derived Key (cache miss) key_id: {}, derivation: {}",
child_key.key_id,
child_key.n_child,
);
cache.insert(child_key.key_id.clone(), child_key.n_child);
}
if child_key.key_id == *key_id {
return Ok(child_key);
}
}
}
Err(Error::KeyDerivation(format!(
"failed to derive child_key for {:?}",
key_id
)))
}
// if we know the derivation index we can just straight to deriving the key
fn derived_key_from_index(&self, derivation: u32) -> Result<extkey::ChildKey, Error> {
trace!(LOGGER, "Derived Key (fast) by derivation: {}", derivation);
let child_key = self.extkey.derive(&self.secp, derivation)?;
return Ok(child_key);
}
}
#[cfg(test)]
mod test {
use keychain::ExtKeychain;
use types::{BlindSum, BlindingFactor, Keychain};
use types::{BlindSum, BlindingFactor, ExtKeychainPath, Keychain};
use util::secp;
use util::secp::key::SecretKey;
@ -230,8 +154,8 @@ mod test {
let keychain = ExtKeychain::from_random_seed().unwrap();
let secp = keychain.secp();
// use the keychain to derive a "key_id" based on the underlying seed
let key_id = keychain.derive_key_id(1).unwrap();
let path = ExtKeychainPath::new(1, 1, 0, 0, 0);
let key_id = path.to_identifier();
let msg_bytes = [0; 32];
let msg = secp::Message::from_slice(&msg_bytes[..]).unwrap();
@ -296,7 +220,8 @@ mod test {
&BlindSum::new()
.add_blinding_factor(BlindingFactor::from_secret_key(skey1))
.add_blinding_factor(BlindingFactor::from_secret_key(skey2))
).unwrap(),
)
.unwrap(),
BlindingFactor::from_secret_key(skey3),
);
}
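Putting the reworked trait methods together, a small usage sketch based on the signatures in this diff; the amount and message bytes are placeholders, not values from the commit:

```rust
use keychain::{ExtKeychain, Keychain};
use util::secp;

fn keychain_usage_sketch() -> Result<(), keychain::Error> {
    let keychain = ExtKeychain::from_random_seed()?;

    // Identifiers are derived statically from a path; no per-keychain cache or search.
    let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);

    // Commit to a value and sign a message under the key at that path.
    let commit = keychain.commit(5, &key_id)?;
    let msg = secp::Message::from_slice(&[0u8; 32])?;
    let sig = keychain.sign(&msg, &key_id)?;

    println!("commit {:?}, sig {:?}", commit, sig);
    Ok(())
}
```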

View file

@ -22,20 +22,21 @@ extern crate rand;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
#[macro_use]
extern crate slog;
extern crate digest;
extern crate hmac;
extern crate ripemd160;
extern crate serde_json;
extern crate sha2;
extern crate slog;
extern crate uuid;
mod base58;
pub mod extkey;
pub mod extkey_bip32;
mod types;
pub mod keychain;
pub use extkey_bip32::ChildNumber;
pub use keychain::ExtKeychain;
pub use types::{BlindSum, BlindingFactor, Error, Identifier, Keychain, IDENTIFIER_SIZE};
pub use types::{
BlindSum, BlindingFactor, Error, ExtKeychainPath, Identifier, Keychain, IDENTIFIER_SIZE,
};

View file

@ -14,6 +14,7 @@
use rand::thread_rng;
use std::cmp::min;
use std::io::Cursor;
use std::ops::Add;
/// Keychain trait and its main supporting types. The Identifier is a
/// semi-opaque structure (just bytes) to track keys within the Keychain.
@ -22,7 +23,8 @@ use std::ops::Add;
use std::{error, fmt};
use blake2::blake2b::blake2b;
use serde::{de, ser};
use extkey_bip32::{self, ChildNumber, ExtendedPrivKey};
use serde::{de, ser}; //TODO: Convert errors to use ErrorKind
use util;
use util::secp::constants::SECRET_KEY_SIZE;
@ -31,13 +33,15 @@ use util::secp::pedersen::Commitment;
use util::secp::{self, Message, Secp256k1, Signature};
use util::static_secp_instance;
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
// Size of an identifier in bytes
pub const IDENTIFIER_SIZE: usize = 10;
pub const IDENTIFIER_SIZE: usize = 17;
#[derive(PartialEq, Eq, Clone, Debug)]
pub enum Error {
Secp(secp::Error),
KeyDerivation(String),
KeyDerivation(extkey_bip32::Error),
Transaction(String),
RangeProof(String),
}
@ -48,6 +52,12 @@ impl From<secp::Error> for Error {
}
}
impl From<extkey_bip32::Error> for Error {
fn from(e: extkey_bip32::Error) -> Error {
Error::KeyDerivation(e)
}
}
impl error::Error for Error {
fn description(&self) -> &str {
match *self {
@ -108,6 +118,42 @@ impl Identifier {
Identifier::from_bytes(&[0; IDENTIFIER_SIZE])
}
pub fn from_path(path: &ExtKeychainPath) -> Identifier {
path.to_identifier()
}
pub fn to_path(&self) -> ExtKeychainPath {
ExtKeychainPath::from_identifier(&self)
}
/// output the path itself, for insertion into bulletproof
/// recovery processes can grind through possiblities to find the
/// correct length if required
pub fn serialize_path(&self) -> [u8; IDENTIFIER_SIZE - 1] {
let mut retval = [0u8; IDENTIFIER_SIZE - 1];
retval.copy_from_slice(&self.0[1..IDENTIFIER_SIZE]);
retval
}
/// restore from a serialized path
pub fn from_serialized_path(len: u8, p: &[u8]) -> Identifier {
let mut id = [0; IDENTIFIER_SIZE];
id[0] = len;
for i in 1..IDENTIFIER_SIZE {
id[i] = p[i - 1];
}
Identifier(id)
}
/// Return the parent path
pub fn parent_path(&self) -> Identifier {
let mut p = ExtKeychainPath::from_identifier(&self);
if p.depth > 0 {
p.path[p.depth as usize - 1] = ChildNumber::from(0);
p.depth = p.depth - 1;
}
Identifier::from_path(&p)
}
pub fn from_bytes(bytes: &[u8]) -> Identifier {
let mut identifier = [0; IDENTIFIER_SIZE];
for i in 0..min(IDENTIFIER_SIZE, bytes.len()) {
@ -142,6 +188,15 @@ impl Identifier {
pub fn to_hex(&self) -> String {
util::to_hex(self.0.to_vec())
}
pub fn to_bip_32_string(&self) -> String {
let p = ExtKeychainPath::from_identifier(&self);
let mut retval = String::from("m");
for i in 0..p.depth {
retval.push_str(&format!("/{}", <u32>::from(p.path[i as usize])));
}
retval
}
}
impl AsRef<[u8]> for Identifier {
@ -272,8 +327,8 @@ pub struct SplitBlindingFactor {
/// factor as well as the "sign" with which they should be combined.
#[derive(Clone, Debug, PartialEq)]
pub struct BlindSum {
pub positive_key_ids: Vec<Identifier>,
pub negative_key_ids: Vec<Identifier>,
pub positive_key_ids: Vec<ExtKeychainPath>,
pub negative_key_ids: Vec<ExtKeychainPath>,
pub positive_blinding_factors: Vec<BlindingFactor>,
pub negative_blinding_factors: Vec<BlindingFactor>,
}
@ -289,13 +344,13 @@ impl BlindSum {
}
}
pub fn add_key_id(mut self, key_id: Identifier) -> BlindSum {
self.positive_key_ids.push(key_id);
pub fn add_key_id(mut self, path: ExtKeychainPath) -> BlindSum {
self.positive_key_ids.push(path);
self
}
pub fn sub_key_id(mut self, key_id: Identifier) -> BlindSum {
self.negative_key_ids.push(key_id);
pub fn sub_key_id(mut self, path: ExtKeychainPath) -> BlindSum {
self.negative_key_ids.push(path);
self
}
@ -312,16 +367,78 @@ impl BlindSum {
}
}
/// Encapsulates a max 4-level deep BIP32 path, which is the
/// most we can currently fit into a rangeproof message
#[derive(Copy, Clone, PartialEq, Eq, Debug, Deserialize)]
pub struct ExtKeychainPath {
pub depth: u8,
pub path: [extkey_bip32::ChildNumber; 4],
}
impl ExtKeychainPath {
/// Return a new chain path with given derivation and depth
pub fn new(depth: u8, d0: u32, d1: u32, d2: u32, d3: u32) -> ExtKeychainPath {
ExtKeychainPath {
depth: depth,
path: [
ChildNumber::from(d0),
ChildNumber::from(d1),
ChildNumber::from(d2),
ChildNumber::from(d3),
],
}
}
/// from an Indentifier [manual deserialization]
pub fn from_identifier(id: &Identifier) -> ExtKeychainPath {
let mut rdr = Cursor::new(id.0.to_vec());
ExtKeychainPath {
depth: rdr.read_u8().unwrap(),
path: [
ChildNumber::from(rdr.read_u32::<BigEndian>().unwrap()),
ChildNumber::from(rdr.read_u32::<BigEndian>().unwrap()),
ChildNumber::from(rdr.read_u32::<BigEndian>().unwrap()),
ChildNumber::from(rdr.read_u32::<BigEndian>().unwrap()),
],
}
}
/// to an Identifier [manual serialization]
pub fn to_identifier(&self) -> Identifier {
let mut wtr = vec![];
wtr.write_u8(self.depth).unwrap();
wtr.write_u32::<BigEndian>(<u32>::from(self.path[0]))
.unwrap();
wtr.write_u32::<BigEndian>(<u32>::from(self.path[1]))
.unwrap();
wtr.write_u32::<BigEndian>(<u32>::from(self.path[2]))
.unwrap();
wtr.write_u32::<BigEndian>(<u32>::from(self.path[3]))
.unwrap();
let mut retval = [0u8; IDENTIFIER_SIZE];
retval.copy_from_slice(&wtr[0..IDENTIFIER_SIZE]);
Identifier(retval)
}
/// Last part of the path (for last n_child)
pub fn last_path_index(&self) -> u32 {
if self.depth == 0 {
0
} else {
<u32>::from(self.path[self.depth as usize - 1])
}
}
}
pub trait Keychain: Sync + Send + Clone {
fn from_seed(seed: &[u8]) -> Result<Self, Error>;
fn from_random_seed() -> Result<Self, Error>;
fn root_key_id(&self) -> Identifier;
fn derive_key_id(&self, derivation: u32) -> Result<Identifier, Error>;
fn derived_key(&self, key_id: &Identifier) -> Result<SecretKey, Error>;
fn commit(&self, amount: u64, key_id: &Identifier) -> Result<Commitment, Error>;
fn commit_with_key_index(&self, amount: u64, derivation: u32) -> Result<Commitment, Error>;
fn root_key_id() -> Identifier;
fn derive_key_id(depth: u8, d1: u32, d2: u32, d3: u32, d4: u32) -> Identifier;
fn derive_key(&self, id: &Identifier) -> Result<ExtendedPrivKey, Error>;
fn commit(&self, amount: u64, id: &Identifier) -> Result<Commitment, Error>;
fn blind_sum(&self, blind_sum: &BlindSum) -> Result<BlindingFactor, Error>;
fn sign(&self, msg: &Message, key_id: &Identifier) -> Result<Signature, Error>;
fn sign(&self, msg: &Message, id: &Identifier) -> Result<Signature, Error>;
fn sign_with_blinding(&self, &Message, &BlindingFactor) -> Result<Signature, Error>;
fn secp(&self) -> &Secp256k1;
}
@ -330,7 +447,7 @@ pub trait Keychain: Sync + Send + Clone {
mod test {
use rand::thread_rng;
use types::BlindingFactor;
use types::{BlindingFactor, ExtKeychainPath, Identifier};
use util::secp::key::{SecretKey, ZERO_KEY};
use util::secp::Secp256k1;
@ -361,4 +478,34 @@ mod test {
assert_eq!(skey_in, skey_out);
}
// Check path identifiers
#[test]
fn path_identifier() {
let path = ExtKeychainPath::new(4, 1, 2, 3, 4);
let id = Identifier::from_path(&path);
let ret_path = id.to_path();
assert_eq!(path, ret_path);
let path = ExtKeychainPath::new(
1,
<u32>::max_value(),
<u32>::max_value(),
3,
<u32>::max_value(),
);
let id = Identifier::from_path(&path);
let ret_path = id.to_path();
assert_eq!(path, ret_path);
println!("id: {:?}", id);
println!("ret_path {:?}", ret_path);
let path = ExtKeychainPath::new(3, 0, 0, 10, 0);
let id = Identifier::from_path(&path);
let parent_id = id.parent_path();
let expected_path = ExtKeychainPath::new(2, 0, 0, 0, 0);
let expected_id = Identifier::from_path(&expected_path);
assert_eq!(expected_id, parent_id);
}
}
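For reference, a minimal standalone sketch of the identifier layout exercised by the test above: one depth byte followed by four big-endian u32 child indices, giving IDENTIFIER_SIZE = 17, with the trailing 16 bytes being what serialize_path() later embeds in a bulletproof message. The pack/unpack helpers here are illustrative stand-ins for to_identifier/from_identifier, not the crate's API.

// Illustrative sketch only: mirrors the Identifier byte layout
// (1 depth byte + four big-endian u32 child indices = 17 bytes).
fn pack(depth: u8, path: [u32; 4]) -> [u8; 17] {
	let mut id = [0u8; 17];
	id[0] = depth;
	for (i, d) in path.iter().enumerate() {
		id[1 + i * 4..5 + i * 4].copy_from_slice(&d.to_be_bytes());
	}
	id
}

fn unpack(id: &[u8; 17]) -> (u8, [u32; 4]) {
	let mut path = [0u32; 4];
	for i in 0..4 {
		let mut b = [0u8; 4];
		b.copy_from_slice(&id[1 + i * 4..5 + i * 4]);
		path[i] = u32::from_be_bytes(b);
	}
	(id[0], path)
}

fn main() {
	// m/1/2/3/4 at depth 4, as in the path_identifier test above
	let id = pack(4, [1, 2, 3, 4]);
	assert_eq!(unpack(&id), (4, [1, 2, 3, 4]));
	// serialize_path() is the identifier minus the depth byte
	assert_eq!(id[1..].len(), 16);
}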

View file

@ -118,10 +118,8 @@ impl Pool {
// flatten buckets using aggregate (with cut-through)
let mut flat_txs: Vec<Transaction> = tx_buckets
.into_iter()
.filter_map(|mut bucket| {
bucket.truncate(MAX_TX_CHAIN);
transaction::aggregate(bucket).ok()
}).filter(|x| x.validate(self.verifier_cache.clone()).is_ok())
.filter_map(|bucket| transaction::aggregate(bucket).ok())
.filter(|x| x.validate(self.verifier_cache.clone()).is_ok())
.collect();
// sort by fees over weight, multiplying by 1000 to keep some precision

View file

@ -51,13 +51,12 @@ fn test_transaction_pool_block_building() {
// so we have a non-empty UTXO set.
let add_block = |prev_header: BlockHeader, txs: Vec<Transaction>, chain: &mut ChainAdapter| {
let height = prev_header.height + 1;
let key_id = keychain.derive_key_id(height as u32).unwrap();
let key_id = ExtKeychain::derive_key_id(1, height as u32, 0, 0, 0);
let fee = txs.iter().map(|x| x.fee()).sum();
let reward = libtx::reward::output(&keychain, &key_id, fee, height).unwrap();
let block = Block::new(&prev_header, txs, Difficulty::one(), reward).unwrap();
chain.update_db_for_block(&block);
block.header
};
@ -113,7 +112,7 @@ fn test_transaction_pool_block_building() {
assert_eq!(txs.len(), 3);
let block = {
let key_id = keychain.derive_key_id(2).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 2, 0, 0, 0);
let fees = txs.iter().map(|tx| tx.fee()).sum();
let reward = libtx::reward::output(&keychain, &key_id, fees, 0).unwrap();
Block::new(&header, txs, Difficulty::one(), reward)

View file

@ -50,7 +50,7 @@ fn test_transaction_pool_block_reconciliation() {
let header = {
let height = 1;
let key_id = keychain.derive_key_id(height as u32).unwrap();
let key_id = ExtKeychain::derive_key_id(1, height as u32, 0, 0, 0);
let reward = libtx::reward::output(&keychain, &key_id, 0, height).unwrap();
let block = Block::new(&BlockHeader::default(), vec![], Difficulty::one(), reward).unwrap();
@ -64,7 +64,7 @@ fn test_transaction_pool_block_reconciliation() {
let initial_tx = test_transaction_spending_coinbase(&keychain, &header, vec![10, 20, 30, 40]);
let block = {
let key_id = keychain.derive_key_id(2).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 2, 0, 0, 0);
let fees = initial_tx.fee();
let reward = libtx::reward::output(&keychain, &key_id, fees, 0).unwrap();
let block = Block::new(&header, vec![initial_tx], Difficulty::one(), reward).unwrap();
@ -154,7 +154,7 @@ fn test_transaction_pool_block_reconciliation() {
// Now apply this block.
let block = {
let key_id = keychain.derive_key_id(3).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 3, 0, 0, 0);
let fees = block_txs.iter().map(|tx| tx.fee()).sum();
let reward = libtx::reward::output(&keychain, &key_id, fees, 0).unwrap();
let block = Block::new(&header, block_txs, Difficulty::one(), reward).unwrap();

View file

@ -38,7 +38,7 @@ use chain::store::ChainStore;
use chain::types::Tip;
use pool::*;
use keychain::Keychain;
use keychain::{ExtKeychain, Keychain};
use wallet::libtx;
use pool::types::*;
@ -192,12 +192,12 @@ where
// single input spending a single coinbase (deterministic key_id aka height)
{
let key_id = keychain.derive_key_id(header.height as u32).unwrap();
let key_id = ExtKeychain::derive_key_id(1, header.height as u32, 0, 0, 0);
tx_elements.push(libtx::build::coinbase_input(coinbase_reward, key_id));
}
for output_value in output_values {
let key_id = keychain.derive_key_id(output_value as u32).unwrap();
let key_id = ExtKeychain::derive_key_id(1, output_value as u32, 0, 0, 0);
tx_elements.push(libtx::build::output(output_value, key_id));
}
@ -223,12 +223,12 @@ where
let mut tx_elements = Vec::new();
for input_value in input_values {
let key_id = keychain.derive_key_id(input_value as u32).unwrap();
let key_id = ExtKeychain::derive_key_id(1, input_value as u32, 0, 0, 0);
tx_elements.push(libtx::build::input(input_value, key_id));
}
for output_value in output_values {
let key_id = keychain.derive_key_id(output_value as u32).unwrap();
let key_id = ExtKeychain::derive_key_id(1, output_value as u32, 0, 0, 0);
tx_elements.push(libtx::build::output(output_value, key_id));
}
tx_elements.push(libtx::build::with_fee(fees as u64));

View file

@ -50,7 +50,7 @@ fn test_the_transaction_pool() {
let header = {
let height = 1;
let key_id = keychain.derive_key_id(height as u32).unwrap();
let key_id = ExtKeychain::derive_key_id(1, height as u32, 0, 0, 0);
let reward = libtx::reward::output(&keychain, &key_id, 0, height).unwrap();
let mut block =
Block::new(&BlockHeader::default(), vec![], Difficulty::one(), reward).unwrap();

View file

@ -165,10 +165,11 @@ impl ServerConfig {
// check [server.p2p_config.capabilities] with 'archive_mode' in [server]
if let Some(archive) = self.archive_mode {
// note: slog not available before config loaded, only print here.
if archive != self
.p2p_config
.capabilities
.contains(p2p::Capabilities::FULL_HIST)
if archive
!= self
.p2p_config
.capabilities
.contains(p2p::Capabilities::FULL_HIST)
{
// if conflict, 'archive_mode' wins
self.p2p_config

View file

@ -184,7 +184,7 @@ fn build_block(
fn burn_reward(block_fees: BlockFees) -> Result<(core::Output, core::TxKernel, BlockFees), Error> {
warn!(LOGGER, "Burning block fees: {:?}", block_fees);
let keychain = ExtKeychain::from_random_seed().unwrap();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let (out, kernel) =
wallet::libtx::reward::output(&keychain, &key_id, block_fees.fees, block_fees.height)
.unwrap();

View file

@ -28,7 +28,8 @@ use std::ops::Deref;
use std::sync::{Arc, Mutex};
use std::{fs, thread, time};
use wallet::{FileWallet, HTTPWalletClient, WalletConfig};
use framework::keychain::Keychain;
use wallet::{HTTPWalletClient, LMDBBackend, WalletConfig};
/// Just removes all results from previous runs
pub fn clean_all_output(test_name_dir: &str) {
@ -269,8 +270,8 @@ impl LocalServerContainer {
//panic!("Error initializing wallet seed: {}", e);
}
let wallet: FileWallet<HTTPWalletClient, keychain::ExtKeychain> =
FileWallet::new(self.wallet_config.clone(), "", client).unwrap_or_else(|e| {
let wallet: LMDBBackend<HTTPWalletClient, keychain::ExtKeychain> =
LMDBBackend::new(self.wallet_config.clone(), "", client).unwrap_or_else(|e| {
panic!(
"Error creating wallet: {:?} Config: {:?}",
e, self.wallet_config
@ -307,11 +308,12 @@ impl LocalServerContainer {
.derive_keychain("")
.expect("Failed to derive keychain from seed file and passphrase.");
let client = HTTPWalletClient::new(&config.check_node_api_http_addr, None);
let mut wallet = FileWallet::new(config.clone(), "", client)
let mut wallet = LMDBBackend::new(config.clone(), "", client)
.unwrap_or_else(|e| panic!("Error creating wallet: {:?} Config: {:?}", e, config));
wallet.keychain = Some(keychain);
let _ = wallet::libwallet::internal::updater::refresh_outputs(&mut wallet);
wallet::libwallet::internal::updater::retrieve_info(&mut wallet).unwrap()
let parent_id = keychain::ExtKeychain::derive_key_id(2, 0, 0, 0, 0);
let _ = wallet::libwallet::internal::updater::refresh_outputs(&mut wallet, &parent_id);
wallet::libwallet::internal::updater::retrieve_info(&mut wallet, &parent_id).unwrap()
}
pub fn send_amount_to(
@ -337,7 +339,7 @@ impl LocalServerContainer {
let max_outputs = 500;
let change_outputs = 1;
let mut wallet = FileWallet::new(config.clone(), "", client)
let mut wallet = LMDBBackend::new(config.clone(), "", client)
.unwrap_or_else(|e| panic!("Error creating wallet: {:?} Config: {:?}", e, config));
wallet.keychain = Some(keychain);
let _ =

View file

@ -32,13 +32,13 @@ use core::global::{self, ChainTypes};
use wallet::controller;
use wallet::libtx::slate::Slate;
use wallet::libwallet::types::{WalletBackend, WalletClient, WalletInst};
use wallet::libwallet::types::{WalletBackend, WalletInst};
use wallet::lmdb_wallet::LMDBBackend;
use wallet::HTTPWalletClient;
use wallet::WalletConfig;
use framework::{
config, stop_all_servers, stratum_config, LocalServerContainerConfig, LocalServerContainerPool,
config, stop_all_servers, LocalServerContainerConfig, LocalServerContainerPool,
LocalServerContainerPoolConfig,
};
@ -326,7 +326,7 @@ fn simulate_fast_sync() {
let s2 = servers::Server::new(conf).unwrap();
while s2.header_head().height < 1 {
s2.ping_peers();
let _ = s2.ping_peers();
thread::sleep(time::Duration::from_millis(1_000));
}
s1.stop_test_miner();
@ -351,7 +351,8 @@ fn simulate_fast_sync() {
thread::sleep(time::Duration::from_millis(1_000));
}
// #[test]
#[ignore]
#[test]
fn simulate_fast_sync_double() {
util::init_test_logger();
@ -455,7 +456,7 @@ fn replicate_tx_fluff_failure() {
s2_config.dandelion_config.embargo_secs = Some(10);
s2_config.dandelion_config.patience_secs = Some(1);
s2_config.dandelion_config.relay_secs = Some(1);
let s2 = servers::Server::new(s2_config.clone()).unwrap();
let _s2 = servers::Server::new(s2_config.clone()).unwrap();
let dl_nodes = 5;

View file

@ -19,8 +19,8 @@ use std::path::PathBuf;
/// Wallet commands processing
use std::process::exit;
use std::sync::{Arc, Mutex};
use std::thread;
use std::time::Duration;
use std::{process, thread};
use clap::ArgMatches;
@ -28,7 +28,9 @@ use api::TLSConfig;
use config::GlobalWalletConfig;
use core::{core, global};
use grin_wallet::{self, controller, display, libwallet};
use grin_wallet::{HTTPWalletClient, LMDBBackend, WalletConfig, WalletInst, WalletSeed};
use grin_wallet::{
HTTPWalletClient, LMDBBackend, WalletBackend, WalletConfig, WalletInst, WalletSeed,
};
use keychain;
use servers::start_webwallet_server;
use util::file::get_first_line;
@ -53,29 +55,23 @@ pub fn seed_exists(wallet_config: WalletConfig) -> bool {
pub fn instantiate_wallet(
wallet_config: WalletConfig,
passphrase: &str,
account: &str,
node_api_secret: Option<String>,
) -> Box<WalletInst<HTTPWalletClient, keychain::ExtKeychain>> {
if grin_wallet::needs_migrate(&wallet_config.data_file_dir) {
// Migrate wallet automatically
warn!(LOGGER, "Migrating legacy File-Based wallet to LMDB Format");
if let Err(e) = grin_wallet::migrate(&wallet_config.data_file_dir, passphrase) {
error!(LOGGER, "Error while trying to migrate wallet: {:?}", e);
error!(LOGGER, "Please ensure your file wallet files exist and are not corrupted, and that your password is correct");
panic!();
} else {
warn!(LOGGER, "Migration successful. Using LMDB Wallet backend");
}
warn!(LOGGER, "Please check the results of the migration process using `grin wallet info` and `grin wallet outputs`");
warn!(LOGGER, "If anything went wrong, you can try again by deleting the `db` directory and running a wallet command");
warn!(LOGGER, "If all is okay, you can move/backup/delete all files in the wallet directory EXCEPT FOR wallet.seed");
}
let client = HTTPWalletClient::new(&wallet_config.check_node_api_http_addr, node_api_secret);
let db_wallet = LMDBBackend::new(wallet_config.clone(), "", client).unwrap_or_else(|e| {
panic!(
"Error creating DB wallet: {} Config: {:?}",
e, wallet_config
);
});
let mut db_wallet =
LMDBBackend::new(wallet_config.clone(), passphrase, client).unwrap_or_else(|e| {
panic!(
"Error creating DB wallet: {} Config: {:?}",
e, wallet_config
);
});
db_wallet
.set_parent_key_id_by_name(account)
.unwrap_or_else(|e| {
println!("Error starting wallet: {}", e);
process::exit(0);
});
info!(LOGGER, "Using LMDB Backend for wallet");
Box::new(db_wallet)
}
@ -130,9 +126,19 @@ pub fn wallet_command(wallet_args: &ArgMatches, config: GlobalWalletConfig) {
let passphrase = wallet_args
.value_of("pass")
.expect("Failed to read passphrase.");
let account = wallet_args
.value_of("account")
.expect("Failed to read account.");
// Handle listener startup commands
{
let wallet = instantiate_wallet(wallet_config.clone(), passphrase, node_api_secret.clone());
let wallet = instantiate_wallet(
wallet_config.clone(),
passphrase,
account,
node_api_secret.clone(),
);
let api_secret = get_first_line(wallet_config.api_secret_path.clone());
let tls_conf = match wallet_config.tls_certificate_file.clone() {
@ -187,10 +193,40 @@ pub fn wallet_command(wallet_args: &ArgMatches, config: GlobalWalletConfig) {
let wallet = Arc::new(Mutex::new(instantiate_wallet(
wallet_config.clone(),
passphrase,
account,
node_api_secret,
)));
let res = controller::owner_single_use(wallet.clone(), |api| {
match wallet_args.subcommand() {
("account", Some(acct_args)) => {
let create = acct_args.value_of("create");
if create.is_none() {
let res = controller::owner_single_use(wallet, |api| {
let acct_mappings = api.accounts()?;
// give logging thread a moment to catch up
thread::sleep(Duration::from_millis(200));
display::accounts(acct_mappings, false);
Ok(())
});
if res.is_err() {
panic!("Error listing accounts: {}", res.unwrap_err());
}
} else {
let label = create.unwrap();
let res = controller::owner_single_use(wallet, |api| {
api.new_account_path(label)?;
thread::sleep(Duration::from_millis(200));
println!("Account: '{}' Created!", label);
Ok(())
});
if res.is_err() {
thread::sleep(Duration::from_millis(200));
println!("Error creating account '{}': {}", label, res.unwrap_err());
exit(1);
}
}
Ok(())
}
("send", Some(send_args)) => {
let amount = send_args
.value_of("amount")
@ -352,18 +388,19 @@ pub fn wallet_command(wallet_args: &ArgMatches, config: GlobalWalletConfig) {
e, wallet_config
)
});
display::info(&wallet_info, validated);
display::info(account, &wallet_info, validated);
Ok(())
}
("outputs", Some(_)) => {
let (height, _) = api.node_height()?;
let (validated, outputs) = api.retrieve_outputs(show_spent, true, None)?;
let _res = display::outputs(height, validated, outputs).unwrap_or_else(|e| {
panic!(
"Error getting wallet outputs: {:?} Config: {:?}",
e, wallet_config
)
});
let _res =
display::outputs(account, height, validated, outputs).unwrap_or_else(|e| {
panic!(
"Error getting wallet outputs: {:?} Config: {:?}",
e, wallet_config
)
});
Ok(())
}
("txs", Some(txs_args)) => {
@ -377,8 +414,8 @@ pub fn wallet_command(wallet_args: &ArgMatches, config: GlobalWalletConfig) {
let (height, _) = api.node_height()?;
let (validated, txs) = api.retrieve_txs(true, tx_id)?;
let include_status = !tx_id.is_some();
let _res =
display::txs(height, validated, txs, include_status).unwrap_or_else(|e| {
let _res = display::txs(account, height, validated, txs, include_status)
.unwrap_or_else(|e| {
panic!(
"Error getting wallet outputs: {} Config: {:?}",
e, wallet_config
@ -388,12 +425,13 @@ pub fn wallet_command(wallet_args: &ArgMatches, config: GlobalWalletConfig) {
// inputs/outputs
if tx_id.is_some() {
let (_, outputs) = api.retrieve_outputs(true, false, tx_id)?;
let _res = display::outputs(height, validated, outputs).unwrap_or_else(|e| {
panic!(
"Error getting wallet outputs: {} Config: {:?}",
e, wallet_config
)
});
let _res = display::outputs(account, height, validated, outputs)
.unwrap_or_else(|e| {
panic!(
"Error getting wallet outputs: {} Config: {:?}",
e, wallet_config
)
});
};
Ok(())
}

View file

@ -97,7 +97,7 @@ fn main() {
.help("Port to start the P2P server on")
.takes_value(true))
.arg(Arg::with_name("api_port")
.short("a")
.short("api")
.long("api_port")
.help("Port on which to start the api server (e.g. transaction pool api)")
.takes_value(true))
@ -154,6 +154,12 @@ fn main() {
.help("Wallet passphrase used to generate the private key seed")
.takes_value(true)
.default_value(""))
.arg(Arg::with_name("account")
.short("a")
.long("account")
.help("Wallet account to use for this operation")
.takes_value(true)
.default_value("default"))
.arg(Arg::with_name("data_dir")
.short("dd")
.long("data_dir")
@ -171,11 +177,19 @@ fn main() {
.help("Show spent outputs on wallet output command")
.takes_value(false))
.arg(Arg::with_name("api_server_address")
.short("a")
.short("r")
.long("api_server_address")
.help("Api address of running node on which to check inputs and post transactions")
.takes_value(true))
.subcommand(SubCommand::with_name("account")
.about("List wallet accounts or create a new account")
.arg(Arg::with_name("create")
.short("c")
.long("create")
.help("Name of new wallet account")
.takes_value(true)))
.subcommand(SubCommand::with_name("listen")
.about("Runs the wallet in listening mode waiting for transactions.")
.arg(Arg::with_name("port")

View file

@ -63,11 +63,13 @@ pub fn create() -> Box<View> {
let mut s: ViewRef<SelectView<&str>> = c.find_id(MAIN_MENU).unwrap();
s.select_down(1)(c);
Some(EventResult::Consumed(None));
}).on_pre_event('k', move |c| {
})
.on_pre_event('k', move |c| {
let mut s: ViewRef<SelectView<&str>> = c.find_id(MAIN_MENU).unwrap();
s.select_up(1)(c);
Some(EventResult::Consumed(None));
}).on_pre_event(Key::Tab, move |c| {
})
.on_pre_event(Key::Tab, move |c| {
let mut s: ViewRef<SelectView<&str>> = c.find_id(MAIN_MENU).unwrap();
if s.selected_id().unwrap() == s.len() - 1 {
s.set_selection(0)(c);

View file

@ -170,17 +170,23 @@ impl TUIStatusListener for TUIMiningView {
let table_view = TableView::<WorkerStats, StratumWorkerColumn>::new()
.column(StratumWorkerColumn::Id, "Worker ID", |c| {
c.width_percent(10)
}).column(StratumWorkerColumn::IsConnected, "Connected", |c| {
})
.column(StratumWorkerColumn::IsConnected, "Connected", |c| {
c.width_percent(10)
}).column(StratumWorkerColumn::LastSeen, "Last Seen", |c| {
})
.column(StratumWorkerColumn::LastSeen, "Last Seen", |c| {
c.width_percent(20)
}).column(StratumWorkerColumn::PowDifficulty, "Pow Difficulty", |c| {
})
.column(StratumWorkerColumn::PowDifficulty, "Pow Difficulty", |c| {
c.width_percent(10)
}).column(StratumWorkerColumn::NumAccepted, "Num Accepted", |c| {
})
.column(StratumWorkerColumn::NumAccepted, "Num Accepted", |c| {
c.width_percent(10)
}).column(StratumWorkerColumn::NumRejected, "Num Rejected", |c| {
})
.column(StratumWorkerColumn::NumRejected, "Num Rejected", |c| {
c.width_percent(10)
}).column(StratumWorkerColumn::NumStale, "Num Stale", |c| {
})
.column(StratumWorkerColumn::NumStale, "Num Stale", |c| {
c.width_percent(10)
});
@ -188,22 +194,28 @@ impl TUIStatusListener for TUIMiningView {
.child(
LinearLayout::new(Orientation::Horizontal)
.child(TextView::new(" ").with_id("stratum_config_status")),
).child(
)
.child(
LinearLayout::new(Orientation::Horizontal)
.child(TextView::new(" ").with_id("stratum_is_running_status")),
).child(
)
.child(
LinearLayout::new(Orientation::Horizontal)
.child(TextView::new(" ").with_id("stratum_num_workers_status")),
).child(
)
.child(
LinearLayout::new(Orientation::Horizontal)
.child(TextView::new(" ").with_id("stratum_block_height_status")),
).child(
)
.child(
LinearLayout::new(Orientation::Horizontal)
.child(TextView::new(" ").with_id("stratum_network_difficulty_status")),
).child(
)
.child(
LinearLayout::new(Orientation::Horizontal)
.child(TextView::new(" ").with_id("stratum_network_hashrate")),
).child(
)
.child(
LinearLayout::new(Orientation::Horizontal)
.child(TextView::new(" ").with_id("stratum_cuckoo_size_status")),
);
@ -213,22 +225,26 @@ impl TUIStatusListener for TUIMiningView {
.child(BoxView::with_full_screen(
Dialog::around(table_view.with_id(TABLE_MINING_STATUS).min_size((50, 20)))
.title("Mining Workers"),
)).with_id("mining_device_view");
))
.with_id("mining_device_view");
let diff_status_view = LinearLayout::new(Orientation::Vertical)
.child(
LinearLayout::new(Orientation::Horizontal)
.child(TextView::new("Tip Height: "))
.child(TextView::new("").with_id("diff_cur_height")),
).child(
)
.child(
LinearLayout::new(Orientation::Horizontal)
.child(TextView::new("Difficulty Adjustment Window: "))
.child(TextView::new("").with_id("diff_adjust_window")),
).child(
)
.child(
LinearLayout::new(Orientation::Horizontal)
.child(TextView::new("Average Block Time: "))
.child(TextView::new("").with_id("diff_avg_block_time")),
).child(
)
.child(
LinearLayout::new(Orientation::Horizontal)
.child(TextView::new("Average Difficulty: "))
.child(TextView::new("").with_id("diff_avg_difficulty")),
@ -237,9 +253,11 @@ impl TUIStatusListener for TUIMiningView {
let diff_table_view = TableView::<DiffBlock, DiffColumn>::new()
.column(DiffColumn::BlockNumber, "Block Number", |c| {
c.width_percent(25)
}).column(DiffColumn::Difficulty, "Network Difficulty", |c| {
})
.column(DiffColumn::Difficulty, "Network Difficulty", |c| {
c.width_percent(25)
}).column(DiffColumn::Time, "Block Time", |c| c.width_percent(25))
})
.column(DiffColumn::Time, "Block Time", |c| c.width_percent(25))
.column(DiffColumn::Duration, "Duration", |c| c.width_percent(25));
let mining_difficulty_view = LinearLayout::new(Orientation::Vertical)
@ -250,7 +268,8 @@ impl TUIStatusListener for TUIMiningView {
.with_id(TABLE_MINING_DIFF_STATUS)
.min_size((50, 20)),
).title("Mining Difficulty Data"),
)).with_id("mining_difficulty_view");
))
.with_id("mining_difficulty_view");
let view_stack = StackView::new()
.layer(mining_difficulty_view)

View file

@ -84,6 +84,15 @@ pub fn to_key(prefix: u8, k: &mut Vec<u8>) -> Vec<u8> {
res
}
/// Build a db key from a prefix, a byte vector identifier, and a numeric identifier
pub fn to_key_u64(prefix: u8, k: &mut Vec<u8>, val: u64) -> Vec<u8> {
let mut res = vec![];
res.push(prefix);
res.push(SEP);
res.append(k);
res.write_u64::<BigEndian>(val).unwrap();
res
}
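As a rough illustration of the resulting key layout (prefix byte, separator, identifier bytes, then the u64 appended big-endian); the SEP value below is only a placeholder for the module's own separator constant:

// Illustrative sketch of the key layout built by to_key_u64:
// [prefix][SEP][identifier bytes ...][8-byte big-endian u64]
const SEP: u8 = b':'; // placeholder; the real value is the module's SEP constant

fn to_key_u64_sketch(prefix: u8, k: &[u8], val: u64) -> Vec<u8> {
	let mut res = Vec::with_capacity(2 + k.len() + 8);
	res.push(prefix);
	res.push(SEP);
	res.extend_from_slice(k);
	res.extend_from_slice(&val.to_be_bytes());
	res
}

fn main() {
	let key = to_key_u64_sketch(b't', b"tx-log-parent-id", 42);
	assert_eq!(&key[key.len() - 8..], &42u64.to_be_bytes());
}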
/// Build a db key from a prefix and a numeric identifier.
pub fn u64_to_key<'a>(prefix: u8, val: u64) -> Vec<u8> {
let mut u64_vec = vec![];

View file

@ -21,6 +21,6 @@ zip = "0.4"
[dependencies.secp256k1zkp]
git = "https://github.com/mimblewimble/rust-secp256k1-zkp"
tag = "grin_integration_23a"
tag = "grin_integration_28"
#path = "../../rust-secp256k1-zkp"
features = ["bullet-proof-sizing"]

View file

@ -77,7 +77,7 @@ pub fn get_first_line(file_path: Option<String>) -> Option<String> {
Some(path) => match fs::File::open(path) {
Ok(file) => {
let buf_reader = io::BufReader::new(file);
let mut lines_iter = buf_reader.lines().map(|l| l.unwrap());;
let mut lines_iter = buf_reader.lines().map(|l| l.unwrap());
lines_iter.next()
}
Err(_) => None,

View file

@ -35,7 +35,5 @@ pub fn static_secp_instance() -> Arc<Mutex<secp::Secp256k1>> {
/// Convenient way to generate a commitment to zero.
pub fn commit_to_zero_value() -> secp::pedersen::Commitment {
let secp = static_secp_instance();
let secp = secp.lock().unwrap();
secp.commit_value(0).unwrap()
secp::pedersen::Commitment::from_vec(vec![0])
}

View file

@ -1,150 +0,0 @@
// Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Temporary utility to migrate wallet data from file to a database
use keychain::{ExtKeychain, Identifier, Keychain};
use std::fs::File;
use std::io::Read;
use std::path::{Path, MAIN_SEPARATOR};
/// Migrate wallet data. Assumes current directory contains a set of wallet
/// files
use std::sync::Arc;
use error::{Error, ErrorKind};
use failure::ResultExt;
use serde_json;
use libwallet::types::WalletDetails;
use types::WalletSeed;
use libwallet::types::OutputData;
use store::{self, to_key};
const DETAIL_FILE: &'static str = "wallet.det";
const DAT_FILE: &'static str = "wallet.dat";
const SEED_FILE: &'static str = "wallet.seed";
const DB_DIR: &'static str = "db";
const OUTPUT_PREFIX: u8 = 'o' as u8;
const DERIV_PREFIX: u8 = 'd' as u8;
const CONFIRMED_HEIGHT_PREFIX: u8 = 'c' as u8;
// determine whether we have wallet files but no file wallet
pub fn needs_migrate(data_dir: &str) -> bool {
let db_path = Path::new(data_dir).join(DB_DIR);
let data_path = Path::new(data_dir).join(DAT_FILE);
if !db_path.exists() && data_path.exists() {
return true;
}
false
}
pub fn migrate(data_dir: &str, pwd: &str) -> Result<(), Error> {
let data_file_path = format!("{}{}{}", data_dir, MAIN_SEPARATOR, DAT_FILE);
let details_file_path = format!("{}{}{}", data_dir, MAIN_SEPARATOR, DETAIL_FILE);
let seed_file_path = format!("{}{}{}", data_dir, MAIN_SEPARATOR, SEED_FILE);
let outputs = read_outputs(&data_file_path)?;
let details = read_details(&details_file_path)?;
let mut file = File::open(seed_file_path).context(ErrorKind::IO)?;
let mut buffer = String::new();
file.read_to_string(&mut buffer).context(ErrorKind::IO)?;
let wallet_seed = WalletSeed::from_hex(&buffer)?;
let keychain: ExtKeychain = wallet_seed.derive_keychain(pwd)?;
let root_key_id = keychain.root_key_id();
//open db
let db_path = Path::new(data_dir).join(DB_DIR);
let lmdb_env = Arc::new(store::new_env(db_path.to_str().unwrap().to_string()));
// open store
let store = store::Store::open(lmdb_env, DB_DIR);
let batch = store.batch().unwrap();
// write
for out in outputs {
save_output(&batch, out.clone())?;
}
save_details(&batch, root_key_id, details)?;
let res = batch.commit();
if let Err(e) = res {
panic!("Unable to commit db: {:?}", e);
}
Ok(())
}
/// save output in db
fn save_output(batch: &store::Batch, out: OutputData) -> Result<(), Error> {
let key = to_key(OUTPUT_PREFIX, &mut out.key_id.to_bytes().to_vec());
if let Err(e) = batch.put_ser(&key, &out) {
Err(ErrorKind::GenericError(format!(
"Error inserting output: {:?}",
e
)))?;
}
Ok(())
}
/// save details in db
fn save_details(
batch: &store::Batch,
root_key_id: Identifier,
d: WalletDetails,
) -> Result<(), Error> {
let deriv_key = to_key(DERIV_PREFIX, &mut root_key_id.to_bytes().to_vec());
let height_key = to_key(
CONFIRMED_HEIGHT_PREFIX,
&mut root_key_id.to_bytes().to_vec(),
);
if let Err(e) = batch.put_ser(&deriv_key, &d.last_child_index) {
Err(ErrorKind::GenericError(format!(
"Error saving last_child_index: {:?}",
e
)))?;
}
if let Err(e) = batch.put_ser(&height_key, &d.last_confirmed_height) {
Err(ErrorKind::GenericError(format!(
"Error saving last_confirmed_height: {:?}",
e
)))?;
}
Ok(())
}
/// Read output_data vec from disk.
fn read_outputs(data_file_path: &str) -> Result<Vec<OutputData>, Error> {
let data_file = File::open(data_file_path.clone())
.context(ErrorKind::FileWallet(&"Could not open wallet file"))?;
serde_json::from_reader(data_file)
.context(ErrorKind::Format)
.map_err(|e| e.into())
}
/// Read details file from disk
fn read_details(details_file_path: &str) -> Result<WalletDetails, Error> {
let details_file = File::open(details_file_path.clone())
.context(ErrorKind::FileWallet(&"Could not open wallet details file"))?;
serde_json::from_reader(details_file)
.context(ErrorKind::Format)
.map_err(|e| e.into())
}
#[ignore]
#[test]
fn migrate_db() {
let _ = migrate("test_wallet", "");
}

View file

@ -13,7 +13,7 @@
// limitations under the License.
use core::core::{self, amount_to_hr_string};
use libwallet::types::{OutputData, TxLogEntry, WalletInfo};
use libwallet::types::{AcctPathMapping, OutputData, TxLogEntry, WalletInfo};
use libwallet::Error;
use prettytable;
use std::io::prelude::Write;
@ -23,11 +23,15 @@ use util::secp::pedersen;
/// Display outputs in a pretty way
pub fn outputs(
account: &str,
cur_height: u64,
validated: bool,
outputs: Vec<(OutputData, pedersen::Commitment)>,
) -> Result<(), Error> {
let title = format!("Wallet Outputs - Block Height: {}", cur_height);
let title = format!(
"Wallet Outputs - Account '{}' - Block Height: {}",
account, cur_height
);
println!();
let mut t = term::stdout().unwrap();
t.fg(term::color::MAGENTA).unwrap();
@ -87,12 +91,16 @@ pub fn outputs(
/// Display transaction log in a pretty way
pub fn txs(
account: &str,
cur_height: u64,
validated: bool,
txs: Vec<TxLogEntry>,
include_status: bool,
) -> Result<(), Error> {
let title = format!("Transaction Log - Block Height: {}", cur_height);
let title = format!(
"Transaction Log - Account '{}' - Block Height: {}",
account, cur_height
);
println!();
let mut t = term::stdout().unwrap();
t.fg(term::color::MAGENTA).unwrap();
@ -181,10 +189,10 @@ pub fn txs(
Ok(())
}
/// Display summary info in a pretty way
pub fn info(wallet_info: &WalletInfo, validated: bool) {
pub fn info(account: &str, wallet_info: &WalletInfo, validated: bool) {
println!(
"\n____ Wallet Summary Info as of {} ____\n",
wallet_info.last_confirmed_height
"\n____ Wallet Summary Info - Account '{}' as of height {} ____\n",
account, wallet_info.last_confirmed_height
);
let mut table = table!(
[bFG->"Total", FG->amount_to_hr_string(wallet_info.total, false)],
@ -205,3 +213,22 @@ pub fn info(wallet_info: &WalletInfo, validated: bool) {
);
}
}
/// Display list of wallet accounts in a pretty way
pub fn accounts(acct_mappings: Vec<AcctPathMapping>, show_derivations: bool) {
println!("\n____ Wallet Accounts ____\n",);
let mut table = table!();
table.set_titles(row![
mMG->"Name",
bMG->"Parent BIP-32 Derivation Path",
]);
for m in acct_mappings {
table.add_row(row![
bFC->m.label,
bGC->m.path.to_bip_32_string(),
]);
}
table.set_format(*prettytable::format::consts::FORMAT_NO_BORDER_LINE_SEPARATOR);
table.printstd();
println!();
}

View file

@ -444,9 +444,9 @@ where
// write details file
let mut details_file =
File::create(details_file_path).context(ErrorKind::FileWallet(&"Could not create "))?;
let res_json = serde_json::to_string_pretty(&self.details).context(
ErrorKind::FileWallet("Error serializing wallet details file"),
)?;
let res_json = serde_json::to_string_pretty(&self.details).context(ErrorKind::FileWallet(
"Error serializing wallet details file",
))?;
details_file
.write_all(res_json.into_bytes().as_slice())
.context(ErrorKind::FileWallet(&"Error writing wallet details file"))

View file

@ -46,10 +46,8 @@ extern crate grin_store as store;
extern crate grin_util as util;
mod client;
mod db_migrate;
pub mod display;
mod error;
pub mod file_wallet;
pub mod libtx;
pub mod libwallet;
pub mod lmdb_wallet;
@ -57,13 +55,9 @@ mod types;
pub use client::{create_coinbase, HTTPWalletClient};
pub use error::{Error, ErrorKind};
pub use file_wallet::FileWallet;
pub use libwallet::controller;
pub use libwallet::types::{
BlockFees, CbData, WalletBackend, WalletClient, WalletInfo, WalletInst,
};
pub use lmdb_wallet::{wallet_db_exists, LMDBBackend};
pub use types::{WalletConfig, WalletSeed, SEED_FILE};
// temporary
pub use db_migrate::{migrate, needs_migrate};

View file

@ -33,6 +33,7 @@ pub fn calculate_partial_sig(
sec_key: &SecretKey,
sec_nonce: &SecretKey,
nonce_sum: &PublicKey,
pubkey_sum: Option<&PublicKey>,
fee: u64,
lock_height: u64,
) -> Result<Signature, Error> {
@ -45,7 +46,9 @@ pub fn calculate_partial_sig(
&msg,
sec_key,
Some(sec_nonce),
None,
Some(nonce_sum),
pubkey_sum,
Some(nonce_sum),
)?;
Ok(sig)
@ -57,11 +60,20 @@ pub fn verify_partial_sig(
sig: &Signature,
pub_nonce_sum: &PublicKey,
pubkey: &PublicKey,
pubkey_sum: Option<&PublicKey>,
fee: u64,
lock_height: u64,
) -> Result<(), Error> {
let msg = secp::Message::from_slice(&kernel_sig_msg(fee, lock_height))?;
if !verify_single(secp, sig, &msg, Some(&pub_nonce_sum), pubkey, true) {
if !verify_single(
secp,
sig,
&msg,
Some(&pub_nonce_sum),
pubkey,
pubkey_sum,
true,
) {
Err(ErrorKind::Signature(
"Signature validation error".to_string(),
))?
@ -75,12 +87,22 @@ pub fn sign_from_key_id<K>(
k: &K,
msg: &Message,
key_id: &Identifier,
blind_sum: Option<&PublicKey>,
) -> Result<Signature, Error>
where
K: Keychain,
{
let skey = k.derived_key(key_id)?;
let sig = aggsig::sign_single(secp, &msg, &skey, None, None, None)?;
let skey = k.derive_key(key_id)?;
let sig = aggsig::sign_single(
secp,
&msg,
&skey.secret_key,
None,
None,
None,
blind_sum,
None,
)?;
Ok(sig)
}
@ -91,10 +113,8 @@ pub fn verify_single_from_commit(
msg: &Message,
commit: &Commitment,
) -> Result<(), Error> {
// Extract the pubkey, unfortunately we need this hack for now, (we just hope
// one is valid)
let pubkey = commit.to_pubkey(secp)?;
if !verify_single(secp, sig, &msg, None, &pubkey, false) {
if !verify_single(secp, sig, &msg, None, &pubkey, Some(&pubkey), false) {
Err(ErrorKind::Signature(
"Signature validation error".to_string(),
))?
@ -107,11 +127,12 @@ pub fn verify_sig_build_msg(
secp: &Secp256k1,
sig: &Signature,
pubkey: &PublicKey,
pubkey_sum: Option<&PublicKey>,
fee: u64,
lock_height: u64,
) -> Result<(), Error> {
let msg = secp::Message::from_slice(&kernel_sig_msg(fee, lock_height))?;
if !verify_single(secp, sig, &msg, None, pubkey, true) {
if !verify_single(secp, sig, &msg, None, pubkey, pubkey_sum, true) {
Err(ErrorKind::Signature(
"Signature validation error".to_string(),
))?
@ -126,9 +147,12 @@ pub fn verify_single(
msg: &Message,
pubnonce: Option<&PublicKey>,
pubkey: &PublicKey,
pubkey_sum: Option<&PublicKey>,
is_partial: bool,
) -> bool {
aggsig::verify_single(secp, sig, msg, pubnonce, pubkey, is_partial)
aggsig::verify_single(
secp, sig, msg, pubnonce, pubkey, pubkey_sum, None, is_partial,
)
}
/// Adds signatures
@ -147,8 +171,10 @@ pub fn sign_with_blinding(
secp: &Secp256k1,
msg: &Message,
blinding: &BlindingFactor,
pubkey_sum: Option<&PublicKey>,
) -> Result<Signature, Error> {
let skey = &blinding.secret_key(&secp)?;
let sig = aggsig::sign_single(secp, &msg, skey, None, None, None)?;
//let pubkey_sum = PublicKey::from_secret_key(&secp, &skey)?;
let sig = aggsig::sign_single(secp, &msg, skey, None, None, None, pubkey_sum, None)?;
Ok(sig)
}
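Tying the new pubkey_sum plumbing together: per the commit notes ("added public key sum to calculated e for aggsig"), the Schnorr challenge is now bound to the sum of the participants' public keys as well as the nonce sum. Schematically (the exact serialization is defined by the underlying secp256k1-zkp aggsig code):

$e = H(R_{sum} \,\|\, P_{sum} \,\|\, m), \qquad s_i = k_i + e \cdot x_i$

which is why calculate_partial_sig, verify_partial_sig and the kernel/commitment verification paths above all take an optional pubkey_sum argument.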

View file

@ -55,7 +55,7 @@ where
move |build, (tx, kern, sum)| -> (Transaction, TxKernel, BlindSum) {
let commit = build.keychain.commit(value, &key_id).unwrap();
let input = Input::new(features, commit);
(tx.with_input(input), kern, sum.sub_key_id(key_id.clone()))
(tx.with_input(input), kern, sum.sub_key_id(key_id.to_path()))
},
)
}
@ -106,7 +106,7 @@ where
proof: rproof,
}),
kern,
sum.add_key_id(key_id.clone()),
sum.add_key_id(key_id.to_path()),
)
},
)
@ -236,7 +236,9 @@ where
// Generate kernel excess and excess_sig using the split key k1.
let skey = k1.secret_key(&keychain.secp())?;
kern.excess = ctx.keychain.secp().commit(0, skey)?;
kern.excess_sig = aggsig::sign_with_blinding(&keychain.secp(), &msg, &k1).unwrap();
let pubkey = &kern.excess.to_pubkey(&keychain.secp())?;
kern.excess_sig =
aggsig::sign_with_blinding(&keychain.secp(), &msg, &k1, Some(&pubkey)).unwrap();
// Store the kernel offset (k2) on the tx.
// Commitments will sum correctly when accounting for the offset.
@ -257,7 +259,7 @@ mod test {
use super::*;
use core::core::verifier_cache::{LruVerifierCache, VerifierCache};
use keychain::ExtKeychain;
use keychain::{ExtKeychain, ExtKeychainPath};
fn verifier_cache() -> Arc<RwLock<VerifierCache>> {
Arc::new(RwLock::new(LruVerifierCache::new()))
@ -266,9 +268,9 @@ mod test {
#[test]
fn blind_simple_tx() {
let keychain = ExtKeychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id3 = keychain.derive_key_id(3).unwrap();
let key_id1 = ExtKeychainPath::new(1, 1, 0, 0, 0).to_identifier();
let key_id2 = ExtKeychainPath::new(1, 2, 0, 0, 0).to_identifier();
let key_id3 = ExtKeychainPath::new(1, 3, 0, 0, 0).to_identifier();
let vc = verifier_cache();
@ -288,9 +290,9 @@ mod test {
#[test]
fn blind_simple_tx_with_offset() {
let keychain = ExtKeychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id3 = keychain.derive_key_id(3).unwrap();
let key_id1 = ExtKeychainPath::new(1, 1, 0, 0, 0).to_identifier();
let key_id2 = ExtKeychainPath::new(1, 2, 0, 0, 0).to_identifier();
let key_id3 = ExtKeychainPath::new(1, 3, 0, 0, 0).to_identifier();
let vc = verifier_cache();
@ -310,8 +312,8 @@ mod test {
#[test]
fn blind_simpler_tx() {
let keychain = ExtKeychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id1 = ExtKeychainPath::new(1, 1, 0, 0, 0).to_identifier();
let key_id2 = ExtKeychainPath::new(1, 2, 0, 0, 0).to_identifier();
let vc = verifier_cache();

View file

@ -25,9 +25,9 @@ fn create_nonce<K>(k: &K, commit: &Commitment) -> Result<SecretKey, Error>
where
K: Keychain,
{
// hash(commit|masterkey) as nonce
let root_key = k.root_key_id();
let res = blake2::blake2b::blake2b(32, &commit.0, &root_key.to_bytes()[..]);
// hash(commit|wallet root secret key (m)) as nonce
let root_key = k.derive_key(&K::root_key_id())?.secret_key;
let res = blake2::blake2b::blake2b(32, &commit.0, &root_key.0[..]);
let res = res.as_bytes();
let mut ret_val = [0; 32];
for i in 0..res.len() {
@ -53,9 +53,11 @@ where
K: Keychain,
{
let commit = k.commit(amount, key_id)?;
let skey = k.derived_key(key_id)?;
let skey = k.derive_key(key_id)?;
let nonce = create_nonce(k, &commit)?;
Ok(k.secp().bullet_proof(amount, skey, nonce, extra_data))
let message = ProofMessage::from_bytes(&key_id.serialize_path());
Ok(k.secp()
.bullet_proof(amount, skey.secret_key, nonce, extra_data, Some(message)))
}
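A hedged sketch of what the two inputs above boil down to: the rewind nonce is blake2b(32, commit, root_secret) exactly as in create_nonce, and the proof message is simply the 16-byte serialized BIP32 path (four big-endian child indices, depth byte dropped). It assumes the blake2-rfc crate already used by this code and is illustrative only.

// Illustrative sketch only (assumes the blake2-rfc crate).
extern crate blake2_rfc;
use blake2_rfc::blake2b::blake2b;

// hash(commit | wallet root secret key) as rewind nonce, as in create_nonce above
fn rewind_nonce_sketch(commit: &[u8; 33], root_secret: &[u8; 32]) -> [u8; 32] {
	let hash = blake2b(32, commit, root_secret);
	let mut nonce = [0u8; 32];
	nonce.copy_from_slice(hash.as_bytes());
	nonce
}

// serialize_path(): the identifier minus its depth byte, embedded as the proof message
fn proof_message_sketch(path: [u32; 4]) -> [u8; 16] {
	let mut msg = [0u8; 16];
	for (i, d) in path.iter().enumerate() {
		msg[i * 4..i * 4 + 4].copy_from_slice(&d.to_be_bytes());
	}
	msg
}

fn main() {
	let nonce = rewind_nonce_sketch(&[0u8; 33], &[1u8; 32]);
	let msg = proof_message_sketch([0, 0, 5, 0]); // e.g. an output at m/0/0/5 (depth 3)
	println!("{:?} / {:?}", &nonce[..4], &msg);
}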
/// Verify a proof

View file

@ -51,6 +51,7 @@ where
let over_commit = secp.commit_value(reward(fees))?;
let out_commit = output.commitment();
let excess = secp.commit_sum(vec![out_commit], vec![over_commit])?;
let pubkey = excess.to_pubkey(&secp)?;
// NOTE: Remember we sign the fee *and* the lock_height.
// For a coinbase output the fee is 0 and the lock_height is
@ -59,7 +60,7 @@ where
// This output will not be spendable earlier than lock_height (and we sign this
// here).
let msg = secp::Message::from_slice(&kernel_sig_msg(0, height))?;
let sig = aggsig::sign_from_key_id(&secp, keychain, &msg, &key_id)?;
let sig = aggsig::sign_from_key_id(&secp, keychain, &msg, &key_id, Some(&pubkey))?;
let proof = TxKernel {
features: KernelFeatures::COINBASE_KERNEL,

View file

@ -162,6 +162,7 @@ impl Slate {
sec_key,
sec_nonce,
&self.pub_nonce_sum(keychain.secp())?,
Some(&self.pub_blind_sum(keychain.secp())?),
self.fee,
self.lock_height,
)?;
@ -304,6 +305,7 @@ impl Slate {
p.part_sig.as_ref().unwrap(),
&self.pub_nonce_sum(secp)?,
&p.public_blind_excess,
Some(&self.pub_blind_sum(secp)?),
self.fee,
self.lock_height,
)?;
@ -348,6 +350,7 @@ impl Slate {
&keychain.secp(),
&final_sig,
&final_pubkey,
Some(&final_pubkey),
self.fee,
self.lock_height,
)?;

View file

@ -27,11 +27,12 @@ use serde_json as json;
use core::core::hash::Hashed;
use core::core::Transaction;
use core::ser;
use keychain::Keychain;
use keychain::{Identifier, Keychain};
use libtx::slate::Slate;
use libwallet::internal::{selection, tx, updater};
use libwallet::internal::{keys, selection, tx, updater};
use libwallet::types::{
BlockFees, CbData, OutputData, TxLogEntry, TxWrapper, WalletBackend, WalletClient, WalletInfo,
AcctPathMapping, BlockFees, CbData, OutputData, TxLogEntry, TxWrapper, WalletBackend,
WalletClient, WalletInfo,
};
use libwallet::{Error, ErrorKind};
use util::secp::pedersen;
@ -78,6 +79,7 @@ where
) -> Result<(bool, Vec<(OutputData, pedersen::Commitment)>), Error> {
let mut w = self.wallet.lock().unwrap();
w.open_with_credentials()?;
let parent_key_id = w.parent_key_id();
let mut validated = false;
if refresh_from_node {
@ -86,7 +88,7 @@ where
let res = Ok((
validated,
updater::retrieve_outputs(&mut **w, include_spent, tx_id)?,
updater::retrieve_outputs(&mut **w, include_spent, tx_id, &parent_key_id)?,
));
w.close()?;
@ -102,13 +104,17 @@ where
) -> Result<(bool, Vec<TxLogEntry>), Error> {
let mut w = self.wallet.lock().unwrap();
w.open_with_credentials()?;
let parent_key_id = w.parent_key_id();
let mut validated = false;
if refresh_from_node {
validated = self.update_outputs(&mut w);
}
let res = Ok((validated, updater::retrieve_txs(&mut **w, tx_id)?));
let res = Ok((
validated,
updater::retrieve_txs(&mut **w, tx_id, &parent_key_id)?,
));
w.close()?;
res
@ -121,19 +127,32 @@ where
) -> Result<(bool, WalletInfo), Error> {
let mut w = self.wallet.lock().unwrap();
w.open_with_credentials()?;
let parent_key_id = w.parent_key_id();
let mut validated = false;
if refresh_from_node {
validated = self.update_outputs(&mut w);
}
let wallet_info = updater::retrieve_info(&mut **w)?;
let wallet_info = updater::retrieve_info(&mut **w, &parent_key_id)?;
let res = Ok((validated, wallet_info));
w.close()?;
res
}
/// Return list of existing account -> Path mappings
pub fn accounts(&mut self) -> Result<Vec<AcctPathMapping>, Error> {
let mut w = self.wallet.lock().unwrap();
keys::accounts(&mut **w)
}
/// Create a new account path
pub fn new_account_path(&mut self, label: &str) -> Result<Identifier, Error> {
let mut w = self.wallet.lock().unwrap();
keys::new_acct_path(&mut **w, label)
}
/// Issues a send transaction and sends to recipient
pub fn issue_send_tx(
&mut self,
@ -146,6 +165,7 @@ where
) -> Result<Slate, Error> {
let mut w = self.wallet.lock().unwrap();
w.open_with_credentials()?;
let parent_key_id = w.parent_key_id();
let client;
let mut slate_out: Slate;
@ -159,6 +179,7 @@ where
max_outputs,
num_change_outputs,
selection_strategy_is_use_all,
&parent_key_id,
)?;
lock_fn_out = lock_fn;
@ -197,6 +218,7 @@ where
) -> Result<Slate, Error> {
let mut w = self.wallet.lock().unwrap();
w.open_with_credentials()?;
let parent_key_id = w.parent_key_id();
let (slate, context, lock_fn) = tx::create_send_tx(
&mut **w,
@ -205,6 +227,7 @@ where
max_outputs,
num_change_outputs,
selection_strategy_is_use_all,
&parent_key_id,
)?;
if write_to_disk {
let mut pub_tx = File::create(dest)?;
@ -254,12 +277,13 @@ where
pub fn cancel_tx(&mut self, tx_id: u32) -> Result<(), Error> {
let mut w = self.wallet.lock().unwrap();
w.open_with_credentials()?;
let parent_key_id = w.parent_key_id();
if !self.update_outputs(&mut w) {
return Err(ErrorKind::TransactionCancellationError(
"Can't contact running Grin node. Not Cancelling.",
))?;
}
tx::cancel_tx(&mut **w, tx_id)?;
tx::cancel_tx(&mut **w, &parent_key_id, tx_id)?;
w.close()?;
Ok(())
}
@ -273,7 +297,14 @@ where
) -> Result<(), Error> {
let mut w = self.wallet.lock().unwrap();
w.open_with_credentials()?;
let tx_burn = tx::issue_burn_tx(&mut **w, amount, minimum_confirmations, max_outputs)?;
let parent_key_id = w.parent_key_id();
let tx_burn = tx::issue_burn_tx(
&mut **w,
amount,
minimum_confirmations,
max_outputs,
&parent_key_id,
)?;
let tx_hex = util::to_hex(ser::ser_vec(&tx_burn).unwrap());
w.client().post_tx(&TxWrapper { tx_hex: tx_hex }, false)?;
w.close()?;
@ -312,7 +343,8 @@ where
let (confirmed, tx_hex) = {
let mut w = self.wallet.lock().unwrap();
w.open_with_credentials()?;
let res = tx::retrieve_tx_hex(&mut **w, tx_id)?;
let parent_key_id = w.parent_key_id();
let res = tx::retrieve_tx_hex(&mut **w, &parent_key_id, tx_id)?;
w.close()?;
res
};
@ -345,8 +377,9 @@ where
let (confirmed, tx_hex) = {
let mut w = self.wallet.lock().unwrap();
w.open_with_credentials()?;
let parent_key_id = w.parent_key_id();
client = w.client().clone();
let res = tx::retrieve_tx_hex(&mut **w, tx_id)?;
let res = tx::retrieve_tx_hex(&mut **w, &parent_key_id, tx_id)?;
w.close()?;
res
};
@ -400,19 +433,13 @@ where
w.client().get_chain_height()
};
match res {
Ok(height) => {
let mut w = self.wallet.lock().unwrap();
w.close()?;
Ok((height, true))
}
Ok(height) => Ok((height, true)),
Err(_) => {
let outputs = self.retrieve_outputs(true, false, None)?;
let height = match outputs.1.iter().map(|(out, _)| out.height).max() {
Some(height) => height,
None => 0,
};
let mut w = self.wallet.lock().unwrap();
w.close()?;
Ok((height, false))
}
}
@ -420,7 +447,8 @@ where
/// Attempt to update outputs in wallet, return whether it was successful
fn update_outputs(&self, w: &mut W) -> bool {
match updater::refresh_outputs(&mut *w) {
let parent_key_id = w.parent_key_id();
match updater::refresh_outputs(&mut *w, &parent_key_id) {
Ok(_) => true,
Err(_) => false,
}
@ -477,10 +505,11 @@ where
let mut wallet = self.wallet.lock().unwrap();
wallet.open_with_credentials()?;
let parent_key_id = wallet.parent_key_id();
// create an output using the amount in the slate
let (_, mut context, receiver_create_fn) =
selection::build_recipient_output_with_slate(&mut **wallet, &mut slate)?;
selection::build_recipient_output_with_slate(&mut **wallet, &mut slate, parent_key_id)?;
// fill public keys
let _ = slate.fill_round_1(
@ -506,7 +535,8 @@ where
pub fn receive_tx(&mut self, slate: &mut Slate) -> Result<(), Error> {
let mut w = self.wallet.lock().unwrap();
w.open_with_credentials()?;
let res = tx::receive_tx(&mut **w, slate);
let parent_key_id = w.parent_key_id();
let res = tx::receive_tx(&mut **w, slate, &parent_key_id);
w.close()?;
if let Err(e) = res {

View file

@ -96,11 +96,11 @@ where
let mut apis = ApiServer::new();
info!(LOGGER, "Starting HTTP Owner API server at {}.", addr);
let socket_addr: SocketAddr = addr.parse().expect("unable to parse socket address");
let api_thread =
apis.start(socket_addr, router, tls_config)
.context(ErrorKind::GenericError(
"API thread failed to start".to_string(),
))?;
let api_thread = apis
.start(socket_addr, router, tls_config)
.context(ErrorKind::GenericError(
"API thread failed to start".to_string(),
))?;
api_thread
.join()
.map_err(|e| ErrorKind::GenericError(format!("API thread panicked :{:?}", e)).into())
@ -128,11 +128,11 @@ where
let mut apis = ApiServer::new();
info!(LOGGER, "Starting HTTP Foreign API server at {}.", addr);
let socket_addr: SocketAddr = addr.parse().expect("unable to parse socket address");
let api_thread =
apis.start(socket_addr, router, tls_config)
.context(ErrorKind::GenericError(
"API thread failed to start".to_string(),
))?;
let api_thread = apis
.start(socket_addr, router, tls_config)
.context(ErrorKind::GenericError(
"API thread failed to start".to_string(),
))?;
api_thread
.join()
@ -339,20 +339,20 @@ where
Ok(id) => match api.cancel_tx(id) {
Ok(_) => ok(()),
Err(e) => {
error!(LOGGER, "finalize_tx: failed with error: {}", e);
error!(LOGGER, "cancel_tx: failed with error: {}", e);
err(e)
}
},
Err(e) => {
error!(LOGGER, "finalize_tx: could not parse id: {}", e);
error!(LOGGER, "cancel_tx: could not parse id: {}", e);
err(ErrorKind::TransactionCancellationError(
"finalize_tx: cannot cancel transaction. Could not parse id in request.",
"cancel_tx: cannot cancel transaction. Could not parse id in request.",
).into())
}
})
} else {
Box::new(err(ErrorKind::TransactionCancellationError(
"finalize_tx: Cannot cancel transaction. Missing id param in request.",
"cancel_tx: Cannot cancel transaction. Missing id param in request.",
).into()))
}
}

View file

@ -164,6 +164,18 @@ pub enum ErrorKind {
#[fail(display = "Transaction building not completed: {}", _0)]
TransactionBuildingNotCompleted(u32),
/// Invalid BIP-32 Depth
#[fail(display = "Invalid BIP32 Depth (must be 1 or greater)")]
InvalidBIP32Depth,
/// Attempt to add an account that exists
#[fail(display = "Account Label '{}' already exists", _0)]
AccountLabelAlreadyExists(String),
/// Reference unknown account label
#[fail(display = "Unknown Account Label '{}'", _0)]
UnknownAccountLabel(String),
/// Other
#[fail(display = "Generic error: {}", _0)]
GenericError(String),

View file

@ -13,21 +13,19 @@
// limitations under the License.
//! Wallet key management functions
use keychain::{Identifier, Keychain};
use libwallet::error::Error;
use libwallet::types::{WalletBackend, WalletClient};
use keychain::{ChildNumber, ExtKeychain, Identifier, Keychain};
use libwallet::error::{Error, ErrorKind};
use libwallet::types::{AcctPathMapping, WalletBackend, WalletClient};
/// Get next available key in the wallet
pub fn next_available_key<T: ?Sized, C, K>(wallet: &mut T) -> Result<(Identifier, u32), Error>
/// Get next available key in the wallet for a given parent
pub fn next_available_key<T: ?Sized, C, K>(wallet: &mut T) -> Result<Identifier, Error>
where
T: WalletBackend<C, K>,
C: WalletClient,
K: Keychain,
{
let root_key_id = wallet.keychain().root_key_id();
let derivation = wallet.next_child(root_key_id.clone())?;
let key_id = wallet.keychain().derive_key_id(derivation)?;
Ok((key_id, derivation))
let child = wallet.next_child()?;
Ok(child)
}
/// Retrieve an existing key from a wallet
@ -45,3 +43,77 @@ where
let derivation = existing.n_child;
Ok((key_id, derivation))
}
/// Returns a list of account to BIP32 path mappings
pub fn accounts<T: ?Sized, C, K>(wallet: &mut T) -> Result<Vec<AcctPathMapping>, Error>
where
T: WalletBackend<C, K>,
C: WalletClient,
K: Keychain,
{
Ok(wallet.acct_path_iter().collect())
}
/// Adds a new parent account path with a given label
pub fn new_acct_path<T: ?Sized, C, K>(wallet: &mut T, label: &str) -> Result<Identifier, Error>
where
T: WalletBackend<C, K>,
C: WalletClient,
K: Keychain,
{
let label = label.to_owned();
if let Some(_) = wallet.acct_path_iter().find(|l| l.label == label) {
return Err(ErrorKind::AccountLabelAlreadyExists(label.clone()).into());
}
// We're always using paths at m/k/0 for parent keys for output derivations
// so find the highest of those, then increment (to conform with external/internal
// derivation chains in BIP32 spec)
let highest_entry = wallet.acct_path_iter().max_by(|a, b| {
<u32>::from(a.path.to_path().path[0]).cmp(&<u32>::from(b.path.to_path().path[0]))
});
let return_id = {
if let Some(e) = highest_entry {
let mut p = e.path.to_path();
p.path[0] = ChildNumber::from(<u32>::from(p.path[0]) + 1);
p.to_identifier()
} else {
ExtKeychain::derive_key_id(2, 0, 0, 0, 0)
}
};
let save_path = AcctPathMapping {
label: label.to_owned(),
path: return_id.clone(),
};
let mut batch = wallet.batch()?;
batch.save_acct_path(save_path)?;
batch.commit()?;
Ok(return_id)
}
/// Adds/sets a particular account path with a given label
pub fn set_acct_path<T: ?Sized, C, K>(
wallet: &mut T,
label: &str,
path: &Identifier,
) -> Result<(), Error>
where
T: WalletBackend<C, K>,
C: WalletClient,
K: Keychain,
{
let label = label.to_owned();
let save_path = AcctPathMapping {
label: label.to_owned(),
path: path.clone(),
};
let mut batch = wallet.batch()?;
batch.save_acct_path(save_path)?;
batch.commit()?;
Ok(())
}
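To make the account layout above concrete: the default account's parent path is m/0/0 (ExtKeychain::derive_key_id(2, 0, 0, 0, 0)), and each new label takes the next free first-level index, so successive accounts live at m/1/0, m/2/0, and so on, with individual outputs derived one level below the parent. A minimal sketch, assuming the keychain types above are in scope:

// Illustrative only: parent key path for the n-th account under this scheme.
use keychain::ExtKeychainPath;

fn nth_account_parent(n: u32) -> keychain::Identifier {
	// depth 2: first level = account index, second level fixed at 0
	ExtKeychainPath::new(2, n, 0, 0, 0).to_identifier()
}

fn main() {
	println!("{}", nth_account_parent(0).to_bip_32_string()); // m/0/0 ("default")
	println!("{}", nth_account_parent(1).to_bip_32_string()); // m/1/0
}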

View file

@ -14,10 +14,12 @@
//! Functions to restore a wallet's outputs from just the master seed
use core::global;
use keychain::{Identifier, Keychain};
use keychain::{ExtKeychain, Identifier, Keychain};
use libtx::proof;
use libwallet::internal::keys;
use libwallet::types::*;
use libwallet::Error;
use std::collections::HashMap;
use util::secp::{key::SecretKey, pedersen};
use util::LOGGER;
@ -26,9 +28,9 @@ struct OutputResult {
///
pub commit: pedersen::Commitment,
///
pub key_id: Option<Identifier>,
pub key_id: Identifier,
///
pub n_child: Option<u32>,
pub n_child: u32,
///
pub value: u64,
///
@ -79,10 +81,14 @@ where
*height
};
// TODO: Output paths are always going to be length 3 for now, but easy enough to grind
// through to find the right path if required later
let key_id = Identifier::from_serialized_path(3u8, &info.message.as_bytes());
wallet_outputs.push(OutputResult {
commit: *commit,
key_id: None,
n_child: None,
key_id: key_id.clone(),
n_child: key_id.to_path().last_path_index(),
value: info.value,
height: *height,
lock_height: lock_height,
@ -93,58 +99,6 @@ where
Ok(wallet_outputs)
}
/// Attempts to populate a list of outputs with their
/// correct child indices based on the root key
fn populate_child_indices<T, C, K>(
wallet: &mut T,
outputs: &mut Vec<OutputResult>,
max_derivations: u32,
) -> Result<(), Error>
where
T: WalletBackend<C, K>,
C: WalletClient,
K: Keychain,
{
info!(
LOGGER,
"Attempting to populate child indices and key identifiers for {} identified outputs",
outputs.len()
);
// keep track of child keys we've already found, and avoid some EC ops
let mut found_child_indices: Vec<u32> = vec![];
for output in outputs.iter_mut() {
let mut found = false;
for i in 1..max_derivations {
// seems to be a bug allowing multiple child keys at the moment
/*if found_child_indices.contains(&i){
continue;
}*/
let key_id = wallet.keychain().derive_key_id(i as u32)?;
let b = wallet.keychain().derived_key(&key_id)?;
if output.blinding != b {
continue;
}
found = true;
found_child_indices.push(i);
info!(
LOGGER,
"Key index {} found for output {:?}", i, output.commit
);
output.key_id = Some(key_id);
output.n_child = Some(i);
break;
}
if !found {
warn!(
LOGGER,
"Unable to find child key index for: {:?}", output.commit,
);
}
}
Ok(())
}
/// Restore a wallet
pub fn restore<T, C, K>(wallet: &mut T) -> Result<(), Error>
where
@ -152,8 +106,6 @@ where
C: WalletClient,
K: Keychain,
{
let max_derivations = 1_000_000;
// Don't proceed if wallet_data has anything in it
let is_empty = wallet.iter().next().is_none();
if !is_empty {
@ -195,29 +147,34 @@ where
result_vec.len(),
);
populate_child_indices(wallet, &mut result_vec, max_derivations)?;
let mut found_parents: HashMap<Identifier, u32> = HashMap::new();
// Now save what we have
let root_key_id = wallet.keychain().root_key_id();
let current_chain_height = wallet.client().get_chain_height()?;
let mut batch = wallet.batch()?;
let mut max_child_index = 0;
for output in result_vec {
if output.key_id.is_some() && output.n_child.is_some() {
{
let mut batch = wallet.batch()?;
for output in result_vec {
let parent_key_id = output.key_id.parent_path();
if !found_parents.contains_key(&parent_key_id) {
found_parents.insert(parent_key_id.clone(), 0);
}
let log_id = batch.next_tx_log_id(&parent_key_id)?;
let mut tx_log_entry = None;
// wallet update will create tx log entries when it finds confirmed coinbase
// transactions
if !output.is_coinbase {
let log_id = batch.next_tx_log_id(root_key_id.clone())?;
// also keep tx log updated so everything still tallies
let mut t = TxLogEntry::new(TxLogEntryType::TxReceived, log_id);
let mut t =
TxLogEntry::new(parent_key_id.clone(), TxLogEntryType::TxReceived, log_id);
t.amount_credited = output.value;
t.num_outputs = 1;
tx_log_entry = Some(log_id);
let _ = batch.save_tx_log_entry(t);
batch.save_tx_log_entry(t, &parent_key_id)?;
}
let _ = batch.save(OutputData {
root_key_id: root_key_id.clone(),
key_id: output.key_id.unwrap(),
n_child: output.n_child.unwrap(),
root_key_id: parent_key_id.clone(),
key_id: output.key_id,
n_child: output.n_child,
value: output.value,
status: OutputStatus::Unconfirmed,
height: output.height,
@ -226,28 +183,28 @@ where
tx_log_entry: tx_log_entry,
});
max_child_index = if max_child_index >= output.n_child.unwrap() {
max_child_index
} else {
output.n_child.unwrap()
let max_child_index = found_parents.get(&parent_key_id).unwrap().clone();
if output.n_child >= max_child_index {
found_parents.insert(parent_key_id.clone(), output.n_child);
};
} else {
warn!(
LOGGER,
"Commit {:?} identified but unable to recover key. Output has not been restored.",
output.commit
);
}
batch.commit()?;
}
// restore labels, account paths and child derivation indices
let label_base = "account";
let mut index = 1;
for (path, max_child_index) in found_parents.iter() {
if *path == ExtKeychain::derive_key_id(2, 0, 0, 0, 0) {
//default path already exists
continue;
}
let label = format!("{}_{}", label_base, index);
keys::set_acct_path(wallet, &label, path)?;
index = index + 1;
{
let mut batch = wallet.batch()?;
batch.save_child_index(path, max_child_index + 1)?;
}
}
if max_child_index > 0 {
let details = WalletDetails {
last_child_index: max_child_index + 1,
last_confirmed_height: current_chain_height,
};
batch.save_details(root_key_id.clone(), details)?;
}
batch.commit()?;
Ok(())
}
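As a rough illustration of the bookkeeping above: if outputs were recovered under parents m/1/0 and m/2/0 in addition to the default path, the restored wallet would be expected to end up with mappings along these lines, with the per-parent child index saved one past the highest recovered output:

	// "default"   -> ExtKeychain::derive_key_id(2, 0, 0, 0, 0)  (m/0/0, pre-existing)
	// "account_1" -> ExtKeychain::derive_key_id(2, 1, 0, 0, 0)  (m/1/0)
	// "account_2" -> ExtKeychain::derive_key_id(2, 2, 0, 0, 0)  (m/2/0)
	// plus, for each parent: save_child_index(parent, max_recovered_n_child + 1)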

View file

@ -37,6 +37,7 @@ pub fn build_send_tx_slate<T: ?Sized, C, K>(
max_outputs: usize,
change_outputs: usize,
selection_strategy_is_use_all: bool,
parent_key_id: Identifier,
) -> Result<
(
Slate,
@ -59,6 +60,7 @@ where
max_outputs,
change_outputs,
selection_strategy_is_use_all,
&parent_key_id,
)?;
// Create public slate
@ -85,22 +87,19 @@ where
}
// Store change output(s)
for (_, derivation) in &change_amounts_derivations {
let change_id = keychain.derive_key_id(derivation.clone()).unwrap();
context.add_output(&change_id);
for (_, id) in &change_amounts_derivations {
context.add_output(&id);
}
let lock_inputs = context.get_inputs().clone();
let _lock_outputs = context.get_outputs().clone();
let root_key_id = keychain.root_key_id();
// Return a closure to acquire wallet lock and lock the coins being spent
// so we avoid accidental double spend attempt.
let update_sender_wallet_fn = move |wallet: &mut T, tx_hex: &str| {
let mut batch = wallet.batch()?;
let log_id = batch.next_tx_log_id(root_key_id.clone())?;
let mut t = TxLogEntry::new(TxLogEntryType::TxSent, log_id);
let log_id = batch.next_tx_log_id(&parent_key_id)?;
let mut t = TxLogEntry::new(parent_key_id.clone(), TxLogEntryType::TxSent, log_id);
t.tx_slate_id = Some(slate_id);
t.fee = Some(fee);
t.tx_hex = Some(tx_hex.to_owned());
@ -116,14 +115,14 @@ where
t.amount_debited = amount_debited;
// write the output representing our change
for (change_amount, change_derivation) in &change_amounts_derivations {
let change_id = keychain.derive_key_id(change_derivation.clone()).unwrap();
for (change_amount, id) in &change_amounts_derivations {
let change_id = keychain.derive_key(&id).unwrap();
t.num_outputs += 1;
t.amount_credited += change_amount;
batch.save(OutputData {
root_key_id: root_key_id.clone(),
key_id: change_id.clone(),
n_child: change_derivation.clone(),
root_key_id: parent_key_id.clone(),
key_id: id.clone(),
n_child: id.to_path().last_path_index(),
value: change_amount.clone(),
status: OutputStatus::Unconfirmed,
height: current_height,
@ -132,7 +131,7 @@ where
tx_log_entry: Some(log_id),
})?;
}
batch.save_tx_log_entry(t)?;
batch.save_tx_log_entry(t, &parent_key_id)?;
batch.commit()?;
Ok(())
};
@ -147,6 +146,7 @@ where
pub fn build_recipient_output_with_slate<T: ?Sized, C, K>(
wallet: &mut T,
slate: &mut Slate,
parent_key_id: Identifier,
) -> Result<
(
Identifier,
@ -161,10 +161,9 @@ where
K: Keychain,
{
// Create a potential output for this transaction
let (key_id, derivation) = keys::next_available_key(wallet).unwrap();
let key_id = keys::next_available_key(wallet).unwrap();
let keychain = wallet.keychain().clone();
let root_key_id = keychain.root_key_id();
let key_id_inner = key_id.clone();
let amount = slate.amount;
let height = slate.height;
@ -187,15 +186,15 @@ where
// (up to the caller to decide when to do)
let wallet_add_fn = move |wallet: &mut T| {
let mut batch = wallet.batch()?;
let log_id = batch.next_tx_log_id(root_key_id.clone())?;
let mut t = TxLogEntry::new(TxLogEntryType::TxReceived, log_id);
let log_id = batch.next_tx_log_id(&parent_key_id)?;
let mut t = TxLogEntry::new(parent_key_id.clone(), TxLogEntryType::TxReceived, log_id);
t.tx_slate_id = Some(slate_id);
t.amount_credited = amount;
t.num_outputs = 1;
batch.save(OutputData {
root_key_id: root_key_id,
key_id: key_id_inner,
n_child: derivation,
root_key_id: parent_key_id.clone(),
key_id: key_id_inner.clone(),
n_child: key_id_inner.to_path().last_path_index(),
value: amount,
status: OutputStatus::Unconfirmed,
height: height,
@ -203,7 +202,7 @@ where
is_coinbase: false,
tx_log_entry: Some(log_id),
})?;
batch.save_tx_log_entry(t)?;
batch.save_tx_log_entry(t, &parent_key_id)?;
batch.commit()?;
Ok(())
};
@ -222,13 +221,14 @@ pub fn select_send_tx<T: ?Sized, C, K>(
max_outputs: usize,
change_outputs: usize,
selection_strategy_is_use_all: bool,
parent_key_id: &Identifier,
) -> Result<
(
Vec<Box<build::Append<K>>>,
Vec<OutputData>,
Vec<(u64, u32)>, // change amounts and derivations
u64, // amount
u64, // fee
Vec<(u64, Identifier)>, // change amounts and derivations
u64, // amount
u64, // fee
),
Error,
>
@ -245,6 +245,7 @@ where
minimum_confirmations,
max_outputs,
selection_strategy_is_use_all,
parent_key_id,
);
// sender is responsible for setting the fee on the partial tx
@ -300,6 +301,7 @@ where
minimum_confirmations,
max_outputs,
selection_strategy_is_use_all,
parent_key_id,
);
fee = tx_fee(coins.len(), num_outputs, 1, None);
total = coins.iter().map(|c| c.value).sum();
@ -309,7 +311,7 @@ where
// build transaction skeleton with inputs and change
let (mut parts, change_amounts_derivations) =
inputs_and_change(&coins, wallet, amount, fee, change_outputs)?;
inputs_and_change(&coins, wallet, amount, fee, change_outputs, parent_key_id)?;
// This is more proof of concept than anything but here we set lock_height
// on tx being sent (based on current chain height via api).
@ -325,7 +327,8 @@ pub fn inputs_and_change<T: ?Sized, C, K>(
amount: u64,
fee: u64,
num_change_outputs: usize,
) -> Result<(Vec<Box<build::Append<K>>>, Vec<(u64, u32)>), Error>
parent_key_id: &Identifier,
) -> Result<(Vec<Box<build::Append<K>>>, Vec<(u64, Identifier)>), Error>
where
T: WalletBackend<C, K>,
C: WalletClient,
@ -345,11 +348,10 @@ where
// build inputs using the appropriate derived key_ids
for coin in coins {
let key_id = wallet.keychain().derive_key_id(coin.n_child)?;
if coin.is_coinbase {
parts.push(build::coinbase_input(coin.value, key_id));
parts.push(build::coinbase_input(coin.value, coin.key_id.clone()));
} else {
parts.push(build::input(coin.value, key_id));
parts.push(build::input(coin.value, coin.key_id.clone()));
}
}
@ -378,11 +380,9 @@ where
};
let keychain = wallet.keychain().clone();
let root_key_id = keychain.root_key_id();
let change_derivation = wallet.next_child(root_key_id.clone()).unwrap();
let change_key = keychain.derive_key_id(change_derivation).unwrap();
let change_key = wallet.next_child().unwrap();
change_amounts_derivations.push((change_amount, change_derivation));
change_amounts_derivations.push((change_amount, change_key.clone()));
parts.push(build::output(change_amount, change_key));
}
}
@ -404,6 +404,7 @@ pub fn select_coins<T: ?Sized, C, K>(
minimum_confirmations: u64,
max_outputs: usize,
select_all: bool,
parent_key_id: &Identifier,
) -> (usize, Vec<OutputData>)
// max_outputs_available, Outputs
where
@ -412,11 +413,10 @@ where
K: Keychain,
{
// first find all eligible outputs based on number of confirmations
let root_key_id = wallet.keychain().root_key_id();
let mut eligible = wallet
.iter()
.filter(|out| {
out.root_key_id == root_key_id
out.root_key_id == *parent_key_id
&& out.eligible_to_spend(current_height, minimum_confirmations)
})
.collect::<Vec<OutputData>>();

View file

@ -28,7 +28,11 @@ use util::LOGGER;
/// Receive a transaction, modifying the slate accordingly (which can then be
/// sent back to sender for posting)
pub fn receive_tx<T: ?Sized, C, K>(wallet: &mut T, slate: &mut Slate) -> Result<(), Error>
pub fn receive_tx<T: ?Sized, C, K>(
wallet: &mut T,
slate: &mut Slate,
parent_key_id: &Identifier,
) -> Result<(), Error>
where
T: WalletBackend<C, K>,
C: WalletClient,
@ -36,7 +40,7 @@ where
{
// create an output using the amount in the slate
let (_, mut context, receiver_create_fn) =
selection::build_recipient_output_with_slate(wallet, slate)?;
selection::build_recipient_output_with_slate(wallet, slate, parent_key_id.clone())?;
// fill public keys
let _ = slate.fill_round_1(
@ -64,6 +68,7 @@ pub fn create_send_tx<T: ?Sized, C, K>(
max_outputs: usize,
num_change_outputs: usize,
selection_strategy_is_use_all: bool,
parent_key_id: &Identifier,
) -> Result<
(
Slate,
@ -80,7 +85,7 @@ where
// Get lock height
let current_height = wallet.client().get_chain_height()?;
// ensure outputs we're selecting are up to date
updater::refresh_outputs(wallet)?;
updater::refresh_outputs(wallet, parent_key_id)?;
let lock_height = current_height;
@ -101,6 +106,7 @@ where
max_outputs,
num_change_outputs,
selection_strategy_is_use_all,
parent_key_id.clone(),
)?;
// Generate a kernel offset and subtract from our context's secret key. Store
@ -137,13 +143,17 @@ where
}
/// Rollback outputs associated with a transaction in the wallet
pub fn cancel_tx<T: ?Sized, C, K>(wallet: &mut T, tx_id: u32) -> Result<(), Error>
pub fn cancel_tx<T: ?Sized, C, K>(
wallet: &mut T,
parent_key_id: &Identifier,
tx_id: u32,
) -> Result<(), Error>
where
T: WalletBackend<C, K>,
C: WalletClient,
K: Keychain,
{
let tx_vec = updater::retrieve_txs(wallet, Some(tx_id))?;
let tx_vec = updater::retrieve_txs(wallet, Some(tx_id), &parent_key_id)?;
if tx_vec.len() != 1 {
return Err(ErrorKind::TransactionDoesntExist(tx_id))?;
}
@ -155,9 +165,9 @@ where
return Err(ErrorKind::TransactionNotCancellable(tx_id))?;
}
// get outputs associated with tx
let res = updater::retrieve_outputs(wallet, false, Some(tx_id))?;
let res = updater::retrieve_outputs(wallet, false, Some(tx_id), &parent_key_id)?;
let outputs = res.iter().map(|(out, _)| out).cloned().collect();
updater::cancel_tx_and_outputs(wallet, tx, outputs)?;
updater::cancel_tx_and_outputs(wallet, tx, outputs, parent_key_id)?;
Ok(())
}
@ -165,6 +175,7 @@ where
/// as well as whether it's been confirmed
pub fn retrieve_tx_hex<T: ?Sized, C, K>(
wallet: &mut T,
parent_key_id: &Identifier,
tx_id: u32,
) -> Result<(bool, Option<String>), Error>
where
@ -172,7 +183,7 @@ where
C: WalletClient,
K: Keychain,
{
let tx_vec = updater::retrieve_txs(wallet, Some(tx_id))?;
let tx_vec = updater::retrieve_txs(wallet, Some(tx_id), parent_key_id)?;
if tx_vec.len() != 1 {
return Err(ErrorKind::TransactionDoesntExist(tx_id))?;
}
@ -186,6 +197,7 @@ pub fn issue_burn_tx<T: ?Sized, C, K>(
amount: u64,
minimum_confirmations: u64,
max_outputs: usize,
parent_key_id: &Identifier,
) -> Result<Transaction, Error>
where
T: WalletBackend<C, K>,
@ -199,7 +211,7 @@ where
let current_height = wallet.client().get_chain_height()?;
let _ = updater::refresh_outputs(wallet);
let _ = updater::refresh_outputs(wallet, parent_key_id);
// select some spendable coins from the wallet
let (_, coins) = selection::select_coins(
@ -209,14 +221,21 @@ where
minimum_confirmations,
max_outputs,
false,
parent_key_id,
);
debug!(LOGGER, "selected some coins - {}", coins.len());
let fee = tx_fee(coins.len(), 2, 1, None);
let num_change_outputs = 1;
let (mut parts, _) =
selection::inputs_and_change(&coins, wallet, amount, fee, num_change_outputs)?;
let (mut parts, _) = selection::inputs_and_change(
&coins,
wallet,
amount,
fee,
num_change_outputs,
parent_key_id,
)?;
//TODO: If we end up using this, create change output here
@ -232,7 +251,7 @@ where
#[cfg(test)]
mod test {
use keychain::{ExtKeychain, Keychain};
use keychain::{ExtKeychain, ExtKeychainPath, Keychain};
use libtx::build;
#[test]
@ -240,7 +259,7 @@ mod test {
	// based on the public key and amount being spent
fn output_commitment_equals_input_commitment_on_spend() {
let keychain = ExtKeychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id1 = ExtKeychainPath::new(1, 1, 0, 0, 0).to_identifier();
let tx1 = build::transaction(vec![build::output(105, key_id1.clone())], &keychain).unwrap();
let tx2 = build::transaction(vec![build::input(105, key_id1.clone())], &keychain).unwrap();
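ExtKeychainPath::new(depth, d1, d2, d3, d4).to_identifier() is used here as the explicit-path way of naming a key; it is presumably equivalent to the ExtKeychain::derive_key_id form used in the other updated tests, roughly:

	let a = ExtKeychainPath::new(1, 1, 0, 0, 0).to_identifier();
	let b = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
	assert_eq!(a, b); // assumed equivalence between the two constructors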

View file

@ -38,31 +38,31 @@ pub fn retrieve_outputs<T: ?Sized, C, K>(
wallet: &mut T,
show_spent: bool,
tx_id: Option<u32>,
parent_key_id: &Identifier,
) -> Result<Vec<(OutputData, pedersen::Commitment)>, Error>
where
T: WalletBackend<C, K>,
C: WalletClient,
K: Keychain,
{
let root_key_id = wallet.keychain().clone().root_key_id();
// just read the wallet here, no need for a write lock
let mut outputs = wallet
.iter()
.filter(|out| out.root_key_id == root_key_id)
.filter(|out| out.root_key_id == *parent_key_id)
.filter(|out| {
if show_spent {
true
} else {
out.status != OutputStatus::Spent
}
}).collect::<Vec<_>>();
})
.collect::<Vec<_>>();
// only include outputs with a given tx_id if provided
if let Some(id) = tx_id {
outputs = outputs
.into_iter()
.filter(|out| out.tx_log_entry == Some(id))
.filter(|out| out.tx_log_entry == Some(id) && out.root_key_id == *parent_key_id)
.collect::<Vec<_>>();
}
@ -73,7 +73,8 @@ where
.map(|out| {
let commit = wallet.get_commitment(&out.key_id).unwrap();
(out, commit)
}).collect();
})
.collect();
Ok(res)
}
@ -81,6 +82,7 @@ where
pub fn retrieve_txs<T: ?Sized, C, K>(
wallet: &mut T,
tx_id: Option<u32>,
parent_key_id: &Identifier,
) -> Result<Vec<TxLogEntry>, Error>
where
T: WalletBackend<C, K>,
@ -96,21 +98,27 @@ where
vec![]
}
} else {
wallet.tx_log_iter().collect::<Vec<_>>()
wallet
.tx_log_iter()
.filter(|t| t.parent_key_id == *parent_key_id)
.collect::<Vec<_>>()
};
txs.sort_by_key(|tx| tx.creation_ts);
Ok(txs)
}
/// Refreshes the outputs in a wallet with the latest information
/// from a node
pub fn refresh_outputs<T: ?Sized, C, K>(wallet: &mut T) -> Result<(), Error>
pub fn refresh_outputs<T: ?Sized, C, K>(
wallet: &mut T,
parent_key_id: &Identifier,
) -> Result<(), Error>
where
T: WalletBackend<C, K>,
C: WalletClient,
K: Keychain,
{
let height = wallet.client().get_chain_height()?;
refresh_output_state(wallet, height)?;
refresh_output_state(wallet, height, parent_key_id)?;
Ok(())
}
@ -118,6 +126,7 @@ where
/// and a list of outputs we want to query the node for
pub fn map_wallet_outputs<T: ?Sized, C, K>(
wallet: &mut T,
parent_key_id: &Identifier,
) -> Result<HashMap<pedersen::Commitment, Identifier>, Error>
where
T: WalletBackend<C, K>,
@ -126,12 +135,11 @@ where
{
let mut wallet_outputs: HashMap<pedersen::Commitment, Identifier> = HashMap::new();
let keychain = wallet.keychain().clone();
let root_key_id = keychain.root_key_id().clone();
let unspents = wallet
.iter()
.filter(|x| x.root_key_id == root_key_id && x.status != OutputStatus::Spent);
.filter(|x| x.root_key_id == *parent_key_id && x.status != OutputStatus::Spent);
for out in unspents {
let commit = keychain.commit_with_key_index(out.value, out.n_child)?;
let commit = keychain.commit(out.value, &out.key_id)?;
wallet_outputs.insert(commit, out.key_id.clone());
}
Ok(wallet_outputs)
@ -142,6 +150,7 @@ pub fn cancel_tx_and_outputs<T: ?Sized, C, K>(
wallet: &mut T,
tx: TxLogEntry,
outputs: Vec<OutputData>,
parent_key_id: &Identifier,
) -> Result<(), libwallet::Error>
where
T: WalletBackend<C, K>,
@ -149,6 +158,7 @@ where
K: Keychain,
{
let mut batch = wallet.batch()?;
for mut o in outputs {
// unlock locked outputs
if o.status == OutputStatus::Unconfirmed {
@ -166,7 +176,7 @@ where
if tx.tx_type == TxLogEntryType::TxReceived {
tx.tx_type = TxLogEntryType::TxReceivedCancelled;
}
batch.save_tx_log_entry(tx)?;
batch.save_tx_log_entry(tx, parent_key_id)?;
batch.commit()?;
Ok(())
}
@ -177,6 +187,7 @@ pub fn apply_api_outputs<T: ?Sized, C, K>(
wallet_outputs: &HashMap<pedersen::Commitment, Identifier>,
api_outputs: &HashMap<pedersen::Commitment, (String, u64)>,
height: u64,
parent_key_id: &Identifier,
) -> Result<(), libwallet::Error>
where
T: WalletBackend<C, K>,
@ -187,11 +198,10 @@ where
// api output (if it exists) and refresh it in-place in the wallet.
// Note: minimizing the time we spend holding the wallet lock.
{
let root_key_id = wallet.keychain().root_key_id();
let mut details = wallet.details(root_key_id.clone())?;
let last_confirmed_height = wallet.last_confirmed_height()?;
// If the server height is less than our confirmed height, don't apply
// these changes as the chain is syncing, incorrect or forking
if height < details.last_confirmed_height {
if height < last_confirmed_height {
warn!(
LOGGER,
"Not updating outputs as the height of the node's chain \
@ -210,27 +220,32 @@ where
Some(o) => {
// if this is a coinbase tx being confirmed, it's recordable in tx log
if output.is_coinbase && output.status == OutputStatus::Unconfirmed {
let log_id = batch.next_tx_log_id(root_key_id.clone())?;
let mut t = TxLogEntry::new(TxLogEntryType::ConfirmedCoinbase, log_id);
let log_id = batch.next_tx_log_id(parent_key_id)?;
let mut t = TxLogEntry::new(
parent_key_id.clone(),
TxLogEntryType::ConfirmedCoinbase,
log_id,
);
t.confirmed = true;
t.amount_credited = output.value;
t.amount_debited = 0;
t.num_outputs = 1;
t.update_confirmation_ts();
output.tx_log_entry = Some(log_id);
batch.save_tx_log_entry(t)?;
batch.save_tx_log_entry(t, &parent_key_id)?;
}
// also mark the transaction in which this output is involved as confirmed
// note that one involved input/output confirmation SHOULD be enough
// to reliably confirm the tx
if !output.is_coinbase && output.status == OutputStatus::Unconfirmed {
let tx = batch
.tx_log_iter()
.find(|t| Some(t.id) == output.tx_log_entry);
let tx = batch.tx_log_iter().find(|t| {
Some(t.id) == output.tx_log_entry
&& t.parent_key_id == *parent_key_id
});
if let Some(mut t) = tx {
t.update_confirmation_ts();
t.confirmed = true;
batch.save_tx_log_entry(t)?;
batch.save_tx_log_entry(t, &parent_key_id)?;
}
}
output.height = o.1;
@ -242,8 +257,7 @@ where
}
}
{
details.last_confirmed_height = height;
batch.save_details(root_key_id, details)?;
batch.save_last_confirmed_height(parent_key_id, height)?;
}
batch.commit()?;
}
@ -252,7 +266,11 @@ where
/// Builds a single api query to retrieve the latest output data from the node.
/// So we can refresh the local wallet outputs.
fn refresh_output_state<T: ?Sized, C, K>(wallet: &mut T, height: u64) -> Result<(), Error>
fn refresh_output_state<T: ?Sized, C, K>(
wallet: &mut T,
height: u64,
parent_key_id: &Identifier,
) -> Result<(), Error>
where
T: WalletBackend<C, K>,
C: WalletClient,
@ -262,12 +280,12 @@ where
// build a local map of wallet outputs keyed by commit
// and a list of outputs we want to query the node for
let wallet_outputs = map_wallet_outputs(wallet)?;
let wallet_outputs = map_wallet_outputs(wallet, parent_key_id)?;
let wallet_output_keys = wallet_outputs.keys().map(|commit| commit.clone()).collect();
let api_outputs = wallet.client().get_outputs_from_node(wallet_output_keys)?;
apply_api_outputs(wallet, &wallet_outputs, &api_outputs, height)?;
apply_api_outputs(wallet, &wallet_outputs, &api_outputs, height, parent_key_id)?;
clean_old_unconfirmed(wallet, height)?;
Ok(())
}
@ -297,18 +315,19 @@ where
/// Retrieve summary info about the wallet
/// caller should refresh first if desired
pub fn retrieve_info<T: ?Sized, C, K>(wallet: &mut T) -> Result<WalletInfo, Error>
pub fn retrieve_info<T: ?Sized, C, K>(
wallet: &mut T,
parent_key_id: &Identifier,
) -> Result<WalletInfo, Error>
where
T: WalletBackend<C, K>,
C: WalletClient,
K: Keychain,
{
let root_key_id = wallet.keychain().root_key_id();
let current_height = wallet.details(root_key_id.clone())?.last_confirmed_height;
let keychain = wallet.keychain().clone();
let current_height = wallet.last_confirmed_height()?;
let outputs = wallet
.iter()
.filter(|out| out.root_key_id == keychain.root_key_id());
.filter(|out| out.root_key_id == *parent_key_id);
let mut unspent_total = 0;
let mut immature_total = 0;
@ -378,14 +397,13 @@ where
C: WalletClient,
K: Keychain,
{
let root_key_id = wallet.keychain().root_key_id();
let height = block_fees.height;
let lock_height = height + global::coinbase_maturity(height); // ignores on/off spendability around soft fork height
let key_id = block_fees.key_id();
let parent_key_id = wallet.parent_key_id();
let (key_id, derivation) = match key_id {
Some(key_id) => keys::retrieve_existing_key(wallet, key_id)?,
let key_id = match key_id {
Some(key_id) => keys::retrieve_existing_key(wallet, key_id)?.0,
None => keys::next_available_key(wallet)?,
};
@ -393,9 +411,9 @@ where
// Now acquire the wallet lock and write the new output.
let mut batch = wallet.batch()?;
batch.save(OutputData {
root_key_id: root_key_id.clone(),
root_key_id: parent_key_id,
key_id: key_id.clone(),
n_child: derivation,
n_child: key_id.to_path().last_path_index(),
value: reward(block_fees.fees),
status: OutputStatus::Unconfirmed,
height: height,
@ -410,7 +428,7 @@ where
LOGGER,
"receive_coinbase: built candidate output - {:?}, {}",
key_id.clone(),
derivation,
key_id,
);
let mut block_fees = block_fees.clone();

View file

@ -28,7 +28,7 @@ use uuid::Uuid;
use core::core::hash::Hash;
use core::ser;
use keychain::{Identifier, Keychain};
use keychain::{ExtKeychain, Identifier, Keychain};
use libtx::aggsig;
use libtx::slate::Slate;
@ -72,6 +72,16 @@ where
/// Return the client being used
fn client(&mut self) -> &mut C;
/// Set parent key id by stored account name
fn set_parent_key_id_by_name(&mut self, label: &str) -> Result<(), Error>;
	/// Set the BIP32 parent path used for all output-related
	/// functions (essentially the 'account' within a wallet).
fn set_parent_key_id(&mut self, Identifier);
	/// Return the current parent path
fn parent_key_id(&mut self) -> Identifier;
/// Iterate over all output data stored by the backend
fn iter<'a>(&'a self) -> Box<Iterator<Item = OutputData> + 'a>;
@ -90,14 +100,20 @@ where
/// Iterate over all output data stored by the backend
fn tx_log_iter<'a>(&'a self) -> Box<Iterator<Item = TxLogEntry> + 'a>;
/// Iterate over all stored account paths
fn acct_path_iter<'a>(&'a self) -> Box<Iterator<Item = AcctPathMapping> + 'a>;
/// Gets an account path for a given label
fn get_acct_path(&self, label: String) -> Result<Option<AcctPathMapping>, Error>;
/// Create a new write batch to update or remove output data
fn batch<'a>(&'a mut self) -> Result<Box<WalletOutputBatch<K> + 'a>, Error>;
/// Next child ID when we want to create a new output
fn next_child<'a>(&mut self, root_key_id: Identifier) -> Result<u32, Error>;
/// Next child ID when we want to create a new output, based on current parent
fn next_child<'a>(&mut self) -> Result<Identifier, Error>;
/// Return current details
fn details(&mut self, root_key_id: Identifier) -> Result<WalletDetails, Error>;
	/// Last verified height of outputs directly descending from the current parent key
fn last_confirmed_height<'a>(&mut self) -> Result<u64, Error>;
/// Attempt to restore the contents of a wallet from seed
fn restore(&mut self) -> Result<(), Error>;
@ -127,17 +143,30 @@ where
/// Delete data about an output from the backend
fn delete(&mut self, id: &Identifier) -> Result<(), Error>;
/// save wallet details
fn save_details(&mut self, r: Identifier, w: WalletDetails) -> Result<(), Error>;
/// Save last stored child index of a given parent
fn save_child_index(&mut self, parent_key_id: &Identifier, child_n: u32) -> Result<(), Error>;
/// get next tx log entry
fn next_tx_log_id(&mut self, root_key_id: Identifier) -> Result<u32, Error>;
/// Save last confirmed height of outputs for a given parent
fn save_last_confirmed_height(
&mut self,
parent_key_id: &Identifier,
height: u64,
) -> Result<(), Error>;
/// Iterate over all output data stored by the backend
	/// Get the next tx log entry id for the given parent
fn next_tx_log_id(&mut self, parent_key_id: &Identifier) -> Result<u32, Error>;
/// Iterate over tx log data stored by the backend
fn tx_log_iter(&self) -> Box<Iterator<Item = TxLogEntry>>;
/// save a tx log entry
fn save_tx_log_entry(&self, t: TxLogEntry) -> Result<(), Error>;
fn save_tx_log_entry(&self, t: TxLogEntry, parent_id: &Identifier) -> Result<(), Error>;
/// save an account label -> path mapping
fn save_acct_path(&mut self, mapping: AcctPathMapping) -> Result<(), Error>;
/// Iterate over account names stored in backend
fn acct_path_iter(&self) -> Box<Iterator<Item = AcctPathMapping>>;
/// Save an output as locked in the backend
fn lock_output(&mut self, out: &mut OutputData) -> Result<(), Error>;
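Taken together, the batch API is now keyed by the parent 'account' path rather than a single root key. A minimal sketch of a typical write sequence under these signatures, assuming wallet is an open WalletBackend; the entry type and height value are illustrative:

	let parent_key_id = wallet.parent_key_id();
	let mut batch = wallet.batch()?;
	let log_id = batch.next_tx_log_id(&parent_key_id)?;
	let t = TxLogEntry::new(parent_key_id.clone(), TxLogEntryType::TxReceived, log_id);
	batch.save_tx_log_entry(t, &parent_key_id)?;
	batch.save_last_confirmed_height(&parent_key_id, 100)?;
	batch.commit()?;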
@ -419,8 +448,7 @@ impl BlockIdentifier {
	/// convert from a hex string
pub fn from_hex(hex: &str) -> Result<BlockIdentifier, Error> {
let hash =
Hash::from_hex(hex).context(ErrorKind::GenericError("Invalid hex".to_owned()))?;
let hash = Hash::from_hex(hex).context(ErrorKind::GenericError("Invalid hex".to_owned()))?;
Ok(BlockIdentifier(hash))
}
}
@ -508,29 +536,6 @@ pub struct WalletInfo {
pub amount_locked: u64,
}
/// Separate data for a wallet, containing fields
/// that are needed but not necessarily represented
/// via simple rows of OutputData
/// If a wallet is restored from seed this is obviously
/// lost and re-populated as well as possible
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct WalletDetails {
/// The last block height at which the wallet data
/// was confirmed against a node
pub last_confirmed_height: u64,
/// The last child index used
pub last_child_index: u32,
}
impl Default for WalletDetails {
fn default() -> WalletDetails {
WalletDetails {
last_confirmed_height: 0,
last_child_index: 0,
}
}
}
/// Types of transactions that can be contained within a TXLog entry
#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]
pub enum TxLogEntryType {
@ -563,6 +568,8 @@ impl fmt::Display for TxLogEntryType {
/// maps to one or many outputs
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct TxLogEntry {
/// BIP32 account path used for creating this tx
pub parent_key_id: Identifier,
/// Local id for this transaction (distinct from a slate transaction id)
pub id: u32,
/// Slate transaction this entry is associated with, if any
@ -608,8 +615,9 @@ impl ser::Readable for TxLogEntry {
impl TxLogEntry {
/// Return a new blank with TS initialised with next entry
pub fn new(t: TxLogEntryType, id: u32) -> Self {
pub fn new(parent_key_id: Identifier, t: TxLogEntryType, id: u32) -> Self {
TxLogEntry {
parent_key_id: parent_key_id,
tx_type: t,
id: id,
tx_slate_id: None,
@ -631,6 +639,28 @@ impl TxLogEntry {
}
}
/// Map of named accounts to BIP32 paths
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct AcctPathMapping {
/// label used by user
pub label: String,
/// Corresponding parent BIP32 derivation path
pub path: Identifier,
}
impl ser::Writeable for AcctPathMapping {
fn write<W: ser::Writer>(&self, writer: &mut W) -> Result<(), ser::Error> {
writer.write_bytes(&serde_json::to_vec(self).map_err(|_| ser::Error::CorruptedData)?)
}
}
impl ser::Readable for AcctPathMapping {
fn read(reader: &mut ser::Reader) -> Result<AcctPathMapping, ser::Error> {
let data = reader.read_vec()?;
serde_json::from_slice(&data[..]).map_err(|_| ser::Error::CorruptedData)
}
}
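A small usage sketch for this mapping type, assuming an open backend and batch as above; the 'savings' label and its path are illustrative:

	let mapping = AcctPathMapping {
		label: "savings".to_owned(),
		path: ExtKeychain::derive_key_id(2, 1, 0, 0, 0),
	};
	batch.save_acct_path(mapping)?;
	// later, resolve the label back to its parent path (returns Option<AcctPathMapping>)
	let found = wallet.get_acct_path("savings".to_owned())?;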
/// Dummy wrapper for the hex-encoded serialized transaction.
#[derive(Serialize, Deserialize)]
pub struct TxWrapper {

View file

@ -19,8 +19,8 @@ use std::{fs, path};
use failure::ResultExt;
use uuid::Uuid;
use keychain::{Identifier, Keychain};
use store::{self, option_to_not_found, to_key, u64_to_key};
use keychain::{ChildNumber, ExtKeychain, Identifier, Keychain};
use store::{self, option_to_not_found, to_key, to_key_u64};
use libwallet::types::*;
use libwallet::{internal, Error, ErrorKind};
@ -36,6 +36,7 @@ const CONFIRMED_HEIGHT_PREFIX: u8 = 'c' as u8;
const PRIVATE_TX_CONTEXT_PREFIX: u8 = 'p' as u8;
const TX_LOG_ENTRY_PREFIX: u8 = 't' as u8;
const TX_LOG_ID_PREFIX: u8 = 'i' as u8;
const ACCOUNT_PATH_MAPPING_PREFIX: u8 = 'a' as u8;
impl From<store::Error> for Error {
fn from(error: store::Error) -> Error {
@ -56,7 +57,9 @@ pub struct LMDBBackend<C, K> {
/// passphrase: TODO better ways of dealing with this other than storing
passphrase: String,
/// Keychain
keychain: Option<K>,
pub keychain: Option<K>,
/// Parent path to use by default for output operations
parent_key_id: Identifier,
/// client
client: C,
}
@ -67,14 +70,40 @@ impl<C, K> LMDBBackend<C, K> {
fs::create_dir_all(&db_path).expect("Couldn't create wallet backend directory!");
let lmdb_env = Arc::new(store::new_env(db_path.to_str().unwrap().to_string()));
let db = store::Store::open(lmdb_env, DB_DIR);
Ok(LMDBBackend {
db,
let store = store::Store::open(lmdb_env, DB_DIR);
// Make sure default wallet derivation path always exists
let default_account = AcctPathMapping {
label: "default".to_owned(),
path: LMDBBackend::<C, K>::default_path(),
};
let acct_key = to_key(
ACCOUNT_PATH_MAPPING_PREFIX,
&mut default_account.label.as_bytes().to_vec(),
);
{
let batch = store.batch()?;
batch.put_ser(&acct_key, &default_account)?;
batch.commit()?;
}
let res = LMDBBackend {
db: store,
config: config.clone(),
passphrase: String::from(passphrase),
keychain: None,
parent_key_id: LMDBBackend::<C, K>::default_path(),
client: client,
})
};
Ok(res)
}
fn default_path() -> Identifier {
// return the default parent wallet path, corresponding to the default account
// in the BIP32 spec. Parent is account 0 at level 2, child output identifiers
// are all at level 3
ExtKeychain::derive_key_id(2, 0, 0, 0, 0)
}
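For orientation, a sketch of how the level-2 account paths relate to the level-3 output identifiers used throughout this change, written with the same derive_key_id(depth, d1, d2, d3, d4) form; the child index 5 is illustrative:

	// default account m/0/0 (depth 2); a second account would be m/1/0, and so on
	let default_acct = ExtKeychain::derive_key_id(2, 0, 0, 0, 0);
	// an output created under the default account at child index 5 lives at m/0/0/5 (depth 3)
	let output_key = ExtKeychain::derive_key_id(3, 0, 0, 5, 0);
	// its depth-2 prefix is expected to map back to the owning account
	assert_eq!(output_key.parent_path(), default_acct);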
/// Just test to see if database files exist in the current directory. If
@ -117,6 +146,27 @@ where
&mut self.client
}
/// Set parent path by account name
fn set_parent_key_id_by_name(&mut self, label: &str) -> Result<(), Error> {
let label = label.to_owned();
let res = self.acct_path_iter().find(|l| l.label == label);
if let Some(a) = res {
self.set_parent_key_id(a.path);
Ok(())
} else {
return Err(ErrorKind::UnknownAccountLabel(label.clone()).into());
}
}
/// set parent path
fn set_parent_key_id(&mut self, id: Identifier) {
self.parent_key_id = id;
}
fn parent_key_id(&mut self) -> Identifier {
self.parent_key_id.clone()
}
fn get(&self, id: &Identifier) -> Result<OutputData, Error> {
let key = to_key(OUTPUT_PREFIX, &mut id.to_bytes().to_vec());
option_to_not_found(self.db.get_ser(&key), &format!("Key Id: {}", id)).map_err(|e| e.into())
@ -170,6 +220,15 @@ where
).map_err(|e| e.into())
}
fn acct_path_iter<'a>(&'a self) -> Box<Iterator<Item = AcctPathMapping> + 'a> {
Box::new(self.db.iter(&[ACCOUNT_PATH_MAPPING_PREFIX]).unwrap())
}
fn get_acct_path(&self, label: String) -> Result<Option<AcctPathMapping>, Error> {
let acct_key = to_key(ACCOUNT_PATH_MAPPING_PREFIX, &mut label.as_bytes().to_vec());
self.db.get_ser(&acct_key).map_err(|e| e.into())
}
fn batch<'a>(&'a mut self) -> Result<Box<WalletOutputBatch<K> + 'a>, Error> {
Ok(Box::new(Batch {
_store: self,
@ -178,34 +237,37 @@ where
}))
}
fn next_child<'a>(&mut self, root_key_id: Identifier) -> Result<u32, Error> {
let mut details = self.details(root_key_id.clone())?;
fn next_child<'a>(&mut self) -> Result<Identifier, Error> {
let parent_key_id = self.parent_key_id.clone();
let mut deriv_idx = {
let batch = self.db.batch()?;
let deriv_key = to_key(DERIV_PREFIX, &mut self.parent_key_id.to_bytes().to_vec());
match batch.get_ser(&deriv_key)? {
Some(idx) => idx,
None => 0,
}
};
let mut return_path = self.parent_key_id.to_path();
return_path.depth = return_path.depth + 1;
return_path.path[return_path.depth as usize - 1] = ChildNumber::from(deriv_idx);
deriv_idx = deriv_idx + 1;
let mut batch = self.batch()?;
details.last_child_index = details.last_child_index + 1;
batch.save_details(root_key_id, details.clone())?;
batch.save_child_index(&parent_key_id, deriv_idx)?;
batch.commit()?;
Ok(details.last_child_index + 1)
Ok(Identifier::from_path(&return_path))
}
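In effect, successive calls hand out sibling identifiers directly under the active parent while bumping the stored index. A sketch, assuming account1 (m/1/0) is active and no children have been issued under it yet:

	wallet.set_parent_key_id(ExtKeychain::derive_key_id(2, 1, 0, 0, 0));
	let k0 = wallet.next_child()?; // expected m/1/0/0, i.e. derive_key_id(3, 1, 0, 0, 0)
	let k1 = wallet.next_child()?; // expected m/1/0/1, i.e. derive_key_id(3, 1, 0, 1, 0)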
fn details(&mut self, root_key_id: Identifier) -> Result<WalletDetails, Error> {
fn last_confirmed_height<'a>(&mut self) -> Result<u64, Error> {
let batch = self.db.batch()?;
let deriv_key = to_key(DERIV_PREFIX, &mut root_key_id.to_bytes().to_vec());
let deriv_idx = match batch.get_ser(&deriv_key)? {
Some(idx) => idx,
None => 0,
};
let height_key = to_key(
CONFIRMED_HEIGHT_PREFIX,
&mut root_key_id.to_bytes().to_vec(),
&mut self.parent_key_id.to_bytes().to_vec(),
);
let last_confirmed_height = match batch.get_ser(&height_key)? {
Some(h) => h,
None => 0,
};
Ok(WalletDetails {
last_child_index: deriv_idx,
last_confirmed_height: last_confirmed_height,
})
Ok(last_confirmed_height)
}
fn restore(&mut self) -> Result<(), Error> {
@ -289,18 +351,17 @@ where
Ok(())
}
fn next_tx_log_id(&mut self, root_key_id: Identifier) -> Result<u32, Error> {
let tx_id_key = to_key(TX_LOG_ID_PREFIX, &mut root_key_id.to_bytes().to_vec());
let mut last_tx_log_id = match self.db.borrow().as_ref().unwrap().get_ser(&tx_id_key)? {
fn next_tx_log_id(&mut self, parent_key_id: &Identifier) -> Result<u32, Error> {
let tx_id_key = to_key(TX_LOG_ID_PREFIX, &mut parent_key_id.to_bytes().to_vec());
let last_tx_log_id = match self.db.borrow().as_ref().unwrap().get_ser(&tx_id_key)? {
Some(t) => t,
None => 0,
};
last_tx_log_id = last_tx_log_id + 1;
self.db
.borrow()
.as_ref()
.unwrap()
.put_ser(&tx_id_key, &last_tx_log_id)?;
.put_ser(&tx_id_key, &(last_tx_log_id + 1))?;
Ok(last_tx_log_id)
}
@ -315,35 +376,67 @@ where
)
}
fn save_details(&mut self, root_key_id: Identifier, d: WalletDetails) -> Result<(), Error> {
let deriv_key = to_key(DERIV_PREFIX, &mut root_key_id.to_bytes().to_vec());
fn save_last_confirmed_height(
&mut self,
parent_key_id: &Identifier,
height: u64,
) -> Result<(), Error> {
let height_key = to_key(
CONFIRMED_HEIGHT_PREFIX,
&mut root_key_id.to_bytes().to_vec(),
&mut parent_key_id.to_bytes().to_vec(),
);
self.db
.borrow()
.as_ref()
.unwrap()
.put_ser(&deriv_key, &d.last_child_index)?;
self.db
.borrow()
.as_ref()
.unwrap()
.put_ser(&height_key, &d.last_confirmed_height)?;
.put_ser(&height_key, &height)?;
Ok(())
}
fn save_tx_log_entry(&self, t: TxLogEntry) -> Result<(), Error> {
let tx_log_key = u64_to_key(TX_LOG_ENTRY_PREFIX, t.id as u64);
fn save_child_index(&mut self, parent_id: &Identifier, child_n: u32) -> Result<(), Error> {
let deriv_key = to_key(DERIV_PREFIX, &mut parent_id.to_bytes().to_vec());
self.db
.borrow()
.as_ref()
.unwrap()
.put_ser(&tx_log_key, &t)?;
.put_ser(&deriv_key, &child_n)?;
Ok(())
}
fn save_tx_log_entry(&self, t: TxLogEntry, parent_id: &Identifier) -> Result<(), Error> {
let tx_log_key = to_key_u64(
TX_LOG_ENTRY_PREFIX,
&mut parent_id.to_bytes().to_vec(),
t.id as u64,
);
self.db.borrow().as_ref().unwrap().put_ser(&tx_log_key, &t)?;
Ok(())
}
fn save_acct_path(&mut self, mapping: AcctPathMapping) -> Result<(), Error> {
let acct_key = to_key(
ACCOUNT_PATH_MAPPING_PREFIX,
&mut mapping.label.as_bytes().to_vec(),
);
self.db
.borrow()
.as_ref()
.unwrap()
.put_ser(&acct_key, &mapping)?;
Ok(())
}
fn acct_path_iter(&self) -> Box<Iterator<Item = AcctPathMapping>> {
Box::new(
self.db
.borrow()
.as_ref()
.unwrap()
.iter(&[ACCOUNT_PATH_MAPPING_PREFIX])
.unwrap(),
)
}
fn lock_output(&mut self, out: &mut OutputData) -> Result<(), Error> {
out.lock();
self.save(out.clone())

264
wallet/tests/accounts.rs Normal file
View file

@ -0,0 +1,264 @@
// Copyright 2018 The Grin Developers
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! tests differing accounts in the same wallet
extern crate grin_chain as chain;
extern crate grin_core as core;
extern crate grin_keychain as keychain;
extern crate grin_store as store;
extern crate grin_util as util;
extern crate grin_wallet as wallet;
extern crate rand;
#[macro_use]
extern crate slog;
extern crate chrono;
extern crate serde;
extern crate uuid;
mod common;
use common::testclient::{LocalWalletClient, WalletProxy};
use std::fs;
use std::thread;
use std::time::Duration;
use core::global;
use core::global::ChainTypes;
use keychain::{ExtKeychain, Keychain};
use util::LOGGER;
use wallet::libwallet;
fn clean_output_dir(test_dir: &str) {
let _ = fs::remove_dir_all(test_dir);
}
fn setup(test_dir: &str) {
util::init_test_logger();
clean_output_dir(test_dir);
global::set_mining_mode(ChainTypes::AutomatedTesting);
}
/// Various tests on accounts within the same wallet
fn accounts_test_impl(test_dir: &str) -> Result<(), libwallet::Error> {
setup(test_dir);
// Create a new proxy to simulate server and wallet responses
let mut wallet_proxy: WalletProxy<LocalWalletClient, ExtKeychain> = WalletProxy::new(test_dir);
let chain = wallet_proxy.chain.clone();
// Create a new wallet test client, and set its queues to communicate with the
// proxy
let client = LocalWalletClient::new("wallet1", wallet_proxy.tx.clone());
let wallet1 = common::create_wallet(&format!("{}/wallet1", test_dir), client.clone());
wallet_proxy.add_wallet("wallet1", client.get_send_instance(), wallet1.clone());
// define recipient wallet, add to proxy
let wallet2 = common::create_wallet(&format!("{}/wallet2", test_dir), client.clone());
let client = LocalWalletClient::new("wallet2", wallet_proxy.tx.clone());
wallet_proxy.add_wallet("wallet2", client.get_send_instance(), wallet2.clone());
// Set the wallet proxy listener running
thread::spawn(move || {
if let Err(e) = wallet_proxy.run() {
error!(LOGGER, "Wallet Proxy error: {}", e);
}
});
// few values to keep things shorter
let reward = core::consensus::REWARD;
let cm = global::coinbase_maturity(0); // assume all testing precedes soft fork height
// test default accounts exist
wallet::controller::owner_single_use(wallet1.clone(), |api| {
let accounts = api.accounts()?;
assert_eq!(accounts[0].label, "default");
assert_eq!(accounts[0].path, ExtKeychain::derive_key_id(2, 0, 0, 0, 0));
Ok(())
})?;
// add some accounts
wallet::controller::owner_single_use(wallet1.clone(), |api| {
let new_path = api.new_account_path("account1").unwrap();
assert_eq!(new_path, ExtKeychain::derive_key_id(2, 1, 0, 0, 0));
let new_path = api.new_account_path("account2").unwrap();
assert_eq!(new_path, ExtKeychain::derive_key_id(2, 2, 0, 0, 0));
let new_path = api.new_account_path("account3").unwrap();
assert_eq!(new_path, ExtKeychain::derive_key_id(2, 3, 0, 0, 0));
// trying to add same label again should fail
let res = api.new_account_path("account1");
assert!(res.is_err());
Ok(())
})?;
// add account to wallet 2
wallet::controller::owner_single_use(wallet2.clone(), |api| {
let new_path = api.new_account_path("listener_account").unwrap();
assert_eq!(new_path, ExtKeychain::derive_key_id(2, 1, 0, 0, 0));
Ok(())
})?;
// Default wallet 2 to listen on that account
{
let mut w = wallet2.lock().unwrap();
w.set_parent_key_id_by_name("listener_account")?;
}
// Mine into two different accounts in the same wallet
{
let mut w = wallet1.lock().unwrap();
w.set_parent_key_id_by_name("account1")?;
assert_eq!(w.parent_key_id(), ExtKeychain::derive_key_id(2, 1, 0, 0, 0));
}
let _ = common::award_blocks_to_wallet(&chain, wallet1.clone(), 7);
{
let mut w = wallet1.lock().unwrap();
w.set_parent_key_id_by_name("account2")?;
assert_eq!(w.parent_key_id(), ExtKeychain::derive_key_id(2, 2, 0, 0, 0));
}
let _ = common::award_blocks_to_wallet(&chain, wallet1.clone(), 5);
	// Should have 7 in account1 (all spendable), 5 in account2 (5 - cm spendable)
wallet::controller::owner_single_use(wallet1.clone(), |api| {
let (wallet1_refreshed, wallet1_info) = api.retrieve_summary_info(true)?;
assert!(wallet1_refreshed);
assert_eq!(wallet1_info.last_confirmed_height, 12);
assert_eq!(wallet1_info.total, 5 * reward);
assert_eq!(wallet1_info.amount_currently_spendable, (5 - cm) * reward);
// check tx log as well
let (_, txs) = api.retrieve_txs(true, None)?;
assert_eq!(txs.len(), 5);
Ok(())
})?;
// now check second account
{
let mut w = wallet1.lock().unwrap();
w.set_parent_key_id_by_name("account1")?;
}
wallet::controller::owner_single_use(wallet1.clone(), |api| {
// check last confirmed height on this account is different from above (should be 0)
let (_, wallet1_info) = api.retrieve_summary_info(false)?;
assert_eq!(wallet1_info.last_confirmed_height, 0);
let (wallet1_refreshed, wallet1_info) = api.retrieve_summary_info(true)?;
assert!(wallet1_refreshed);
assert_eq!(wallet1_info.last_confirmed_height, 12);
assert_eq!(wallet1_info.total, 7 * reward);
assert_eq!(wallet1_info.amount_currently_spendable, 7 * reward);
// check tx log as well
let (_, txs) = api.retrieve_txs(true, None)?;
assert_eq!(txs.len(), 7);
Ok(())
})?;
// should be nothing in default account
{
let mut w = wallet1.lock().unwrap();
w.set_parent_key_id_by_name("default")?;
}
wallet::controller::owner_single_use(wallet1.clone(), |api| {
let (_, wallet1_info) = api.retrieve_summary_info(false)?;
assert_eq!(wallet1_info.last_confirmed_height, 0);
let (wallet1_refreshed, wallet1_info) = api.retrieve_summary_info(true)?;
assert!(wallet1_refreshed);
assert_eq!(wallet1_info.last_confirmed_height, 12);
assert_eq!(wallet1_info.total, 0,);
assert_eq!(wallet1_info.amount_currently_spendable, 0,);
// check tx log as well
let (_, txs) = api.retrieve_txs(true, None)?;
assert_eq!(txs.len(), 0);
Ok(())
})?;
// Send a tx to another wallet
{
let mut w = wallet1.lock().unwrap();
w.set_parent_key_id_by_name("account1")?;
}
wallet::controller::owner_single_use(wallet1.clone(), |api| {
let slate = api.issue_send_tx(
reward, // amount
2, // minimum confirmations
"wallet2", // dest
500, // max outputs
1, // num change outputs
true, // select all outputs
)?;
api.post_tx(&slate, false)?;
Ok(())
})?;
wallet::controller::owner_single_use(wallet1.clone(), |api| {
let (wallet1_refreshed, wallet1_info) = api.retrieve_summary_info(true)?;
assert!(wallet1_refreshed);
assert_eq!(wallet1_info.last_confirmed_height, 13);
let (_, txs) = api.retrieve_txs(true, None)?;
assert_eq!(txs.len(), 9);
Ok(())
})?;
// other account should be untouched
{
let mut w = wallet1.lock().unwrap();
w.set_parent_key_id_by_name("account2")?;
}
wallet::controller::owner_single_use(wallet1.clone(), |api| {
let (_, wallet1_info) = api.retrieve_summary_info(false)?;
assert_eq!(wallet1_info.last_confirmed_height, 12);
let (_, wallet1_info) = api.retrieve_summary_info(true)?;
assert_eq!(wallet1_info.last_confirmed_height, 13);
let (_, txs) = api.retrieve_txs(true, None)?;
println!("{:?}", txs);
assert_eq!(txs.len(), 5);
Ok(())
})?;
// wallet 2 should only have this tx on the listener account
wallet::controller::owner_single_use(wallet2.clone(), |api| {
let (wallet2_refreshed, wallet2_info) = api.retrieve_summary_info(true)?;
assert!(wallet2_refreshed);
assert_eq!(wallet2_info.last_confirmed_height, 13);
let (_, txs) = api.retrieve_txs(true, None)?;
assert_eq!(txs.len(), 1);
Ok(())
})?;
// Default account on wallet 2 should be untouched
{
let mut w = wallet2.lock().unwrap();
w.set_parent_key_id_by_name("default")?;
}
wallet::controller::owner_single_use(wallet2.clone(), |api| {
let (_, wallet2_info) = api.retrieve_summary_info(false)?;
assert_eq!(wallet2_info.last_confirmed_height, 0);
let (wallet2_refreshed, wallet2_info) = api.retrieve_summary_info(true)?;
assert!(wallet2_refreshed);
assert_eq!(wallet2_info.last_confirmed_height, 13);
assert_eq!(wallet2_info.total, 0,);
assert_eq!(wallet2_info.amount_currently_spendable, 0,);
// check tx log as well
let (_, txs) = api.retrieve_txs(true, None)?;
assert_eq!(txs.len(), 0);
Ok(())
})?;
// let logging finish
thread::sleep(Duration::from_millis(200));
Ok(())
}
#[test]
fn accounts() {
let test_dir = "test_output/accounts";
if let Err(e) = accounts_test_impl(test_dir) {
panic!("Libwallet Error: {} - {}", e, e.backtrace().unwrap());
}
}

View file

@ -27,7 +27,6 @@ use std::sync::{Arc, Mutex};
use chain::Chain;
use core::core::{OutputFeatures, OutputIdentifier, Transaction};
use core::{consensus, global, pow, ser};
use wallet::file_wallet::FileWallet;
use wallet::libwallet;
use wallet::libwallet::types::{BlockFees, CbData, WalletClient, WalletInst};
use wallet::lmdb_wallet::LMDBBackend;
@ -154,12 +153,8 @@ where
Ok(())
}
/// dispatch a wallet (extend later to optionally dispatch a db wallet)
pub fn create_wallet<C, K>(
dir: &str,
client: C,
backend_type: BackendType,
) -> Arc<Mutex<Box<WalletInst<C, K>>>>
/// dispatch a db wallet
pub fn create_wallet<C, K>(dir: &str, client: C) -> Arc<Mutex<Box<WalletInst<C, K>>>>
where
C: WalletClient + 'static,
K: keychain::Keychain + 'static,
@ -167,21 +162,12 @@ where
let mut wallet_config = WalletConfig::default();
wallet_config.data_file_dir = String::from(dir);
let _ = wallet::WalletSeed::init_file(&wallet_config);
let mut wallet: Box<WalletInst<C, K>> = match backend_type {
BackendType::FileBackend => {
let mut wallet: FileWallet<C, K> = FileWallet::new(wallet_config.clone(), "", client)
.unwrap_or_else(|e| {
panic!("Error creating wallet: {:?} Config: {:?}", e, wallet_config)
});
Box::new(wallet)
}
BackendType::LMDBBackend => {
let mut wallet: LMDBBackend<C, K> = LMDBBackend::new(wallet_config.clone(), "", client)
.unwrap_or_else(|e| {
panic!("Error creating wallet: {:?} Config: {:?}", e, wallet_config)
});
Box::new(wallet)
}
let mut wallet: Box<WalletInst<C, K>> = {
let mut wallet: LMDBBackend<C, K> = LMDBBackend::new(wallet_config.clone(), "", client)
.unwrap_or_else(|e| {
panic!("Error creating wallet: {:?} Config: {:?}", e, wallet_config)
});
Box::new(wallet)
};
wallet.open_with_credentials().unwrap_or_else(|e| {
panic!(

View file

@ -180,9 +180,9 @@ where
libwallet::ErrorKind::ClientCallback("Error parsing TxWrapper"),
)?;
let tx_bin = util::from_hex(wrapper.tx_hex).context(
libwallet::ErrorKind::ClientCallback("Error parsing TxWrapper: tx_bin"),
)?;
let tx_bin = util::from_hex(wrapper.tx_hex).context(libwallet::ErrorKind::ClientCallback(
"Error parsing TxWrapper: tx_bin",
))?;
let tx: Transaction = ser::deserialize(&mut &tx_bin[..]).context(
libwallet::ErrorKind::ClientCallback("Error parsing TxWrapper: tx"),

View file

@ -36,13 +36,9 @@ fn aggsig_sender_receiver_interaction() {
// Calculate the kernel excess here for convenience.
// Normally this would happen during transaction building.
let kernel_excess = {
let skey1 = sender_keychain
.derived_key(&sender_keychain.derive_key_id(1).unwrap())
.unwrap();
let skey2 = receiver_keychain
.derived_key(&receiver_keychain.derive_key_id(1).unwrap())
.unwrap();
let id1 = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let skey1 = sender_keychain.derive_key(&id1).unwrap().secret_key;
let skey2 = receiver_keychain.derive_key(&id1).unwrap().secret_key;
let keychain = ExtKeychain::from_random_seed().unwrap();
let blinding_factor = keychain
@ -64,10 +60,8 @@ fn aggsig_sender_receiver_interaction() {
// sender starts the tx interaction
let (sender_pub_excess, _sender_pub_nonce) = {
let keychain = sender_keychain.clone();
let skey = keychain
.derived_key(&keychain.derive_key_id(1).unwrap())
.unwrap();
let id1 = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let skey = keychain.derive_key(&id1).unwrap().secret_key;
// dealing with an input here so we need to negate the blinding_factor
// rather than use it as is
@ -83,13 +77,14 @@ fn aggsig_sender_receiver_interaction() {
};
let pub_nonce_sum;
let pub_key_sum;
// receiver receives partial tx
let (receiver_pub_excess, _receiver_pub_nonce, rx_sig_part) = {
let keychain = receiver_keychain.clone();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
// let blind = blind_sum.secret_key(&keychain.secp())?;
let blind = keychain.derived_key(&key_id).unwrap();
let blind = keychain.derive_key(&key_id).unwrap().secret_key;
rx_cx = Context::new(&keychain.secp(), blind);
let (pub_excess, pub_nonce) = rx_cx.get_public_keys(&keychain.secp());
@ -103,11 +98,20 @@ fn aggsig_sender_receiver_interaction() {
],
).unwrap();
pub_key_sum = PublicKey::from_combination(
keychain.secp(),
vec![
&s_cx.get_public_keys(keychain.secp()).0,
&rx_cx.get_public_keys(keychain.secp()).0,
],
).unwrap();
let sig_part = aggsig::calculate_partial_sig(
&keychain.secp(),
&rx_cx.sec_key,
&rx_cx.sec_nonce,
&pub_nonce_sum,
Some(&pub_key_sum),
0,
0,
).unwrap();
@ -123,6 +127,7 @@ fn aggsig_sender_receiver_interaction() {
&rx_sig_part,
&pub_nonce_sum,
&receiver_pub_excess,
Some(&pub_key_sum),
0,
0,
);
@ -137,6 +142,7 @@ fn aggsig_sender_receiver_interaction() {
&s_cx.sec_key,
&s_cx.sec_nonce,
&pub_nonce_sum,
Some(&pub_key_sum),
0,
0,
).unwrap();
@ -152,6 +158,7 @@ fn aggsig_sender_receiver_interaction() {
&sender_sig_part,
&pub_nonce_sum,
&sender_pub_excess,
Some(&pub_key_sum),
0,
0,
);
@ -167,6 +174,7 @@ fn aggsig_sender_receiver_interaction() {
&rx_cx.sec_key,
&rx_cx.sec_nonce,
&pub_nonce_sum,
Some(&pub_key_sum),
0,
0,
).unwrap();
@ -195,8 +203,14 @@ fn aggsig_sender_receiver_interaction() {
let keychain = receiver_keychain.clone();
		// Receiver checks that the final signature verifies
let sig_verifies =
aggsig::verify_sig_build_msg(&keychain.secp(), &final_sig, &final_pubkey, 0, 0);
let sig_verifies = aggsig::verify_sig_build_msg(
&keychain.secp(),
&final_sig,
&final_pubkey,
Some(&final_pubkey),
0,
0,
);
assert!(!sig_verifies.is_err());
}
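The Some(&pub_key_sum) argument threaded through these calls passes the sum of both parties' public excesses into partial signing and verification. The implication, not spelled out in this diff so treat the exact form as an assumption, is that the challenge now commits to the aggregate public key as well as the aggregate nonce:

	// e = hash(pub_nonce_sum || pub_key_sum || msg(fee, lock_height))  (assumed form)
	// partial sig: s_i = k_i + e * x_i, verified by both sides against the same pub_key_sum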
@ -226,13 +240,9 @@ fn aggsig_sender_receiver_interaction_offset() {
// Calculate the kernel excess here for convenience.
// Normally this would happen during transaction building.
let kernel_excess = {
let skey1 = sender_keychain
.derived_key(&sender_keychain.derive_key_id(1).unwrap())
.unwrap();
let skey2 = receiver_keychain
.derived_key(&receiver_keychain.derive_key_id(1).unwrap())
.unwrap();
let id1 = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let skey1 = sender_keychain.derive_key(&id1).unwrap().secret_key;
let skey2 = receiver_keychain.derive_key(&id1).unwrap().secret_key;
let keychain = ExtKeychain::from_random_seed().unwrap();
let blinding_factor = keychain
@ -257,10 +267,8 @@ fn aggsig_sender_receiver_interaction_offset() {
// sender starts the tx interaction
let (sender_pub_excess, _sender_pub_nonce) = {
let keychain = sender_keychain.clone();
let skey = keychain
.derived_key(&keychain.derive_key_id(1).unwrap())
.unwrap();
let id1 = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let skey = keychain.derive_key(&id1).unwrap().secret_key;
// dealing with an input here so we need to negate the blinding_factor
// rather than use it as is
@ -282,11 +290,12 @@ fn aggsig_sender_receiver_interaction_offset() {
// receiver receives partial tx
let pub_nonce_sum;
let pub_key_sum;
let (receiver_pub_excess, _receiver_pub_nonce, sig_part) = {
let keychain = receiver_keychain.clone();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let blind = keychain.derived_key(&key_id).unwrap();
let blind = keychain.derive_key(&key_id).unwrap().secret_key;
rx_cx = Context::new(&keychain.secp(), blind);
let (pub_excess, pub_nonce) = rx_cx.get_public_keys(&keychain.secp());
@ -300,11 +309,20 @@ fn aggsig_sender_receiver_interaction_offset() {
],
).unwrap();
pub_key_sum = PublicKey::from_combination(
keychain.secp(),
vec![
&s_cx.get_public_keys(keychain.secp()).0,
&rx_cx.get_public_keys(keychain.secp()).0,
],
).unwrap();
let sig_part = aggsig::calculate_partial_sig(
&keychain.secp(),
&rx_cx.sec_key,
&rx_cx.sec_nonce,
&pub_nonce_sum,
Some(&pub_key_sum),
0,
0,
).unwrap();
@ -320,6 +338,7 @@ fn aggsig_sender_receiver_interaction_offset() {
&sig_part,
&pub_nonce_sum,
&receiver_pub_excess,
Some(&pub_key_sum),
0,
0,
);
@ -334,6 +353,7 @@ fn aggsig_sender_receiver_interaction_offset() {
&s_cx.sec_key,
&s_cx.sec_nonce,
&pub_nonce_sum,
Some(&pub_key_sum),
0,
0,
).unwrap();
@ -349,6 +369,7 @@ fn aggsig_sender_receiver_interaction_offset() {
&sender_sig_part,
&pub_nonce_sum,
&sender_pub_excess,
Some(&pub_key_sum),
0,
0,
);
@ -363,6 +384,7 @@ fn aggsig_sender_receiver_interaction_offset() {
&rx_cx.sec_key,
&rx_cx.sec_nonce,
&pub_nonce_sum,
Some(&pub_key_sum),
0,
0,
).unwrap();
@ -391,8 +413,14 @@ fn aggsig_sender_receiver_interaction_offset() {
let keychain = receiver_keychain.clone();
		// Receiver checks that the final signature verifies
let sig_verifies =
aggsig::verify_sig_build_msg(&keychain.secp(), &final_sig, &final_pubkey, 0, 0);
let sig_verifies = aggsig::verify_sig_build_msg(
&keychain.secp(),
&final_sig,
&final_pubkey,
Some(&final_pubkey),
0,
0,
);
assert!(!sig_verifies.is_err());
}
@ -412,8 +440,8 @@ fn aggsig_sender_receiver_interaction_offset() {
#[test]
fn test_rewind_range_proof() {
let keychain = ExtKeychain::from_random_seed().unwrap();
let key_id = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let key_id2 = ExtKeychain::derive_key_id(1, 2, 0, 0, 0);
let commit = keychain.commit(5, &key_id).unwrap();
let extra_data = [99u8; 64];
@ -429,6 +457,7 @@ fn test_rewind_range_proof() {
assert_eq!(proof_info.success, true);
assert_eq!(proof_info.value, 5);
assert_eq!(proof_info.message.as_bytes(), key_id.serialize_path());
// cannot rewind with a different commit
let commit2 = keychain.commit(5, &key_id2).unwrap();
@ -436,6 +465,7 @@ fn test_rewind_range_proof() {
proof::rewind(&keychain, commit2, Some(extra_data.to_vec().clone()), proof).unwrap();
assert_eq!(proof_info.success, false);
assert_eq!(proof_info.value, 0);
assert_eq!(proof_info.message, secp::pedersen::ProofMessage::empty());
// cannot rewind with a commitment to a different value
let commit3 = keychain.commit(4, &key_id).unwrap();

View file

@ -34,10 +34,11 @@ use std::time::Duration;
use core::global;
use core::global::ChainTypes;
use keychain::ExtKeychain;
use keychain::{ExtKeychain, Identifier, Keychain};
use util::LOGGER;
use wallet::libtx::slate::Slate;
use wallet::libwallet;
use wallet::libwallet::types::AcctPathMapping;
fn clean_output_dir(test_dir: &str) {
let _ = fs::remove_dir_all(test_dir);
@ -49,11 +50,7 @@ fn setup(test_dir: &str) {
global::set_mining_mode(ChainTypes::AutomatedTesting);
}
fn restore_wallet(
base_dir: &str,
wallet_dir: &str,
backend_type: common::BackendType,
) -> Result<(), libwallet::Error> {
fn restore_wallet(base_dir: &str, wallet_dir: &str) -> Result<(), libwallet::Error> {
let source_seed = format!("{}/{}/wallet.seed", base_dir, wallet_dir);
let dest_dir = format!("{}/{}_restore", base_dir, wallet_dir);
fs::create_dir_all(dest_dir.clone())?;
@ -63,7 +60,7 @@ fn restore_wallet(
let mut wallet_proxy: WalletProxy<LocalWalletClient, ExtKeychain> = WalletProxy::new(base_dir);
let client = LocalWalletClient::new(wallet_dir, wallet_proxy.tx.clone());
let wallet = common::create_wallet(&dest_dir, client.clone(), backend_type.clone());
let wallet = common::create_wallet(&dest_dir, client.clone());
wallet_proxy.add_wallet(wallet_dir, client.get_send_instance(), wallet.clone());
@ -87,7 +84,7 @@ fn restore_wallet(
fn compare_wallet_restore(
base_dir: &str,
wallet_dir: &str,
backend_type: common::BackendType,
account_path: &Identifier,
) -> Result<(), libwallet::Error> {
let restore_name = format!("{}_restore", wallet_dir);
let source_dir = format!("{}/{}", base_dir, wallet_dir);
@ -96,7 +93,7 @@ fn compare_wallet_restore(
let mut wallet_proxy: WalletProxy<LocalWalletClient, ExtKeychain> = WalletProxy::new(base_dir);
let client = LocalWalletClient::new(wallet_dir, wallet_proxy.tx.clone());
let wallet_source = common::create_wallet(&source_dir, client.clone(), backend_type.clone());
let wallet_source = common::create_wallet(&source_dir, client.clone());
wallet_proxy.add_wallet(
&wallet_dir,
client.get_send_instance(),
@ -104,13 +101,23 @@ fn compare_wallet_restore(
);
let client = LocalWalletClient::new(&restore_name, wallet_proxy.tx.clone());
let wallet_dest = common::create_wallet(&dest_dir, client.clone(), backend_type.clone());
let wallet_dest = common::create_wallet(&dest_dir, client.clone());
wallet_proxy.add_wallet(
&restore_name,
client.get_send_instance(),
wallet_dest.clone(),
);
{
let mut w = wallet_source.lock().unwrap();
w.set_parent_key_id(account_path.clone());
}
{
let mut w = wallet_dest.lock().unwrap();
w.set_parent_key_id(account_path.clone());
}
// Set the wallet proxy listener running
thread::spawn(move || {
if let Err(e) = wallet_proxy.run() {
@ -124,16 +131,21 @@ fn compare_wallet_restore(
let mut src_txs: Option<Vec<libwallet::types::TxLogEntry>> = None;
let mut dest_txs: Option<Vec<libwallet::types::TxLogEntry>> = None;
let mut src_accts: Option<Vec<AcctPathMapping>> = None;
let mut dest_accts: Option<Vec<AcctPathMapping>> = None;
// Overall wallet info should be the same
wallet::controller::owner_single_use(wallet_source.clone(), |api| {
src_info = Some(api.retrieve_summary_info(true)?.1);
src_txs = Some(api.retrieve_txs(true, None)?.1);
src_accts = Some(api.accounts()?);
Ok(())
})?;
wallet::controller::owner_single_use(wallet_dest.clone(), |api| {
dest_info = Some(api.retrieve_summary_info(true)?.1);
dest_txs = Some(api.retrieve_txs(true, None)?.1);
dest_accts = Some(api.accounts()?);
Ok(())
})?;
@@ -142,12 +154,14 @@ fn compare_wallet_restore(
// Net differences in TX logs should be the same
let src_sum: i64 = src_txs
.clone()
.unwrap()
.iter()
.map(|t| t.amount_credited as i64 - t.amount_debited as i64)
.sum();
let dest_sum: i64 = dest_txs
.clone()
.unwrap()
.iter()
.map(|t| t.amount_credited as i64 - t.amount_debited as i64)
@@ -155,15 +169,18 @@ fn compare_wallet_restore(
assert_eq!(src_sum, dest_sum);
// Number of created accounts should be the same
assert_eq!(
src_accts.as_ref().unwrap().len(),
dest_accts.as_ref().unwrap().len()
);
Ok(())
}
/// Build up 2 wallets, perform a few transactions on them
/// Then attempt to restore them in separate directories and check contents are the same
fn setup_restore(
test_dir: &str,
backend_type: common::BackendType,
) -> Result<(), libwallet::Error> {
fn setup_restore(test_dir: &str) -> Result<(), libwallet::Error> {
setup(test_dir);
// Create a new proxy to simulate server and wallet responses
let mut wallet_proxy: WalletProxy<LocalWalletClient, ExtKeychain> = WalletProxy::new(test_dir);
@@ -172,29 +189,30 @@ fn setup_restore(
// Create a new wallet test client, and set its queues to communicate with the
// proxy
let client = LocalWalletClient::new("wallet1", wallet_proxy.tx.clone());
let wallet1 = common::create_wallet(
&format!("{}/wallet1", test_dir),
client.clone(),
backend_type.clone(),
);
let wallet1 = common::create_wallet(&format!("{}/wallet1", test_dir), client.clone());
wallet_proxy.add_wallet("wallet1", client.get_send_instance(), wallet1.clone());
// define recipient wallet, add to proxy
let client = LocalWalletClient::new("wallet2", wallet_proxy.tx.clone());
let wallet2 = common::create_wallet(
&format!("{}/wallet2", test_dir),
client.clone(),
backend_type.clone(),
);
let wallet2 = common::create_wallet(&format!("{}/wallet2", test_dir), client.clone());
wallet_proxy.add_wallet("wallet2", client.get_send_instance(), wallet2.clone());
// wallet 2 will use another account
wallet::controller::owner_single_use(wallet2.clone(), |api| {
api.new_account_path("account1")?;
api.new_account_path("account2")?;
Ok(())
})?;
// Default wallet 2 to listen on that account
{
let mut w = wallet2.lock().unwrap();
w.set_parent_key_id_by_name("account1")?;
}
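wallet2 now exercises multiple accounts: new_account_path registers the named accounts and set_parent_key_id_by_name points the backend at one of them, scoping the owner API calls that follow to that account's parent key. A sketch of the switch-then-query pattern this test repeats, not part of this commit:

// Sketch only: switch to a named account, then query it; the results returned
// inside the closure are scoped to that account.
{
	let mut w = wallet2.lock().unwrap();
	w.set_parent_key_id_by_name("account2")?;
}
wallet::controller::owner_single_use(wallet2.clone(), |api| {
	let (_, info) = api.retrieve_summary_info(true)?;
	let (_, txs) = api.retrieve_txs(true, None)?;
	println!("account2: {} spendable over {} txs", info.amount_currently_spendable, txs.len());
	Ok(())
})?;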
// Another wallet
let client = LocalWalletClient::new("wallet3", wallet_proxy.tx.clone());
let wallet3 = common::create_wallet(
&format!("{}/wallet3", test_dir),
client.clone(),
backend_type.clone(),
);
let wallet3 = common::create_wallet(&format!("{}/wallet3", test_dir), client.clone());
wallet_proxy.add_wallet("wallet3", client.get_send_instance(), wallet3.clone());
// Set the wallet proxy listener running
@@ -261,6 +279,30 @@ fn setup_restore(
Ok(())
})?;
// Another listener account on wallet 2
{
let mut w = wallet2.lock().unwrap();
w.set_parent_key_id_by_name("account2")?;
}
// mine a few more blocks
let _ = common::award_blocks_to_wallet(&chain, wallet1.clone(), 2);
// Wallet3 to wallet 2 again (to another account)
wallet::controller::owner_single_use(wallet3.clone(), |sender_api| {
// note this will increment the block count as part of the transaction "Posting"
slate = sender_api.issue_send_tx(
amount * 3, // amount
2, // minimum confirmations
"wallet2", // dest
500, // max outputs
1, // num change outputs
true, // select all outputs
)?;
sender_api.post_tx(&slate, false)?;
Ok(())
})?;
// mine a few more blocks
let _ = common::award_blocks_to_wallet(&chain, wallet1.clone(), 5);
@@ -281,26 +323,45 @@ fn setup_restore(
Ok(())
}
fn perform_restore(
test_dir: &str,
backend_type: common::BackendType,
) -> Result<(), libwallet::Error> {
restore_wallet(test_dir, "wallet1", backend_type.clone())?;
compare_wallet_restore(test_dir, "wallet1", backend_type.clone())?;
restore_wallet(test_dir, "wallet2", backend_type.clone())?;
compare_wallet_restore(test_dir, "wallet2", backend_type.clone())?;
restore_wallet(test_dir, "wallet3", backend_type.clone())?;
compare_wallet_restore(test_dir, "wallet3", backend_type)?;
fn perform_restore(test_dir: &str) -> Result<(), libwallet::Error> {
restore_wallet(test_dir, "wallet1")?;
compare_wallet_restore(
test_dir,
"wallet1",
&ExtKeychain::derive_key_id(2, 0, 0, 0, 0),
)?;
restore_wallet(test_dir, "wallet2")?;
compare_wallet_restore(
test_dir,
"wallet2",
&ExtKeychain::derive_key_id(2, 0, 0, 0, 0),
)?;
compare_wallet_restore(
test_dir,
"wallet2",
&ExtKeychain::derive_key_id(2, 1, 0, 0, 0),
)?;
compare_wallet_restore(
test_dir,
"wallet2",
&ExtKeychain::derive_key_id(2, 2, 0, 0, 0),
)?;
restore_wallet(test_dir, "wallet3")?;
compare_wallet_restore(
test_dir,
"wallet3",
&ExtKeychain::derive_key_id(2, 0, 0, 0, 0),
)?;
Ok(())
}
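perform_restore now checks each restored wallet against explicit BIP32 parent paths: ExtKeychain::derive_key_id(2, 0, 0, 0, 0) is the depth-2 identifier for the default account, and the index-1 and index-2 variants correspond to wallet2's "account1" and "account2". Reading the first two arguments as depth and account index is an inference from these calls rather than something spelled out in the diff. A hypothetical helper expressing the same checks as a loop:

// Hypothetical helper, not in this commit: restore once, then compare the
// restored copy against each account's parent path.
fn restore_and_compare(test_dir: &str, wallet: &str, n_accounts: u32) -> Result<(), libwallet::Error> {
	restore_wallet(test_dir, wallet)?;
	for i in 0..n_accounts {
		// assumed meaning: depth-2 path with account index `i`
		let parent = ExtKeychain::derive_key_id(2, i, 0, 0, 0);
		compare_wallet_restore(test_dir, wallet, &parent)?;
	}
	Ok(())
}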
#[test]
fn db_wallet_restore() {
let test_dir = "test_output/wallet_restore_db";
if let Err(e) = setup_restore(test_dir, common::BackendType::LMDBBackend) {
fn wallet_restore() {
let test_dir = "test_output/wallet_restore";
if let Err(e) = setup_restore(test_dir) {
println!("Set up restore: Libwallet Error: {}", e);
}
if let Err(e) = perform_restore(test_dir, common::BackendType::LMDBBackend) {
if let Err(e) = perform_restore(test_dir) {
println!("Perform restore: Libwallet Error: {}", e);
}
// let logging finish

View file

@@ -53,10 +53,7 @@ fn setup(test_dir: &str) {
/// Exercises the Transaction API fully with a test WalletClient operating
/// directly on a chain instance
/// Callable with any type of wallet
fn basic_transaction_api(
test_dir: &str,
backend_type: common::BackendType,
) -> Result<(), libwallet::Error> {
fn basic_transaction_api(test_dir: &str) -> Result<(), libwallet::Error> {
setup(test_dir);
// Create a new proxy to simulate server and wallet responses
let mut wallet_proxy: WalletProxy<LocalWalletClient, ExtKeychain> = WalletProxy::new(test_dir);
@@ -65,20 +62,12 @@ fn basic_transaction_api(
// Create a new wallet test client, and set its queues to communicate with the
// proxy
let client = LocalWalletClient::new("wallet1", wallet_proxy.tx.clone());
let wallet1 = common::create_wallet(
&format!("{}/wallet1", test_dir),
client.clone(),
backend_type.clone(),
);
let wallet1 = common::create_wallet(&format!("{}/wallet1", test_dir), client.clone());
wallet_proxy.add_wallet("wallet1", client.get_send_instance(), wallet1.clone());
// define recipient wallet, add to proxy
let wallet2 = common::create_wallet(&format!("{}/wallet2", test_dir), client.clone());
let client = LocalWalletClient::new("wallet2", wallet_proxy.tx.clone());
let wallet2 = common::create_wallet(
&format!("{}/wallet2", test_dir),
client.clone(),
backend_type.clone(),
);
wallet_proxy.add_wallet("wallet2", client.get_send_instance(), wallet2.clone());
// Set the wallet proxy listener running
@@ -91,8 +80,7 @@ fn basic_transaction_api(
// few values to keep things shorter
let reward = core::consensus::REWARD;
let cm = global::coinbase_maturity(0); // assume all testing precedes soft fork height
// mine a few blocks
// mine a few blocks
let _ = common::award_blocks_to_wallet(&chain, wallet1.clone(), 10);
// Check wallet 1 contents are as expected
@@ -310,7 +298,7 @@ fn basic_transaction_api(
/// Test rolling back transactions and outputs when a transaction is never
/// posted to a chain
fn tx_rollback(test_dir: &str, backend_type: common::BackendType) -> Result<(), libwallet::Error> {
fn tx_rollback(test_dir: &str) -> Result<(), libwallet::Error> {
setup(test_dir);
// Create a new proxy to simulate server and wallet responses
let mut wallet_proxy: WalletProxy<LocalWalletClient, ExtKeychain> = WalletProxy::new(test_dir);
@@ -319,20 +307,12 @@ fn tx_rollback(test_dir: &str, backend_type: common::BackendType) -> Result<(),
// Create a new wallet test client, and set its queues to communicate with the
// proxy
let client = LocalWalletClient::new("wallet1", wallet_proxy.tx.clone());
let wallet1 = common::create_wallet(
&format!("{}/wallet1", test_dir),
client.clone(),
backend_type.clone(),
);
let wallet1 = common::create_wallet(&format!("{}/wallet1", test_dir), client.clone());
wallet_proxy.add_wallet("wallet1", client.get_send_instance(), wallet1.clone());
// define recipient wallet, add to proxy
let client = LocalWalletClient::new("wallet2", wallet_proxy.tx.clone());
let wallet2 = common::create_wallet(
&format!("{}/wallet2", test_dir),
client.clone(),
backend_type.clone(),
);
let wallet2 = common::create_wallet(&format!("{}/wallet2", test_dir), client.clone());
wallet_proxy.add_wallet("wallet2", client.get_send_instance(), wallet2.clone());
// Set the wallet proxy listener running
@@ -345,8 +325,7 @@ fn tx_rollback(test_dir: &str, backend_type: common::BackendType) -> Result<(),
// few values to keep things shorter
let reward = core::consensus::REWARD;
let cm = global::coinbase_maturity(0); // assume all testing precedes soft fork height
// mine a few blocks
// mine a few blocks
let _ = common::award_blocks_to_wallet(&chain, wallet1.clone(), 5);
let amount = 30_000_000_000;
@@ -366,7 +345,11 @@ fn tx_rollback(test_dir: &str, backend_type: common::BackendType) -> Result<(),
// Check transaction log for wallet 1
wallet::controller::owner_single_use(wallet1.clone(), |api| {
let (refreshed, _wallet1_info) = api.retrieve_summary_info(true)?;
let (refreshed, wallet1_info) = api.retrieve_summary_info(true)?;
println!(
"last confirmed height: {}",
wallet1_info.last_confirmed_height
);
assert!(refreshed);
let (_, txs) = api.retrieve_txs(true, None)?;
// we should have a transaction entry for this slate
@@ -430,7 +413,12 @@ fn tx_rollback(test_dir: &str, backend_type: common::BackendType) -> Result<(),
api.cancel_tx(tx.id)?;
let (refreshed, wallet1_info) = api.retrieve_summary_info(true)?;
assert!(refreshed);
println!(
"last confirmed height: {}",
wallet1_info.last_confirmed_height
);
// check all eligible inputs should now be spendable
println!("cm: {}", cm);
assert_eq!(
wallet1_info.amount_currently_spendable,
(wallet1_info.last_confirmed_height - cm) * reward
@@ -467,25 +455,18 @@ fn tx_rollback(test_dir: &str, backend_type: common::BackendType) -> Result<(),
Ok(())
}
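The extra println calls around the cancel make the closing balance check easier to follow: once the unposted transaction is cancelled, everything eligible becomes spendable again, which is one full coinbase reward for every confirmed block past the maturity window. Restated as a sketch using the reward and cm bindings from the top of this test:

// Sketch only: each mined block pays `reward`, but the newest `cm` coinbase
// outputs are still immature, so after the cancel:
let expected = (wallet1_info.last_confirmed_height - cm) * reward;
assert_eq!(wallet1_info.amount_currently_spendable, expected);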
#[ignore]
#[test]
fn file_wallet_basic_transaction_api() {
let test_dir = "test_output/basic_transaction_api_file";
let _ = basic_transaction_api(test_dir, common::BackendType::FileBackend);
}
#[test]
fn db_wallet_basic_transaction_api() {
let test_dir = "test_output/basic_transaction_api_db";
if let Err(e) = basic_transaction_api(test_dir, common::BackendType::LMDBBackend) {
let test_dir = "test_output/basic_transaction_api";
if let Err(e) = basic_transaction_api(test_dir) {
println!("Libwallet Error: {}", e);
}
}
#[test]
fn db_wallet_tx_rollback() {
let test_dir = "test_output/tx_rollback_db";
if let Err(e) = tx_rollback(test_dir, common::BackendType::LMDBBackend) {
let test_dir = "test_output/tx_rollback";
if let Err(e) = tx_rollback(test_dir) {
println!("Libwallet Error: {}", e);
}
}