[WIP] Core PMMR and API updates to support wallet restore (#950)

* update pmmr to get batch of elements by insertion position

* update pmmr to get batch of elements by insertion position

* add api + chain calls to get traversed outputs back out

* add api + chain calls to get traversed outputs back out

* first pass getting wallet restore to work again with updated utxo-walking api
This commit is contained in:
Yeastplume 2018-04-11 10:02:07 +01:00 committed by GitHub
parent cf2ffbc11a
commit dcdf654bc9
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
7 changed files with 283 additions and 112 deletions

View file

@ -127,7 +127,12 @@ impl OutputHandler {
.iter()
.filter(|output| commitments.is_empty() || commitments.contains(&output.commit))
.map(|output| {
OutputPrintable::from_output(output, w(&self.chain), &header, include_proof)
OutputPrintable::from_output(
output,
w(&self.chain),
Some(&header),
include_proof,
)
})
.collect();
@ -226,6 +231,9 @@ impl Handler for OutputHandler {
// GET /v1/txhashset/lastoutputs?n=5
// GET /v1/txhashset/lastrangeproofs
// GET /v1/txhashset/lastkernels
// UTXO traversal:
// GET /v1/txhashset/outputs?start_index=1&max=100
/// Handler for txhashset queries: MMR roots, last-n node listings and
/// paginated UTXO traversal.
struct TxHashSetHandler {
    // weak reference so the handler does not keep the chain alive by itself
    chain: Weak<chain::Chain>,
}
@ -250,12 +258,34 @@ impl TxHashSetHandler {
/// Returns the last `distance` nodes inserted into the kernel MMR.
fn get_last_n_kernel(&self, distance: u64) -> Vec<TxHashSetNode> {
    TxHashSetNode::get_last_n_kernel(w(&self.chain), distance)
}
/// Walks a slice of the UTXO set, returning a printable listing of the
/// unspent outputs starting at the given insertion index (at most `max`,
/// capped at 1000 per request).
fn outputs(&self, start_index: u64, max: u64) -> OutputListing {
    // cap the page size so a single request can't walk the whole set
    let limit = if max > 1000 { 1000 } else { max };
    let (last_retrieved_index, highest_index, raw_outputs) = w(&self.chain)
        .unspent_outputs_by_insertion_index(start_index, limit)
        .unwrap();
    let printable_outputs = raw_outputs
        .iter()
        .map(|out| OutputPrintable::from_output(out, w(&self.chain), None, true))
        .collect();
    OutputListing {
        last_retrieved_index: last_retrieved_index,
        highest_index: highest_index,
        outputs: printable_outputs,
    }
}
}
impl Handler for TxHashSetHandler {
fn handle(&self, req: &mut Request) -> IronResult<Response> {
let url = req.url.clone();
let mut path_elems = url.path();
let mut start_index = 1;
let mut max = 100;
if *path_elems.last().unwrap() == "" {
path_elems.pop();
}
@ -269,12 +299,27 @@ impl Handler for TxHashSetHandler {
}
}
}
if let Some(start_indexes) = params.get("start_index") {
for si in start_indexes {
if let Ok(s) = str::parse(si) {
start_index = s;
}
}
}
if let Some(maxes) = params.get("max") {
for ma in maxes {
if let Ok(m) = str::parse(ma) {
max = m;
}
}
}
}
match *path_elems.last().unwrap() {
"roots" => json_response_pretty(&self.get_roots()),
"lastoutputs" => json_response_pretty(&self.get_last_n_output(last_n)),
"lastrangeproofs" => json_response_pretty(&self.get_last_n_rangeproof(last_n)),
"lastkernels" => json_response_pretty(&self.get_last_n_kernel(last_n)),
"outputs" => json_response_pretty(&self.outputs(start_index, max)),
_ => Ok(Response::with((status::BadRequest, ""))),
}
}
@ -710,6 +755,7 @@ pub fn start_rest_apis<T>(
"get txhashset/lastoutputs?n=10".to_string(),
"get txhashset/lastrangeproofs".to_string(),
"get txhashset/lastkernels".to_string(),
"get txhashset/outputs?start_index=1&max=100".to_string(),
"get pool".to_string(),
"post pool/push".to_string(),
"post peers/a.b.c.d:p/ban".to_string(),

View file

@ -240,7 +240,7 @@ impl OutputPrintable {
pub fn from_output(
output: &core::Output,
chain: Arc<chain::Chain>,
block_header: &core::BlockHeader,
block_header: Option<&core::BlockHeader>,
include_proof: bool,
) -> OutputPrintable {
let output_type = if output
@ -269,8 +269,9 @@ impl OutputPrintable {
if output
.features
.contains(core::transaction::OutputFeatures::COINBASE_OUTPUT) && !spent
&& block_header.is_some()
{
merkle_proof = chain.get_merkle_proof(&out_id, &block_header).ok()
merkle_proof = chain.get_merkle_proof(&out_id, &block_header.unwrap()).ok()
};
OutputPrintable {
@ -527,7 +528,12 @@ impl BlockPrintable {
.outputs
.iter()
.map(|output| {
OutputPrintable::from_output(output, chain.clone(), &block.header, include_proof)
OutputPrintable::from_output(
output,
chain.clone(),
Some(&block.header),
include_proof,
)
})
.collect();
let kernels = block
@ -566,7 +572,7 @@ impl CompactBlockPrintable {
let block = chain.get_block(&cb.hash()).unwrap();
let out_full = cb.out_full
.iter()
.map(|x| OutputPrintable::from_output(x, chain.clone(), &block.header, false))
.map(|x| OutputPrintable::from_output(x, chain.clone(), Some(&block.header), false))
.collect();
let kern_full = cb.kern_full
.iter()
@ -591,6 +597,18 @@ pub struct BlockOutputs {
pub outputs: Vec<OutputPrintable>,
}
// For traversing all outputs in the UTXO set
// (a paginated slice of the output MMR, keyed by insertion index)
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct OutputListing {
    /// The last available output index
    pub highest_index: u64,
    /// The last insertion index retrieved
    pub last_retrieved_index: u64,
    /// A printable version of the outputs
    pub outputs: Vec<OutputPrintable>,
}
#[derive(Serialize, Deserialize)]
pub struct PoolInfo {
/// Size of the pool

View file

@ -20,7 +20,7 @@ use std::fs::File;
use std::sync::{Arc, Mutex, RwLock};
use std::time::{Duration, Instant};
use core::core::{Block, BlockHeader, Input, OutputFeatures, OutputIdentifier, TxKernel};
use core::core::{Block, BlockHeader, Input, Output, OutputFeatures, OutputIdentifier, TxKernel};
use core::core::hash::{Hash, Hashed};
use core::core::pmmr::MerkleProof;
use core::core::target::Difficulty;
@ -657,6 +657,32 @@ impl Chain {
txhashset.last_n_kernel(distance)
}
/// Returns unspent outputs (paired with their rangeproofs) starting at the
/// given insertion index, up to `max` entries. The result tuple is
/// (last retrieved insertion index, highest available index, outputs).
pub fn unspent_outputs_by_insertion_index(
    &self,
    start_index: u64,
    max: u64,
) -> Result<(u64, u64, Vec<Output>), Error> {
    let mut txhashset = self.txhashset.write().unwrap();
    let max_index = txhashset.highest_output_insertion_index();
    let (out_last, out_ids) = txhashset.outputs_by_insertion_index(start_index, max);
    let (rp_last, rproofs) = txhashset.rangeproofs_by_insertion_index(start_index, max);
    // the output and rangeproof MMRs are written in lockstep,
    // so the two slices must line up exactly
    if out_last != rp_last || out_ids.len() != rproofs.len() {
        return Err(Error::TxHashSetErr(String::from(
            "Output and rangeproof sets don't match",
        )));
    }
    let output_vec: Vec<Output> = out_ids
        .iter()
        .zip(rproofs.iter())
        .map(|(id, proof)| Output {
            commit: id.commit,
            features: id.features,
            proof: *proof,
        })
        .collect();
    Ok((out_last, max_index, output_vec))
}
/// Total difficulty at the head of the chain
pub fn total_difficulty(&self) -> Difficulty {
self.head.lock().unwrap().clone().total_difficulty

View file

@ -183,6 +183,34 @@ impl TxHashSet {
kernel_pmmr.get_last_n_insertions(distance)
}
/// returns outputs from the given insertion (leaf) index up to the specified
/// limit. Also returns the last index actually populated
pub fn outputs_by_insertion_index(
    &mut self,
    start_index: u64,
    max_count: u64,
) -> (u64, Vec<OutputIdentifier>) {
    // bind a PMMR view over the output backend at its current tip
    let output_pmmr: PMMR<OutputIdentifier, _> =
        PMMR::at(&mut self.output_pmmr_h.backend, self.output_pmmr_h.last_pos);
    output_pmmr.elements_from_insertion_index(start_index, max_count)
}
/// highest output insertion index available
pub fn highest_output_insertion_index(&mut self) -> u64 {
    // leaf count of the output MMR == number of outputs ever inserted
    pmmr::n_leaves(self.output_pmmr_h.last_pos)
}
/// As above, for rangeproofs: returns rangeproofs from the given insertion
/// index up to `max_count`, plus the last index actually populated.
pub fn rangeproofs_by_insertion_index(
    &mut self,
    start_index: u64,
    max_count: u64,
) -> (u64, Vec<RangeProof>) {
    let rproof_pmmr: PMMR<RangeProof, _> =
        PMMR::at(&mut self.rproof_pmmr_h.backend, self.rproof_pmmr_h.last_pos);
    rproof_pmmr.elements_from_insertion_index(start_index, max_count)
}
/// Output and kernel MMR indexes at the end of the provided block
pub fn indexes_at(&self, bh: &Hash) -> Result<(u64, u64), Error> {
self.commit_index.get_block_marker(bh).map_err(&From::from)

View file

@ -37,7 +37,7 @@
use std::clone::Clone;
use std::marker::PhantomData;
use core::hash::{Hash};
use core::hash::Hash;
use ser;
use ser::{Readable, Reader, Writeable, Writer};
use ser::{PMMRIndexHashable, PMMRable};
@ -222,8 +222,8 @@ impl MerkleProof {
let mut bagged = None;
for peak in self.peaks.iter().rev() {
bagged = match bagged {
None => Some(*peak),
Some(rhs) => Some((*peak,rhs).hash_with_index(self.mmr_size)),
None => Some(*peak),
Some(rhs) => Some((*peak, rhs).hash_with_index(self.mmr_size)),
}
}
return bagged == Some(self.root);
@ -236,9 +236,9 @@ impl MerkleProof {
// hash our node and sibling together (noting left/right position of the
// sibling)
let parent = if is_left_sibling(sibling_pos) {
(sibling, self.node).hash_with_index(parent_pos-1)
(sibling, self.node).hash_with_index(parent_pos - 1)
} else {
(self.node, sibling).hash_with_index(parent_pos-1)
(self.node, sibling).hash_with_index(parent_pos - 1)
};
let proof = MerkleProof {
@ -315,8 +315,8 @@ where
let mut res = None;
for peak in self.peaks().iter().rev() {
res = match res {
None => Some(*peak),
Some(rhash) => Some((*peak,rhash).hash_with_index(self.unpruned_size())),
None => Some(*peak),
Some(rhash) => Some((*peak, rhash).hash_with_index(self.unpruned_size())),
}
}
res.expect("no root, invalid tree")
@ -370,7 +370,7 @@ where
/// the same time if applicable.
pub fn push(&mut self, elmt: T) -> Result<u64, String> {
let elmt_pos = self.last_pos + 1;
let mut current_hash = elmt.hash_with_index(elmt_pos-1);
let mut current_hash = elmt.hash_with_index(elmt_pos - 1);
let mut to_append = vec![(current_hash, Some(elmt))];
let mut height = 0;
@ -390,7 +390,7 @@ where
height += 1;
pos += 1;
current_hash = (left_hash, current_hash).hash_with_index(pos-1);
current_hash = (left_hash, current_hash).hash_with_index(pos - 1);
to_append.push((current_hash.clone(), None));
}
@ -525,6 +525,27 @@ where
return_vec
}
/// Returns the data of un-pruned leaves starting at the given insertion
/// (leaf) index, collecting at most `max_count` elements. The returned
/// tuple is (last insertion index actually returned, element data).
pub fn elements_from_insertion_index(&self, index: u64, max_count: u64) -> (u64, Vec<T>) {
    // insertion indexes are 1-based; treat 0 as the first leaf
    let mut insert_idx = if index == 0 { 1 } else { index };
    let mut last_returned = insert_idx;
    let mut elems: Vec<T> = vec![];
    loop {
        if elems.len() as u64 >= max_count {
            break;
        }
        let pmmr_pos = insertion_to_pmmr_index(insert_idx);
        if pmmr_pos > self.last_pos {
            break;
        }
        // pruned leaves yield no data and are simply skipped over
        if let Some(data) = self.get_data(pmmr_pos) {
            elems.push(data);
            last_returned = insert_idx;
        }
        insert_idx += 1;
    }
    (last_returned, elems)
}
/// Walks all unpruned nodes in the MMR and revalidate all parent hashes
pub fn validate(&self) -> Result<(), String> {
// iterate on all parent nodes
@ -540,7 +561,9 @@ where
if let Some(right_child_hs) = self.get_from_file(right_pos) {
// hash the two child nodes together with parent_pos and compare
let (parent_pos, _) = family(left_pos);
if (left_child_hs, right_child_hs).hash_with_index(parent_pos-1) != hash {
if (left_child_hs, right_child_hs).hash_with_index(parent_pos - 1)
!= hash
{
return Err(format!(
"Invalid MMR, hash of parent at {} does \
not match children.",
@ -824,6 +847,13 @@ pub fn n_leaves(mut sz: u64) -> u64 {
.sum()
}
/// Returns the pmmr index of the nth inserted element
pub fn insertion_to_pmmr_index(sz: u64) -> u64 {
    // insertion indexes are 1-based; shift to 0-based before applying the
    // leaf-position formula (pmmr positions are themselves 1-based)
    let n = sz - 1;
    2 * n - u64::from(n.count_ones()) + 1
}
/// The height of a node in a full binary tree from its postorder traversal
/// index. This function is the base on which all others, as well as the MMR,
/// are built.
@ -1472,7 +1502,10 @@ mod test {
pmmr.push(elems[6]).unwrap();
let pos_10 = elems[6].hash_with_index(10);
assert_eq!(pmmr.peaks(), vec![pos_6, pos_9, pos_10]);
assert_eq!(pmmr.root(), (pos_6, (pos_9, pos_10).hash_with_index(11)).hash_with_index(11));
assert_eq!(
pmmr.root(),
(pos_6, (pos_9, pos_10).hash_with_index(11)).hash_with_index(11)
);
assert_eq!(pmmr.unpruned_size(), 11);
// 001001200100123
@ -1901,11 +1934,10 @@ mod test {
assert_eq!(n_leaves(10), 6);
}
#[test]
fn check_all_ones() {
for i in 0..1000000 {
assert_eq!(old_all_ones(i),all_ones(i));
assert_eq!(old_all_ones(i), all_ones(i));
}
}
@ -1927,7 +1959,7 @@ mod test {
#[test]
fn check_most_significant_pos() {
for i in 0u64..1000000 {
assert_eq!(old_most_significant_pos(i),most_significant_pos(i));
assert_eq!(old_most_significant_pos(i), most_significant_pos(i));
}
}
@ -1941,4 +1973,57 @@ mod test {
}
pos
}
#[test]
fn check_insertion_to_pmmr_index() {
    // (insertion index, expected pmmr position) pairs for the first leaves
    let cases: [(u64, u64); 8] = [
        (1, 1),
        (2, 2),
        (3, 4),
        (4, 5),
        (5, 8),
        (6, 9),
        (7, 11),
        (8, 12),
    ];
    for &(insertion, expected) in cases.iter() {
        assert_eq!(insertion_to_pmmr_index(insertion), expected);
    }
}
#[test]
fn check_elements_from_insertion_index() {
    let mut ba = VecBackend::new();
    let mut pmmr = PMMR::new(&mut ba);
    // insert 999 leaves whose payloads carry their own insertion index
    for x in 1..1000 {
        pmmr.push(TestElem([0, 0, 0, x])).unwrap();
    }
    // Normal case
    let res = pmmr.elements_from_insertion_index(1, 100);
    assert_eq!(res.0, 100);
    assert_eq!(res.1.len(), 100);
    assert_eq!(res.1[0].0[3], 1);
    assert_eq!(res.1[99].0[3], 100);
    // middle of pack
    let res = pmmr.elements_from_insertion_index(351, 70);
    assert_eq!(res.0, 420);
    assert_eq!(res.1.len(), 70);
    assert_eq!(res.1[0].0[3], 351);
    assert_eq!(res.1[69].0[3], 420);
    // past the end: max_count exceeds remaining leaves, so we get what's left
    let res = pmmr.elements_from_insertion_index(650, 1000);
    assert_eq!(res.0, 999);
    assert_eq!(res.1.len(), 350);
    assert_eq!(res.1[0].0[3], 650);
    assert_eq!(res.1[349].0[3], 999);
    // pruning a few nodes should get consistent results
    // (pruned leaves are skipped, shrinking the returned set by 5)
    pmmr.prune(insertion_to_pmmr_index(650), 0).unwrap();
    pmmr.prune(insertion_to_pmmr_index(651), 0).unwrap();
    pmmr.prune(insertion_to_pmmr_index(800), 0).unwrap();
    pmmr.prune(insertion_to_pmmr_index(900), 0).unwrap();
    pmmr.prune(insertion_to_pmmr_index(998), 0).unwrap();
    let res = pmmr.elements_from_insertion_index(650, 1000);
    assert_eq!(res.0, 999);
    assert_eq!(res.1.len(), 345);
    // first un-pruned leaf after 650/651 is 652
    assert_eq!(res.1[0].0[3], 652);
    assert_eq!(res.1[344].0[3], 999);
}
}

View file

@ -208,14 +208,6 @@ fn main() {
.long("api_server_address")
.help("Api address of running node on which to check inputs and post transactions")
.takes_value(true))
.arg(Arg::with_name("key_derivations")
.help("The number of keys possiblities to search for each output. \
Ideally, set this to a number greater than the number of outputs \
you believe should belong to this seed/password.")
.short("k")
.long("key_derivations")
.default_value("1000")
.takes_value(true))
.subcommand(SubCommand::with_name("listen")
.about("Runs the wallet in listening mode waiting for transactions.")
@ -528,12 +520,6 @@ fn wallet_command(wallet_args: &ArgMatches, global_config: GlobalConfig) {
wallet_config.check_node_api_http_addr = sa.to_string().clone();
}
let key_derivations: u32 = wallet_args
.value_of("key_derivations")
.unwrap()
.parse()
.unwrap();
let mut show_spent = false;
if wallet_args.is_present("show_spent") {
show_spent = true;
@ -656,7 +642,7 @@ fn wallet_command(wallet_args: &ArgMatches, global_config: GlobalConfig) {
wallet::show_outputs(&wallet_config, &keychain, show_spent);
}
("restore", Some(_)) => {
let _ = wallet::restore(&wallet_config, &keychain, key_derivations);
let _ = wallet::restore(&wallet_config, &keychain);
}
_ => panic!("Unknown wallet command, use 'grin help wallet' for details"),
}

View file

@ -21,7 +21,7 @@ use core::core::transaction::ProofMessageElements;
use types::{Error, ErrorKind, OutputData, OutputStatus, WalletConfig, WalletData};
use byteorder::{BigEndian, ByteOrder};
pub fn get_chain_height(config: &WalletConfig) -> Result<u64, Error> {
pub fn _get_chain_height(config: &WalletConfig) -> Result<u64, Error> {
let url = format!("{}/v1/chain", config.check_node_api_http_addr);
match api::client::get::<api::Tip>(url.as_str()) {
@ -46,28 +46,25 @@ fn coinbase_status(output: &api::OutputPrintable) -> bool {
}
}
pub fn outputs_batch_block(
pub fn outputs_batch(
config: &WalletConfig,
start_height: u64,
end_height: u64,
) -> Result<Vec<api::BlockOutputs>, Error> {
let query_param = format!(
"start_height={}&end_height={}&include_rp",
start_height, end_height
);
max: u64,
) -> Result<api::OutputListing, Error> {
let query_param = format!("start_index={}&max={}", start_height, max);
let url = format!(
"{}/v1/chain/outputs/byheight?{}",
"{}/v1/txhashset/outputs?{}",
config.check_node_api_http_addr, query_param,
);
match api::client::get::<Vec<api::BlockOutputs>>(url.as_str()) {
Ok(outputs) => Ok(outputs),
match api::client::get::<api::OutputListing>(url.as_str()) {
Ok(o) => Ok(o),
Err(e) => {
// if we got anything other than 200 back from server, bye
error!(
LOGGER,
"outputs_batch_block: Restore failed... unable to contact API {}. Error: {}",
"outputs_batch: Restore failed... unable to contact API {}. Error: {}",
config.check_node_api_http_addr,
e
);
@ -79,23 +76,18 @@ pub fn outputs_batch_block(
// TODO - wrap the many return values in a struct
fn find_outputs_with_key(
keychain: &Keychain,
block_outputs: api::BlockOutputs,
key_iterations: &mut usize,
outputs: Vec<api::OutputPrintable>,
) -> Vec<(pedersen::Commitment, Identifier, u32, u64, u64, u64, bool)> {
let mut wallet_outputs: Vec<(pedersen::Commitment, Identifier, u32, u64, u64, u64, bool)> =
Vec::new();
info!(
LOGGER,
"Scanning block {}, {} outputs, over {} key derivations",
block_outputs.header.height,
block_outputs.outputs.len(),
*key_iterations,
);
let max_derivations = 1_000_000;
info!(LOGGER, "Scanning {} outputs", outputs.len(),);
// skey doesn't matter in this case
let skey = keychain.derive_key_id(1).unwrap();
for output in block_outputs.outputs.iter().filter(|x| !x.spent) {
for output in outputs.iter().filter(|x| !x.spent) {
// attempt to unwind message from the RP and get a value.. note
// this will only return okay if the value is included in the
// message 3 times, indicating a strong match. Also, sec_key provided
@ -111,7 +103,7 @@ fn find_outputs_with_key(
}
// we have a match, now check through our key iterations to find a partial match
let mut found = false;
for i in 1..*key_iterations {
for i in 1..max_derivations {
let key_id = &keychain.derive_key_id(i as u32).unwrap();
if !message.compare_bf_first_8(key_id) {
continue;
@ -143,7 +135,8 @@ fn find_outputs_with_key(
.commit_with_key_index(BigEndian::read_u64(&commit_id), i as u32)
.expect("commit with key index");
let height = block_outputs.header.height;
//let height = outputs.header.height;
let height = 0;
let lock_height = if is_coinbase {
height + global::coinbase_maturity()
} else {
@ -159,31 +152,25 @@ fn find_outputs_with_key(
lock_height,
is_coinbase,
));
break;
}
if !found {
warn!(
LOGGER,
"Very probable matching output found with amount: {} \
run restore again with a larger value of key_iterations to claim",
message.value().unwrap()
but didn't match key child key up to {}",
message.value().unwrap(),
max_derivations,
);
}
}
debug!(
LOGGER,
"Found {} wallet_outputs for block {}",
wallet_outputs.len(),
block_outputs.header.height,
);
debug!(LOGGER, "Found {} wallet_outputs", wallet_outputs.len(),);
wallet_outputs
}
pub fn restore(
config: &WalletConfig,
keychain: &Keychain,
key_derivations: u32,
) -> Result<(), Error> {
pub fn restore(config: &WalletConfig, keychain: &Keychain) -> Result<(), Error> {
// Don't proceed if wallet.dat has anything in it
let is_empty = WalletData::read_wallet(&config.data_file_dir, |wallet_data| {
Ok(wallet_data.outputs.len() == 0)
@ -196,53 +183,48 @@ pub fn restore(
return Ok(());
}
// Get height of chain from node (we'll check again when done)
let chain_height = get_chain_height(config)?;
info!(
LOGGER,
"Starting restore: Chain height is {}.", chain_height
);
info!(LOGGER, "Starting restore.");
let batch_size = 100;
let batch_size = 1000;
let mut start_index = 1;
// this will start here, then lower as outputs are found, moving backwards on
// the chain
let mut key_iterations = key_derivations as usize;
let mut h = chain_height;
while {
let end_batch = h;
if h >= batch_size {
h -= batch_size;
} else {
h = 0;
}
let mut blocks = outputs_batch_block(config, h + 1, end_batch)?;
blocks.reverse();
loop {
let output_listing = outputs_batch(config, start_index, batch_size)?;
info!(
LOGGER,
"Retrieved {} outputs, up to index {}. (Highest index: {})",
output_listing.outputs.len(),
output_listing.last_retrieved_index,
output_listing.highest_index
);
let _ = WalletData::with_wallet(&config.data_file_dir, |wallet_data| {
for block in blocks {
let result_vec = find_outputs_with_key(keychain, block, &mut key_iterations);
if result_vec.len() > 0 {
for output in result_vec.clone() {
let root_key_id = keychain.root_key_id();
// Just plonk it in for now, and refresh actual values via wallet info
// command later
wallet_data.add_output(OutputData {
root_key_id: root_key_id.clone(),
key_id: output.1.clone(),
n_child: output.2,
value: output.3,
status: OutputStatus::Unconfirmed,
height: output.4,
lock_height: output.5,
is_coinbase: output.6,
block: None,
merkle_proof: None,
});
}
let result_vec = find_outputs_with_key(keychain, output_listing.outputs.clone());
if result_vec.len() > 0 {
for output in result_vec.clone() {
let root_key_id = keychain.root_key_id();
// Just plonk it in for now, and refresh actual values via wallet info
// command later
wallet_data.add_output(OutputData {
root_key_id: root_key_id.clone(),
key_id: output.1.clone(),
n_child: output.2,
value: output.3,
status: OutputStatus::Unconfirmed,
height: output.4,
lock_height: output.5,
is_coinbase: output.6,
block: None,
merkle_proof: None,
});
}
}
});
h > 0
} {}
if output_listing.highest_index == output_listing.last_retrieved_index {
break;
}
start_index = output_listing.last_retrieved_index + 1;
}
Ok(())
}