Mirror of https://github.com/mimblewimble/grin.git, synced 2025-02-01 17:01:09 +03:00
Wallet checker cleanup, remove old unconfirmed outputs (#1084)
Use the standard Rust `chunks` function instead of hand-coded cursors. Add a cleanup function to get rid of old unconfirmed outputs.
This commit is contained in:
parent 3525df1bf1
commit ff5d651b6f
1 changed file with 53 additions and 83 deletions
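The core of the cleanup is swapping a hand-rolled skip/take cursor for the standard library's `slice::chunks`. Below is a minimal standalone sketch of the two patterns, not the wallet code itself: the `ids` vector and counts are invented for illustration, and only the `chunks(1000)`-style call mirrors the actual change in the diff that follows.

// Standalone sketch: paging a list of query parameters with a hand-rolled
// cursor versus the standard `slice::chunks`.
fn main() {
    let ids: Vec<String> = (0..2500).map(|i| format!("id={}", i)).collect();
    let max_ids_in_query = 1000;

    // Hand-coded cursor: track an index, skip/take a page, stop when it comes back empty.
    let mut current_index = 0;
    loop {
        let page: Vec<&String> = ids.iter().skip(current_index).take(max_ids_in_query).collect();
        if page.is_empty() {
            break;
        }
        current_index += page.len();
        println!("cursor page of {} ids", page.len());
    }

    // `chunks` borrows fixed-size slices directly, with no index bookkeeping.
    for chunk in ids.chunks(max_ids_in_query) {
        println!("chunk of {} ids", chunk.len());
    }
}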
@@ -15,26 +15,31 @@
 //! Utilities to check the status of all the outputs we have stored in
 //! the wallet storage and update them.

-use std::collections::hash_map::Entry;
-use std::collections::HashMap;
 use failure::ResultExt;
+use std::collections::HashMap;
+use std::collections::hash_map::Entry;

 use api;
-use types::*;
 use keychain::{Identifier, Keychain};
-use util::secp::pedersen;
+use types::*;
 use util;
 use util::LOGGER;
+use util::secp::pedersen;

 pub fn refresh_outputs(config: &WalletConfig, keychain: &Keychain) -> Result<(), Error> {
-	refresh_output_state(config, keychain)?;
-	refresh_missing_block_hashes(config, keychain)?;
+	let tip = get_tip_from_node(config)?;
+	refresh_output_state(config, keychain, &tip)?;
+	refresh_missing_block_hashes(config, keychain, &tip)?;
 	Ok(())
 }

 // TODO - this might be slow if we have really old outputs that have never been
 // refreshed
-fn refresh_missing_block_hashes(config: &WalletConfig, keychain: &Keychain) -> Result<(), Error> {
+fn refresh_missing_block_hashes(
+	config: &WalletConfig,
+	keychain: &Keychain,
+	tip: &api::Tip,
+) -> Result<(), Error> {
 	// build a local map of wallet outputs keyed by commit
 	// and a list of outputs we want to query the node for
 	let wallet_outputs = map_wallet_outputs_missing_block(config, keychain)?;
@@ -52,46 +57,21 @@ fn refresh_missing_block_hashes(config: &WalletConfig, keychain: &Keychain) -> Result<(), Error> {

 	let id_params: Vec<String> = wallet_outputs
 		.keys()
-		.map(|commit| {
-			let id = util::to_hex(commit.as_ref().to_vec());
-			format!("id={}", id)
-		})
+		.map(|commit| format!("id={}", util::to_hex(commit.as_ref().to_vec())))
 		.collect();
-
-	let tip = get_tip_from_node(config)?;
-
-	let max_ids_in_query = 1000;
-	let mut current_index = 0;
+	let height_params = [format!("start_height={}&end_height={}", 0, tip.height)];

 	let mut api_blocks: HashMap<pedersen::Commitment, api::BlockHeaderInfo> = HashMap::new();
 	let mut api_merkle_proofs: HashMap<pedersen::Commitment, MerkleProofWrapper> = HashMap::new();

 	// Split up into separate requests, to avoid hitting http limits
-	loop {
-		let q = id_params.clone();
-		let mut cur_params: Vec<String> = q.into_iter()
-			.skip(current_index)
-			.take(max_ids_in_query)
-			.collect();
-
-		if cur_params.len() == 0 {
-			break;
-		}
-
-		debug!(LOGGER, "Splitting query: {} ids", cur_params.len());
-
-		current_index = current_index + cur_params.len();
-
-		let height_params = format!("start_height={}&end_height={}", 0, tip.height,);
-		let mut query_params = vec![height_params];
-		query_params.append(&mut cur_params);
-
+	for mut query_chunk in id_params.chunks(1000) {
 		let url = format!(
 			"{}/v1/chain/outputs/byheight?{}",
 			config.check_node_api_http_addr,
-			query_params.join("&"),
+			[&height_params, query_chunk].concat().join("&"),
 		);
 		debug!(LOGGER, "{:?}", url);

 		match api::client::get::<Vec<api::BlockOutputs>>(url.as_str()) {
 			Ok(blocks) => for block in blocks {
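For the `outputs/byheight` requests above, the height range stays the same for every call while the ids are spread over chunks of at most 1000; each URL is the height parameter plus one chunk of ids, joined with `&`. A rough standalone sketch of that assembly follows; the node address, tip height, ids, and the chunk size of 2 are all made up for illustration.

// Standalone sketch with made-up values: one request URL per chunk of ids,
// each prefixed with the same height range.
fn main() {
    let node_addr = "http://127.0.0.1:13413"; // placeholder node API address
    let tip_height = 12_345u64;
    let id_params: Vec<String> = (0..5).map(|i| format!("id=commit{}", i)).collect();

    // The height range is identical for every request.
    let height_params = [format!("start_height={}&end_height={}", 0, tip_height)];

    // Chunk size of 2 here just to show several URLs; the wallet uses 1000.
    for query_chunk in id_params.chunks(2) {
        let query = [&height_params[..], query_chunk].concat().join("&");
        let url = format!("{}/v1/chain/outputs/byheight?{}", node_addr, query);
        println!("{}", url);
    }
}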
@@ -105,7 +85,6 @@ fn refresh_missing_block_hashes(config: &WalletConfig, keychain: &Keychain) -> Result<(), Error> {
 			},
 			Err(e) => {
 				// if we got anything other than 200 back from server, bye
 				error!(LOGGER, "Refresh failed... unable to contact node: {}", e);
 				return Err(e).context(ErrorKind::Node)?;
 			}
 		}
@@ -139,18 +118,19 @@ pub fn map_wallet_outputs(
 	keychain: &Keychain,
 ) -> Result<HashMap<pedersen::Commitment, Identifier>, Error> {
 	let mut wallet_outputs: HashMap<pedersen::Commitment, Identifier> = HashMap::new();
-	let _ =
-		WalletData::read_wallet(&config.data_file_dir, |wallet_data| {
-			for out in wallet_data.outputs.values().filter(|x| {
-				x.root_key_id == keychain.root_key_id() && x.status != OutputStatus::Spent
-			}) {
-				let commit = keychain
-					.commit_with_key_index(out.value, out.n_child)
-					.context(ErrorKind::Keychain)?;
-				wallet_outputs.insert(commit, out.key_id.clone());
-			}
-			Ok(())
-		});
+	let _ = WalletData::read_wallet(&config.data_file_dir, |wallet_data| {
+		let unspents = wallet_data
+			.outputs
+			.values()
+			.filter(|x| x.root_key_id == keychain.root_key_id() && x.status != OutputStatus::Spent);
+		for out in unspents {
+			let commit = keychain
+				.commit_with_key_index(out.value, out.n_child)
+				.context(ErrorKind::Keychain)?;
+			wallet_outputs.insert(commit, out.key_id.clone());
+		}
+		Ok(())
+	});
 	Ok(wallet_outputs)
 }

@@ -182,9 +162,8 @@ pub fn apply_api_outputs(
 	wallet_outputs: &HashMap<pedersen::Commitment, Identifier>,
 	api_outputs: &HashMap<pedersen::Commitment, api::Output>,
 ) -> Result<(), Error> {
-	// now for each commit, find the output in the wallet and
-	// the corresponding api output (if it exists)
-	// and refresh it in-place in the wallet.
+	// now for each commit, find the output in the wallet and the corresponding
+	// api output (if it exists) and refresh it in-place in the wallet.
 	// Note: minimizing the time we spend holding the wallet lock.
 	WalletData::with_wallet(&config.data_file_dir, |wallet_data| {
 		for commit in wallet_outputs.keys() {
@@ -201,48 +180,32 @@ pub fn apply_api_outputs(

 /// Builds a single api query to retrieve the latest output data from the node.
 /// So we can refresh the local wallet outputs.
-fn refresh_output_state(config: &WalletConfig, keychain: &Keychain) -> Result<(), Error> {
+fn refresh_output_state(
+	config: &WalletConfig,
+	keychain: &Keychain,
+	tip: &api::Tip,
+) -> Result<(), Error> {
 	debug!(LOGGER, "Refreshing wallet outputs");

 	// build a local map of wallet outputs keyed by commit
 	// and a list of outputs we want to query the node for
 	let wallet_outputs = map_wallet_outputs(config, keychain)?;

 	// build the necessary query params -
 	// ?id=xxx&id=yyy&id=zzz
 	let query_params: Vec<String> = wallet_outputs
 		.keys()
-		.map(|commit| {
-			let id = util::to_hex(commit.as_ref().to_vec());
-			format!("id={}", id)
-		})
+		.map(|commit| format!("id={}", util::to_hex(commit.as_ref().to_vec())))
 		.collect();

 	// build a map of api outputs by commit so we can look them up efficiently
 	let mut api_outputs: HashMap<pedersen::Commitment, api::Output> = HashMap::new();

-	let max_ids_in_query = 1000;
-	let mut current_index = 0;
-
 	// Split up into separate requests, to avoid hitting http limits
-	loop {
-		let q = query_params.clone();
-		let cur_params: Vec<String> = q.into_iter()
-			.skip(current_index)
-			.take(max_ids_in_query)
-			.collect();
-
-		if cur_params.len() == 0 {
-			break;
-		}
-
-		debug!(LOGGER, "Splitting query: {} ids", cur_params.len());
-
-		current_index = current_index + cur_params.len();
-		let query_string = cur_params.join("&");
-
+	for query_chunk in query_params.chunks(1000) {
 		let url = format!(
 			"{}/v1/chain/outputs/byids?{}",
-			config.check_node_api_http_addr, query_string,
+			config.check_node_api_http_addr,
+			query_chunk.join("&"),
 		);

 		match api::client::get::<Vec<api::Output>>(url.as_str()) {
@@ -252,18 +215,25 @@ fn refresh_output_state(config: &WalletConfig, keychain: &Keychain) -> Result<(), Error> {
 			Err(e) => {
 				// if we got anything other than 200 back from server, don't attempt to refresh
 				// the wallet data after
 				error!(
 					LOGGER,
 					"Error sending wallet refresh request to server: {:?}", e
 				);
 				return Err(e).context(ErrorKind::Node)?;
 			}
-		};
+		}
 	}

 	apply_api_outputs(config, &wallet_outputs, &api_outputs)?;
+	clean_old_unconfirmed(config, tip)?;
 	Ok(())
 }

+fn clean_old_unconfirmed(config: &WalletConfig, tip: &api::Tip) -> Result<(), Error> {
+	WalletData::with_wallet(&config.data_file_dir, |wallet_data| {
+		wallet_data.outputs.retain(|_, ref mut out| {
+			!(out.status == OutputStatus::Unconfirmed && out.height > 0
+				&& out.height < tip.height - 500)
+		});
+	})
+}
+
 pub fn get_tip_from_node(config: &WalletConfig) -> Result<api::Tip, Error> {
 	let url = format!("{}/v1/chain", config.check_node_api_http_addr);
 	api::client::get::<api::Tip>(url.as_str())
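The new clean_old_unconfirmed pass keeps every output except those that are unconfirmed, have a recorded height, and sit more than 500 blocks below the current tip. A simplified standalone sketch of the same retain rule follows, using made-up stand-ins for the wallet's own types and values.

use std::collections::HashMap;

// Simplified stand-ins for the wallet's output types, only to illustrate the rule.
#[derive(PartialEq)]
enum Status {
    Unconfirmed,
    Unspent,
}

struct Output {
    status: Status,
    height: u64,
}

fn main() {
    let tip_height: u64 = 10_000;
    let mut outputs: HashMap<String, Output> = HashMap::new();
    outputs.insert("stale".into(), Output { status: Status::Unconfirmed, height: 100 });
    outputs.insert("recent".into(), Output { status: Status::Unconfirmed, height: 9_900 });
    outputs.insert("confirmed".into(), Output { status: Status::Unspent, height: 100 });

    // Drop outputs that are unconfirmed, have a known height, and are more than
    // 500 blocks below the tip; keep everything else.
    outputs.retain(|_, out| {
        !(out.status == Status::Unconfirmed && out.height > 0 && out.height < tip_height - 500)
    });

    // Only "recent" and "confirmed" remain.
    println!("{:?}", outputs.keys().collect::<Vec<_>>());
}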