2018-03-02 23:47:27 +03:00
|
|
|
// Copyright 2018 The Grin Developers
|
2017-06-01 01:52:43 +03:00
|
|
|
//
|
|
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
// you may not use this file except in compliance with the License.
|
|
|
|
// You may obtain a copy of the License at
|
|
|
|
//
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
//
|
|
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
// See the License for the specific language governing permissions and
|
|
|
|
// limitations under the License.
|
|
|
|
|
|
|
|
//! Utilities to check the status of all the outputs we have stored in
|
|
|
|
//! the wallet storage and update them.
|
|
|
|
|
2018-03-04 03:19:54 +03:00
|
|
|
use failure::ResultExt;
|
2018-06-13 23:58:45 +03:00
|
|
|
use std::collections::hash_map::Entry;
|
2018-06-14 15:16:14 +03:00
|
|
|
use std::collections::HashMap;
|
2017-10-25 20:57:48 +03:00
|
|
|
|
2018-06-06 17:36:29 +03:00
|
|
|
use core::consensus::reward;
|
|
|
|
use core::core::{Output, TxKernel};
|
2018-06-14 15:16:14 +03:00
|
|
|
use core::{global, ser};
|
2018-06-08 08:21:54 +03:00
|
|
|
use keychain::{Identifier, Keychain};
|
2018-06-06 17:36:29 +03:00
|
|
|
use libtx::reward;
|
2018-06-14 19:02:05 +03:00
|
|
|
use libwallet;
|
2018-06-01 17:06:59 +03:00
|
|
|
use libwallet::error::{Error, ErrorKind};
|
2018-06-06 17:36:29 +03:00
|
|
|
use libwallet::internal::keys;
|
2018-06-14 15:16:14 +03:00
|
|
|
use libwallet::types::{BlockFees, CbData, OutputData, OutputStatus, WalletBackend, WalletClient,
|
|
|
|
WalletInfo};
|
2018-06-13 23:58:45 +03:00
|
|
|
use util::secp::pedersen;
|
2018-06-14 15:16:14 +03:00
|
|
|
use util::{self, LOGGER};
|
2017-06-01 01:52:43 +03:00
|
|
|
|
2018-06-06 17:36:29 +03:00
|
|
|
/// Retrieve all of the outputs (doesn't attempt to update from node)
|
2018-06-08 08:21:54 +03:00
|
|
|
pub fn retrieve_outputs<T, K>(wallet: &mut T, show_spent: bool) -> Result<Vec<OutputData>, Error>
|
|
|
|
where
|
|
|
|
T: WalletBackend<K>,
|
|
|
|
K: Keychain,
|
|
|
|
{
|
2018-06-06 17:36:29 +03:00
|
|
|
let root_key_id = wallet.keychain().clone().root_key_id();
|
|
|
|
|
|
|
|
let mut outputs = vec![];
|
|
|
|
|
|
|
|
// just read the wallet here, no need for a write lock
|
|
|
|
let _ = wallet.read_wallet(|wallet_data| {
|
|
|
|
outputs = wallet_data
|
|
|
|
.outputs()
|
|
|
|
.values()
|
|
|
|
.filter(|out| out.root_key_id == root_key_id)
|
|
|
|
.filter(|out| {
|
|
|
|
if show_spent {
|
|
|
|
true
|
|
|
|
} else {
|
|
|
|
out.status != OutputStatus::Spent
|
|
|
|
}
|
|
|
|
})
|
|
|
|
.collect::<Vec<_>>()
|
|
|
|
.iter()
|
|
|
|
.map(|&o| o.clone())
|
|
|
|
.collect();
|
|
|
|
outputs.sort_by_key(|out| out.n_child);
|
|
|
|
Ok(())
|
|
|
|
});
|
|
|
|
Ok(outputs)
|
|
|
|
}
|
|
|
|
|
2018-06-01 17:06:59 +03:00
|
|
|
/// Refreshes the outputs in a wallet with the latest information
/// from a node
pub fn refresh_outputs<T, K>(wallet: &mut T) -> Result<(), Error>
where
	T: WalletBackend<K> + WalletClient,
	K: Keychain,
{
	// use the node's current chain height as the reference point for all
	// of the updates below
	let height = wallet.get_chain_height(wallet.node_url())?;
	// refresh the spent/unspent status of every tracked output
	refresh_output_state(wallet, height)?;
	// then fill in block hashes/heights for outputs still missing them
	refresh_missing_block_hashes(wallet, height)?;
	Ok(())
}
|
2017-10-25 20:57:48 +03:00
|
|
|
|
2018-03-04 03:19:54 +03:00
|
|
|
// TODO - this might be slow if we have really old outputs that have never been
|
|
|
|
// refreshed
|
2018-06-08 08:21:54 +03:00
|
|
|
fn refresh_missing_block_hashes<T, K>(wallet: &mut T, height: u64) -> Result<(), Error>
|
2018-05-30 19:48:32 +03:00
|
|
|
where
|
2018-06-08 08:21:54 +03:00
|
|
|
T: WalletBackend<K> + WalletClient,
|
|
|
|
K: Keychain,
|
2018-05-30 19:48:32 +03:00
|
|
|
{
|
2018-01-17 06:03:40 +03:00
|
|
|
// build a local map of wallet outputs keyed by commit
|
2017-11-10 18:12:15 +03:00
|
|
|
// and a list of outputs we want to query the node for
|
2018-05-30 19:48:32 +03:00
|
|
|
let wallet_outputs = map_wallet_outputs_missing_block(wallet)?;
|
2017-10-25 20:57:48 +03:00
|
|
|
|
2018-06-07 17:04:21 +03:00
|
|
|
let wallet_output_keys = wallet_outputs.keys().map(|commit| commit.clone()).collect();
|
|
|
|
|
2018-01-17 06:03:40 +03:00
|
|
|
// nothing to do so return (otherwise we hit the api with a monster query...)
|
|
|
|
if wallet_outputs.is_empty() {
|
|
|
|
return Ok(());
|
|
|
|
}
|
|
|
|
|
|
|
|
debug!(
|
|
|
|
LOGGER,
|
|
|
|
"Refreshing missing block hashes (and heights) for {} outputs",
|
|
|
|
wallet_outputs.len(),
|
|
|
|
);
|
|
|
|
|
2018-06-07 17:04:21 +03:00
|
|
|
let (api_blocks, api_merkle_proofs) =
|
|
|
|
wallet.get_missing_block_hashes_from_node(wallet.node_url(), height, wallet_output_keys)?;
|
2017-10-25 20:57:48 +03:00
|
|
|
|
2017-10-26 00:09:34 +03:00
|
|
|
// now for each commit, find the output in the wallet and
|
2018-01-06 00:03:53 +03:00
|
|
|
// the corresponding api output (if it exists)
|
|
|
|
// and refresh it in-place in the wallet.
|
|
|
|
// Note: minimizing the time we spend holding the wallet lock.
|
2018-05-30 19:48:32 +03:00
|
|
|
wallet.with_wallet(|wallet_data| {
|
2018-01-17 06:03:40 +03:00
|
|
|
for commit in wallet_outputs.keys() {
|
2018-01-06 00:03:53 +03:00
|
|
|
let id = wallet_outputs.get(&commit).unwrap();
|
2018-05-30 19:48:32 +03:00
|
|
|
if let Entry::Occupied(mut output) = wallet_data.outputs().entry(id.to_hex()) {
|
2018-01-17 06:03:40 +03:00
|
|
|
if let Some(b) = api_blocks.get(&commit) {
|
|
|
|
let output = output.get_mut();
|
2018-06-07 17:04:21 +03:00
|
|
|
output.height = b.0;
|
|
|
|
output.block = Some(b.1.clone());
|
2018-03-02 23:47:27 +03:00
|
|
|
if let Some(merkle_proof) = api_merkle_proofs.get(&commit) {
|
|
|
|
output.merkle_proof = Some(merkle_proof.clone());
|
|
|
|
}
|
2018-01-17 06:03:40 +03:00
|
|
|
}
|
2018-01-06 00:03:53 +03:00
|
|
|
}
|
2017-06-01 01:52:43 +03:00
|
|
|
}
|
2017-09-22 19:44:12 +03:00
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2018-05-16 15:18:09 +03:00
|
|
|
/// build a local map of wallet outputs keyed by commit
|
|
|
|
/// and a list of outputs we want to query the node for
|
2018-06-08 08:21:54 +03:00
|
|
|
pub fn map_wallet_outputs<T, K>(
|
2018-05-30 19:48:32 +03:00
|
|
|
wallet: &mut T,
|
|
|
|
) -> Result<HashMap<pedersen::Commitment, Identifier>, Error>
|
|
|
|
where
|
2018-06-08 08:21:54 +03:00
|
|
|
T: WalletBackend<K>,
|
|
|
|
K: Keychain,
|
2018-05-30 19:48:32 +03:00
|
|
|
{
|
2018-01-17 06:03:40 +03:00
|
|
|
let mut wallet_outputs: HashMap<pedersen::Commitment, Identifier> = HashMap::new();
|
2018-05-30 19:48:32 +03:00
|
|
|
let _ = wallet.read_wallet(|wallet_data| {
|
|
|
|
let keychain = wallet_data.keychain().clone();
|
|
|
|
let root_key_id = keychain.root_key_id().clone();
|
2018-05-24 03:14:34 +03:00
|
|
|
let unspents = wallet_data
|
2018-05-30 19:48:32 +03:00
|
|
|
.outputs()
|
2018-05-24 03:14:34 +03:00
|
|
|
.values()
|
2018-05-30 19:48:32 +03:00
|
|
|
.filter(|x| x.root_key_id == root_key_id && x.status != OutputStatus::Spent);
|
2018-05-24 03:14:34 +03:00
|
|
|
for out in unspents {
|
2018-06-01 17:06:59 +03:00
|
|
|
let commit = keychain.commit_with_key_index(out.value, out.n_child)?;
|
2018-05-24 03:14:34 +03:00
|
|
|
wallet_outputs.insert(commit, out.key_id.clone());
|
|
|
|
}
|
|
|
|
Ok(())
|
|
|
|
});
|
2018-05-16 15:18:09 +03:00
|
|
|
Ok(wallet_outputs)
|
|
|
|
}
|
|
|
|
|
|
|
|
/// As above, but only return unspent outputs with missing block hashes
|
|
|
|
/// and a list of outputs we want to query the node for
|
2018-06-08 08:21:54 +03:00
|
|
|
pub fn map_wallet_outputs_missing_block<T, K>(
|
2018-05-30 19:48:32 +03:00
|
|
|
wallet: &mut T,
|
|
|
|
) -> Result<HashMap<pedersen::Commitment, Identifier>, Error>
|
|
|
|
where
|
2018-06-08 08:21:54 +03:00
|
|
|
T: WalletBackend<K>,
|
|
|
|
K: Keychain,
|
2018-05-30 19:48:32 +03:00
|
|
|
{
|
2018-05-16 15:18:09 +03:00
|
|
|
let mut wallet_outputs: HashMap<pedersen::Commitment, Identifier> = HashMap::new();
|
2018-05-30 19:48:32 +03:00
|
|
|
let _ = wallet.read_wallet(|wallet_data| {
|
|
|
|
let keychain = wallet_data.keychain().clone();
|
2018-06-08 08:21:54 +03:00
|
|
|
let unspents = wallet_data.outputs().values().filter(|x| {
|
|
|
|
x.root_key_id == keychain.root_key_id() && x.block.is_none()
|
2018-05-16 15:18:09 +03:00
|
|
|
&& x.status == OutputStatus::Unspent
|
2018-06-08 08:21:54 +03:00
|
|
|
});
|
|
|
|
for out in unspents {
|
2018-06-01 17:06:59 +03:00
|
|
|
let commit = keychain.commit_with_key_index(out.value, out.n_child)?;
|
2018-05-16 15:18:09 +03:00
|
|
|
wallet_outputs.insert(commit, out.key_id.clone());
|
|
|
|
}
|
|
|
|
Ok(())
|
|
|
|
});
|
|
|
|
Ok(wallet_outputs)
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Apply refreshed API output data to the wallet
|
2018-06-08 08:21:54 +03:00
|
|
|
pub fn apply_api_outputs<T, K>(
|
2018-05-30 19:48:32 +03:00
|
|
|
wallet: &mut T,
|
2018-05-16 15:18:09 +03:00
|
|
|
wallet_outputs: &HashMap<pedersen::Commitment, Identifier>,
|
2018-06-07 17:04:21 +03:00
|
|
|
api_outputs: &HashMap<pedersen::Commitment, String>,
|
2018-06-14 19:02:05 +03:00
|
|
|
height: u64,
|
|
|
|
) -> Result<(), libwallet::Error>
|
2018-05-30 19:48:32 +03:00
|
|
|
where
|
2018-06-08 08:21:54 +03:00
|
|
|
T: WalletBackend<K>,
|
|
|
|
K: Keychain,
|
2018-05-30 19:48:32 +03:00
|
|
|
{
|
2018-05-24 03:14:34 +03:00
|
|
|
// now for each commit, find the output in the wallet and the corresponding
|
|
|
|
// api output (if it exists) and refresh it in-place in the wallet.
|
2018-05-16 15:18:09 +03:00
|
|
|
// Note: minimizing the time we spend holding the wallet lock.
|
2018-05-30 19:48:32 +03:00
|
|
|
wallet.with_wallet(|wallet_data| {
|
2018-05-16 15:18:09 +03:00
|
|
|
for commit in wallet_outputs.keys() {
|
|
|
|
let id = wallet_outputs.get(&commit).unwrap();
|
2018-05-30 19:48:32 +03:00
|
|
|
if let Entry::Occupied(mut output) = wallet_data.outputs().entry(id.to_hex()) {
|
2018-05-16 15:18:09 +03:00
|
|
|
match api_outputs.get(&commit) {
|
2018-05-22 23:39:32 +03:00
|
|
|
Some(_) => output.get_mut().mark_unspent(),
|
|
|
|
None => output.get_mut().mark_spent(),
|
2018-05-16 15:18:09 +03:00
|
|
|
};
|
|
|
|
}
|
|
|
|
}
|
2018-06-14 19:02:05 +03:00
|
|
|
let details = wallet_data.details();
|
|
|
|
details.last_confirmed_height = height;
|
|
|
|
})?;
|
|
|
|
Ok(())
|
2018-05-16 15:18:09 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Builds a single api query to retrieve the latest output data from the node.
|
|
|
|
/// So we can refresh the local wallet outputs.
|
2018-06-08 08:21:54 +03:00
|
|
|
fn refresh_output_state<T, K>(wallet: &mut T, height: u64) -> Result<(), Error>
|
2018-05-30 19:48:32 +03:00
|
|
|
where
|
2018-06-08 08:21:54 +03:00
|
|
|
T: WalletBackend<K> + WalletClient,
|
|
|
|
K: Keychain,
|
2018-05-30 19:48:32 +03:00
|
|
|
{
|
2018-05-16 15:18:09 +03:00
|
|
|
debug!(LOGGER, "Refreshing wallet outputs");
|
2018-01-17 06:03:40 +03:00
|
|
|
|
2018-05-16 15:18:09 +03:00
|
|
|
// build a local map of wallet outputs keyed by commit
|
|
|
|
// and a list of outputs we want to query the node for
|
2018-05-30 19:48:32 +03:00
|
|
|
let wallet_outputs = map_wallet_outputs(wallet)?;
|
2018-05-24 03:14:34 +03:00
|
|
|
|
2018-06-07 17:04:21 +03:00
|
|
|
let wallet_output_keys = wallet_outputs.keys().map(|commit| commit.clone()).collect();
|
2018-05-24 03:14:34 +03:00
|
|
|
|
2018-06-07 17:04:21 +03:00
|
|
|
let api_outputs = wallet.get_outputs_from_node(wallet.node_url(), wallet_output_keys)?;
|
2018-06-14 19:02:05 +03:00
|
|
|
apply_api_outputs(wallet, &wallet_outputs, &api_outputs, height)?;
|
2018-06-07 17:04:21 +03:00
|
|
|
clean_old_unconfirmed(wallet, height)?;
|
2018-05-16 15:18:09 +03:00
|
|
|
Ok(())
|
2018-01-17 06:03:40 +03:00
|
|
|
}
|
|
|
|
|
2018-06-08 08:21:54 +03:00
|
|
|
fn clean_old_unconfirmed<T, K>(wallet: &mut T, height: u64) -> Result<(), Error>
|
2018-05-30 19:48:32 +03:00
|
|
|
where
|
2018-06-08 08:21:54 +03:00
|
|
|
T: WalletBackend<K>,
|
|
|
|
K: Keychain,
|
2018-05-30 19:48:32 +03:00
|
|
|
{
|
2018-06-07 17:04:21 +03:00
|
|
|
if height < 500 {
|
2018-05-24 18:27:26 +03:00
|
|
|
return Ok(());
|
|
|
|
}
|
2018-05-30 19:48:32 +03:00
|
|
|
wallet.with_wallet(|wallet_data| {
|
|
|
|
wallet_data.outputs().retain(|_, ref mut out| {
|
2018-05-24 03:14:34 +03:00
|
|
|
!(out.status == OutputStatus::Unconfirmed && out.height > 0
|
2018-06-07 17:04:21 +03:00
|
|
|
&& out.height < height - 500)
|
2018-05-24 03:14:34 +03:00
|
|
|
});
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2018-06-13 19:03:34 +03:00
|
|
|
/// Retrieve summary info about the wallet
|
2018-06-13 23:58:45 +03:00
|
|
|
/// caller should refresh first if desired
|
2018-06-14 19:02:05 +03:00
|
|
|
pub fn retrieve_info<T, K>(wallet: &mut T) -> Result<WalletInfo, Error>
|
2018-06-01 17:06:59 +03:00
|
|
|
where
|
2018-06-08 08:21:54 +03:00
|
|
|
T: WalletBackend<K> + WalletClient,
|
|
|
|
K: Keychain,
|
2018-06-01 17:06:59 +03:00
|
|
|
{
|
|
|
|
let ret_val = wallet.read_wallet(|wallet_data| {
|
2018-06-14 19:02:05 +03:00
|
|
|
let current_height = wallet_data.details().last_confirmed_height;
|
2018-06-01 17:06:59 +03:00
|
|
|
let mut unspent_total = 0;
|
2018-06-14 19:02:05 +03:00
|
|
|
let mut immature_total = 0;
|
2018-06-01 17:06:59 +03:00
|
|
|
let mut unconfirmed_total = 0;
|
|
|
|
let mut locked_total = 0;
|
|
|
|
for out in wallet_data
|
|
|
|
.outputs()
|
|
|
|
.clone()
|
|
|
|
.values()
|
|
|
|
.filter(|out| out.root_key_id == wallet_data.keychain().root_key_id())
|
|
|
|
{
|
2018-06-14 19:02:05 +03:00
|
|
|
if out.status == OutputStatus::Unspent && out.lock_height <= current_height {
|
2018-06-01 17:06:59 +03:00
|
|
|
unspent_total += out.value;
|
2018-06-14 19:02:05 +03:00
|
|
|
}
|
|
|
|
if out.status == OutputStatus::Unspent && out.lock_height > current_height {
|
|
|
|
immature_total += out.value;
|
2018-06-01 17:06:59 +03:00
|
|
|
}
|
|
|
|
if out.status == OutputStatus::Unconfirmed && !out.is_coinbase {
|
|
|
|
unconfirmed_total += out.value;
|
|
|
|
}
|
|
|
|
if out.status == OutputStatus::Locked {
|
|
|
|
locked_total += out.value;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(WalletInfo {
|
2018-06-14 19:02:05 +03:00
|
|
|
last_confirmed_height: wallet_data.details().last_confirmed_height,
|
|
|
|
total: unspent_total + unconfirmed_total + immature_total,
|
2018-06-01 17:06:59 +03:00
|
|
|
amount_awaiting_confirmation: unconfirmed_total,
|
2018-06-14 19:02:05 +03:00
|
|
|
amount_immature: immature_total,
|
2018-06-01 17:06:59 +03:00
|
|
|
amount_locked: locked_total,
|
2018-06-14 19:02:05 +03:00
|
|
|
amount_currently_spendable: unspent_total,
|
2018-06-01 17:06:59 +03:00
|
|
|
})
|
|
|
|
});
|
|
|
|
ret_val
|
|
|
|
}
|
2018-06-06 17:36:29 +03:00
|
|
|
|
|
|
|
/// Build a coinbase output and insert into wallet
|
2018-06-08 08:21:54 +03:00
|
|
|
pub fn build_coinbase<T, K>(wallet: &mut T, block_fees: &BlockFees) -> Result<CbData, Error>
|
2018-06-06 17:36:29 +03:00
|
|
|
where
|
2018-06-08 08:21:54 +03:00
|
|
|
T: WalletBackend<K>,
|
|
|
|
K: Keychain,
|
2018-06-06 17:36:29 +03:00
|
|
|
{
|
|
|
|
let (out, kern, block_fees) = receive_coinbase(wallet, block_fees).context(ErrorKind::Node)?;
|
|
|
|
|
|
|
|
let out_bin = ser::ser_vec(&out).context(ErrorKind::Node)?;
|
|
|
|
|
|
|
|
let kern_bin = ser::ser_vec(&kern).context(ErrorKind::Node)?;
|
|
|
|
|
|
|
|
let key_id_bin = match block_fees.key_id {
|
|
|
|
Some(key_id) => ser::ser_vec(&key_id).context(ErrorKind::Node)?,
|
|
|
|
None => vec![],
|
|
|
|
};
|
|
|
|
|
|
|
|
Ok(CbData {
|
|
|
|
output: util::to_hex(out_bin),
|
|
|
|
kernel: util::to_hex(kern_bin),
|
|
|
|
key_id: util::to_hex(key_id_bin),
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
//TODO: Split up the output creation and the wallet insertion
/// Build a coinbase output and the corresponding kernel
///
/// Derives (or reuses, if `block_fees.key_id` is set) a key, records the
/// pending coinbase output in the wallet as `Unconfirmed`, then builds the
/// actual reward output/kernel with that key. Returns the output, kernel
/// and a copy of `block_fees` with the chosen key id filled in.
pub fn receive_coinbase<T, K>(
	wallet: &mut T,
	block_fees: &BlockFees,
) -> Result<(Output, TxKernel, BlockFees), Error>
where
	T: WalletBackend<K>,
	K: Keychain,
{
	let root_key_id = wallet.keychain().root_key_id();

	let height = block_fees.height;
	// coinbase outputs can't be spent until they mature
	let lock_height = height + global::coinbase_maturity();

	// Now acquire the wallet lock and write the new output.
	let (key_id, derivation) = wallet.with_wallet(|wallet_data| {
		let key_id = block_fees.key_id();
		// reuse the caller-provided key id if there is one, otherwise derive
		// the next available child key
		let (key_id, derivation) = match key_id {
			Some(key_id) => keys::retrieve_existing_key(wallet_data, key_id),
			None => keys::next_available_key(wallet_data),
		};

		// track the new output and return the stuff needed for reward
		wallet_data.add_output(OutputData {
			root_key_id: root_key_id.clone(),
			key_id: key_id.clone(),
			n_child: derivation,
			value: reward(block_fees.fees),
			status: OutputStatus::Unconfirmed,
			height: height,
			lock_height: lock_height,
			is_coinbase: true,
			block: None,
			merkle_proof: None,
		});

		(key_id, derivation)
	})?;

	debug!(
		LOGGER,
		"receive_coinbase: built candidate output - {:?}, {}",
		key_id.clone(),
		derivation,
	);

	// hand the chosen key id back to the caller via the returned block_fees
	let mut block_fees = block_fees.clone();
	block_fees.key_id = Some(key_id.clone());

	debug!(LOGGER, "receive_coinbase: {:?}", block_fees);

	// NOTE(review): unwrap panics if reward construction fails; the
	// commented-out `.context(ErrorKind::Keychain)?` below suggests proper
	// propagation was intended — worth confirming and restoring.
	let (out, kern) = reward::output(
		wallet.keychain(),
		&key_id,
		block_fees.fees,
		block_fees.height,
	).unwrap();
	/* .context(ErrorKind::Keychain)?; */
	Ok((out, kern, block_fees))
}
|