Wallet amounts (#241)

* allow selecting a commit while providing a key index

* misnamed variable

* added static reference to libsecp that can be called throughout

* don't serialise rangeproof to json if it's not desired

* forgotten new file

* amounts input and displayed in wallet are now in full grins, with optional decimal place

* rustfmt

* merge branch

* better acknowledgement of transaction being sent
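
The heart of the change is a pair of helpers that convert between human-readable grin amounts and the internal u64 base-unit representation; they are added in the core module diff below. A minimal, self-contained sketch of that conversion (assuming, as the new tests imply, 10^9 base units per grin; the names mirror the added functions but this is not the committed code verbatim):

```rust
// Sketch of the wallet amount conversion introduced in this commit.
// Assumes one grin == 1_000_000_000 base units, as implied by the tests below
// (e.g. amount_from_hr_string("500") == 500_000_000_000).
use std::num::ParseFloatError;

const GRIN_BASE: u64 = 1_000_000_000;

/// Parse a human-readable amount such as "5.25" into base units.
fn amount_from_hr_string(amount: &str) -> Result<u64, ParseFloatError> {
    let amount = amount.parse::<f64>()?;
    Ok((amount * GRIN_BASE as f64) as u64)
}

/// Render an amount in base units with nine decimal places, e.g. "5.250000000".
fn amount_to_hr_string(amount: u64) -> String {
    format!("{:.9}", amount as f64 / GRIN_BASE as f64)
}

fn main() -> Result<(), ParseFloatError> {
    assert_eq!(amount_from_hr_string("5.25")?, 5_250_000_000);
    assert_eq!(amount_from_hr_string("500")?, 500_000_000_000);
    assert_eq!(amount_to_hr_string(500_000_000_000), "500.000000000");
    Ok(())
}
```

Parsing goes through f64, so anything smaller than one base unit is truncated (the tests below expect ".0000000009" to parse to 0), and the committed version derives the number of decimal places from GRIN_BASE rather than hard-coding 9.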
Author: Yeastplume, 2017-11-07 21:20:36 +00:00 (committed by GitHub)
Parent: 48a60858ba
Commit: 1eeb1fae22
11 changed files with 178 additions and 140 deletions

View file

@@ -344,8 +344,8 @@ impl Block {
                 ..time::now_utc()
             },
             previous: prev.hash(),
-            total_difficulty: prev.pow.clone().to_difficulty()
-                + prev.total_difficulty.clone(),
+            total_difficulty: prev.pow.clone().to_difficulty() +
+                prev.total_difficulty.clone(),
             ..Default::default()
         },
         inputs: inputs,

@@ -466,9 +466,7 @@ impl Block {
             }
             if k.lock_height > self.header.height {
-                return Err(Error::KernelLockHeight {
-                    lock_height: k.lock_height,
-                });
+                return Err(Error::KernelLockHeight { lock_height: k.lock_height });
             }
         }

@@ -494,17 +492,18 @@ impl Block {
     }

     // Validate the coinbase outputs generated by miners. Entails 2 main checks:
     //
     // * That the sum of all coinbase-marked outputs equal the supply.
     // * That the sum of blinding factors for all coinbase-marked outputs match
     // the coinbase-marked kernels.
     fn verify_coinbase(&self, secp: &Secp256k1) -> Result<(), Error> {
-        let cb_outs = filter_map_vec!(self.outputs, |out| {
-            if out.features.contains(COINBASE_OUTPUT) {
-                Some(out.commitment())
-            } else {
-                None
-            }
+        let cb_outs = filter_map_vec!(self.outputs, |out| if out.features.contains(
+            COINBASE_OUTPUT,
+        )
+        {
+            Some(out.commitment())
+        } else {
+            None
         });
         let cb_kerns = filter_map_vec!(self.kernels, |k| if k.features.contains(COINBASE_KERNEL) {
             Some(k.excess)

@@ -585,14 +584,14 @@ mod test {
     use util::secp;

     // utility to create a block without worrying about the key or previous
     // header
     fn new_block(txs: Vec<&Transaction>, keychain: &Keychain) -> Block {
         let key_id = keychain.derive_key_id(1).unwrap();
         Block::new(&BlockHeader::default(), txs, keychain, &key_id).unwrap()
     }

     // utility producing a transaction that spends an output with the provided
     // value and blinding key
     fn txspend1i1o(
         v: u64,
         keychain: &Keychain,

@@ -652,7 +651,7 @@ mod test {
         let b = new_block(vec![&mut btx1, &mut btx2, &mut btx3], &keychain);

         // block should have been automatically compacted (including reward
         // output) and should still be valid
         b.validate(&keychain.secp()).unwrap();
         assert_eq!(b.inputs.len(), 3);
         assert_eq!(b.outputs.len(), 3);

@@ -660,7 +659,7 @@ mod test {
     #[test]
     // builds 2 different blocks with a tx spending another and check if merging
     // occurs
     fn mergeable_blocks() {
         let keychain = Keychain::from_random_seed().unwrap();
         let key_id1 = keychain.derive_key_id(1).unwrap();

@@ -713,14 +712,14 @@ mod test {
         assert_eq!(coinbase_kernels.len(), 1);

         // the block should be valid here (single coinbase output with corresponding
         // txn kernel)
         assert_eq!(b.validate(&keychain.secp()), Ok(()));
     }

     #[test]
     // test that flipping the COINBASE_OUTPUT flag on the output features
     // invalidates the block and specifically it causes verify_coinbase to fail
     // additionally verifying the merkle_inputs_outputs also fails
     fn remove_coinbase_output_flag() {
         let keychain = Keychain::from_random_seed().unwrap();
         let mut b = new_block(vec![], &keychain);

@@ -742,7 +741,7 @@ mod test {
     #[test]
     // test that flipping the COINBASE_KERNEL flag on the kernel features
     // invalidates the block and specifically it causes verify_coinbase to fail
     fn remove_coinbase_kernel_flag() {
         let keychain = Keychain::from_random_seed().unwrap();
         let mut b = new_block(vec![], &keychain);

View file

@@ -40,8 +40,7 @@ pub struct Context<'a> {
 /// Function type returned by the transaction combinators. Transforms a
 /// (Transaction, BlindSum) pair into another, provided some context.
-pub type Append = for<'a> Fn(&'a mut Context, (Transaction, BlindSum))
-    -> (Transaction, BlindSum);
+pub type Append = for<'a> Fn(&'a mut Context, (Transaction, BlindSum)) -> (Transaction, BlindSum);

 /// Adds an input with the provided value and blinding key to the transaction
 /// being built.

@@ -133,11 +132,10 @@ pub fn transaction(
     keychain: &keychain::Keychain,
 ) -> Result<(Transaction, BlindingFactor), keychain::Error> {
     let mut ctx = Context { keychain };
-    let (mut tx, sum) = elems
-        .iter()
-        .fold((Transaction::empty(), BlindSum::new()), |acc, elem| {
-            elem(&mut ctx, acc)
-        });
+    let (mut tx, sum) = elems.iter().fold(
+        (Transaction::empty(), BlindSum::new()),
+        |acc, elem| elem(&mut ctx, acc),
+    );
     let blind_sum = ctx.keychain.blind_sum(&sum)?;
     let msg = secp::Message::from_slice(&kernel_sig_msg(tx.fee, tx.lock_height))?;
     let sig = ctx.keychain.sign_with_blinding(&msg, &blind_sum)?;

View file

@@ -153,9 +153,7 @@ impl HashWriter {
 impl Default for HashWriter {
     fn default() -> HashWriter {
-        HashWriter {
-            state: Blake2b::new(32),
-        }
+        HashWriter { state: Blake2b::new(32) }
     }
 }

@@ -204,8 +202,7 @@ impl<T: Writeable> VerifySortOrder<T> for Vec<T> {
             .map(|item| item.hash())
             .collect::<Vec<_>>()
             .windows(2)
-            .any(|pair| pair[0] > pair[1])
-        {
+            .any(|pair| pair[0] > pair[1]) {
             true => Err(ser::Error::BadlySorted),
             false => Ok(()),
         }

View file

@@ -25,6 +25,8 @@ pub mod transaction;
 use std::fmt;
 use std::cmp::Ordering;
+use std::num::ParseFloatError;
+use consensus::GRIN_BASE;
 use util::secp::{self, Secp256k1};
 use util::secp::pedersen::*;

@@ -53,7 +55,7 @@ pub trait Committed {
         let mut output_commits = map_vec!(self.outputs_committed(), |out| out.commitment());

         // add the overage as output commitment if positive, as an input commitment if
         // negative
         let overage = self.overage();
         if overage != 0 {
             let over_commit = secp.commit_value(overage.abs() as u64).unwrap();

@@ -182,6 +184,22 @@ impl Writeable for Proof {
     }
 }

+/// Common method for parsing an amount from human-readable, and converting
+/// to internally-compatible u64
+pub fn amount_from_hr_string(amount: &str) -> Result<u64, ParseFloatError> {
+    let amount = amount.parse::<f64>()?;
+    Ok((amount * GRIN_BASE as f64) as u64)
+}
+
+/// Common method for converting an amount to a human-readable string
+pub fn amount_to_hr_string(amount: u64) -> String {
+    let amount = (amount as f64 / GRIN_BASE as f64) as f64;
+    let places = (GRIN_BASE as f64).log(10.0) as usize + 1;
+    String::from(format!("{:.*}", places, amount))
+}
+
 #[cfg(test)]
 mod test {
     use super::*;

@@ -192,6 +210,27 @@ mod test {
     use keychain;
     use keychain::{BlindingFactor, Keychain};

+    #[test]
+    pub fn test_amount_to_hr() {
+        assert!(50123456789 == amount_from_hr_string("50.123456789").unwrap());
+        assert!(50 == amount_from_hr_string(".000000050").unwrap());
+        assert!(1 == amount_from_hr_string(".000000001").unwrap());
+        assert!(0 == amount_from_hr_string(".0000000009").unwrap());
+        assert!(500_000_000_000 == amount_from_hr_string("500").unwrap());
+        assert!(
+            5_000_000_000_000_000_000 == amount_from_hr_string("5000000000.00000000000").unwrap()
+        );
+    }
+
+    #[test]
+    pub fn test_hr_to_amount() {
+        assert!("50.123456789" == amount_to_hr_string(50123456789));
+        assert!("0.000000050" == amount_to_hr_string(50));
+        assert!("0.000000001" == amount_to_hr_string(1));
+        assert!("500.000000000" == amount_to_hr_string(500_000_000_000));
+        assert!("5000000000.000000000" == amount_to_hr_string(5_000_000_000_000_000_000));
+    }
+
     #[test]
     #[should_panic(expected = "InvalidSecretKey")]
     fn test_zero_commit_fails() {

@@ -308,11 +347,11 @@ mod test {
         {
             // Alice gets 2 of her pre-existing outputs to send 5 coins to Bob, they
             // become inputs in the new transaction
             let (in1, in2) = (input(4, key_id1), input(3, key_id2));

             // Alice builds her transaction, with change, which also produces the sum
             // of blinding factors before they're obscured.
             let (tx, sum) =
                 build::transaction(vec![in1, in2, output(1, key_id3), with_fee(2)], &keychain)
                     .unwrap();

@@ -321,8 +360,8 @@ mod test {
         }

         // From now on, Bob only has the obscured transaction and the sum of
         // blinding factors. He adds his output, finalizes the transaction so it's
         // ready for broadcast.
         let (tx_final, _) = build::transaction(
             vec![
                 initial_tx(tx_alice),

@@ -382,7 +421,7 @@ mod test {
         let key_id3 = keychain.derive_key_id(3).unwrap();

         // first check we can add a timelocked tx where lock height matches current block height
         // and that the resulting block is valid
         let tx1 = build::transaction(
             vec![
                 input(5, key_id1.clone()),

@@ -421,9 +460,7 @@ mod test {
             &key_id3.clone(),
         ).unwrap();
         match b.validate(keychain.secp()) {
-            Err(KernelLockHeight {
-                lock_height: height,
-            }) => {
+            Err(KernelLockHeight { lock_height: height }) => {
                 assert_eq!(height, 2);
             }
             _ => panic!("expecting KernelLockHeight error here"),

View file

@@ -267,14 +267,14 @@ where
         let mut pos = elmt_pos;

         // we look ahead one position in the MMR, if the expected node has a higher
         // height it means we have to build a higher peak by summing with a previous
         // sibling. we do it iteratively in case the new peak itself allows the
         // creation of another parent.
         while bintree_postorder_height(pos + 1) > height {
             let left_sibling = bintree_jump_left_sibling(pos);
-            let left_hashsum = self.backend
-                .get(left_sibling)
-                .expect("missing left sibling in tree, should not have been pruned");
+            let left_hashsum = self.backend.get(left_sibling).expect(
+                "missing left sibling in tree, should not have been pruned",
+            );
             current_hashsum = left_hashsum + current_hashsum;

             to_append.push(current_hashsum.clone());

@@ -293,8 +293,8 @@ where
     /// well as the consumer-provided index of when the change occurred.
     pub fn rewind(&mut self, position: u64, index: u32) -> Result<(), String> {
         // identify which actual position we should rewind to as the provided
         // position is a leaf, which may had some parent that needs to exist
         // afterward for the MMR to be valid
         let mut pos = position;
         while bintree_postorder_height(pos + 1) > 0 {
             pos += 1;

@@ -320,7 +320,7 @@ where
         }

         // loop going up the tree, from node to parent, as long as we stay inside
         // the tree.
         let mut to_prune = vec![];
         let mut current = position;
         while current + 1 < self.last_pos {

@@ -332,7 +332,7 @@ where
             to_prune.push(current);

             // if we have a pruned sibling, we can continue up the tree
             // otherwise we're done
             if let None = self.backend.get(sibling) {
                 current = parent;
             } else {

@@ -357,13 +357,13 @@ where
         let mut last_leaf = self.last_pos;
         let size = self.unpruned_size();
         // Special case that causes issues in bintree functions,
         // just return
         if size == 1 {
             return_vec.push(self.backend.get(last_leaf).unwrap());
             return return_vec;
         }
         // if size is even, we're already at the bottom, otherwise
         // we need to traverse down to it (reverse post-order direction)
         if size % 2 == 1 {
             last_leaf = bintree_rightmost(self.last_pos);
         }

@@ -503,21 +503,19 @@ pub struct PruneList {
 impl PruneList {
     /// Instantiate a new empty prune list
     pub fn new() -> PruneList {
-        PruneList {
-            pruned_nodes: vec![],
-        }
+        PruneList { pruned_nodes: vec![] }
     }

     /// Computes by how many positions a node at pos should be shifted given the
     /// number of nodes that have already been pruned before it.
     pub fn get_shift(&self, pos: u64) -> Option<u64> {
         // get the position where the node at pos would fit in the pruned list, if
         // it's already pruned, nothing to skip
         match self.pruned_pos(pos) {
             None => None,
             Some(idx) => {
                 // skip by the number of elements pruned in the preceding subtrees,
                 // which is the sum of the size of each subtree
                 Some(
                     self.pruned_nodes[0..(idx as usize)]
                         .iter()

@@ -559,8 +557,8 @@ impl PruneList {
             Err(idx) => {
                 if self.pruned_nodes.len() > idx {
                     // the node at pos can't be a child of lower position nodes by MMR
                     // construction but can be a child of the next node, going up parents
                     // from pos to make sure it's not the case
                     let next_peak_pos = self.pruned_nodes[idx];
                     let mut cursor = pos;
                     loop {

@@ -586,13 +584,13 @@ impl PruneList {
 /// of the range.
 fn peaks(num: u64) -> Vec<u64> {
     // detecting an invalid mountain range, when siblings exist but no parent
     // exists
     if bintree_postorder_height(num + 1) > bintree_postorder_height(num) {
         return vec![];
     }
     // our top peak is always on the leftmost side of the tree and leftmost trees
     // have for index a binary values with all 1s (i.e. 11, 111, 1111, etc.)
     let mut top = 1;
     while (top - 1) <= num {
         top = top << 1;

@@ -605,7 +603,7 @@ fn peaks(num: u64) -> Vec<u64> {
     let mut peaks = vec![top];

     // going down the range, next peaks are right neighbors of the top. if one
     // doesn't exist yet, we go down to a smaller peak to the left
     let mut peak = top;
     'outer: loop {
         peak = bintree_jump_right_sibling(peak);

@@ -845,9 +843,9 @@ mod test {
         type Sum = u64;
         fn sum(&self) -> u64 {
             // sums are not allowed to overflow, so we use this simple
             // non-injective "sum" function that will still be homomorphic
-            self.0[0] as u64 * 0x1000 + self.0[1] as u64 * 0x100 + self.0[2] as u64 * 0x10
-                + self.0[3] as u64
+            self.0[0] as u64 * 0x1000 + self.0[1] as u64 * 0x100 + self.0[2] as u64 * 0x10 +
+                self.0[3] as u64
         }
         fn sum_len() -> usize {
             8

@@ -897,8 +895,8 @@ mod test {
         // two elements
         pmmr.push(elems[1], None::<TestElem>).unwrap();
-        let sum2 = HashSum::from_summable(1, &elems[0], None::<TestElem>)
-            + HashSum::from_summable(2, &elems[1], None::<TestElem>);
+        let sum2 = HashSum::from_summable(1, &elems[0], None::<TestElem>) +
+            HashSum::from_summable(2, &elems[1], None::<TestElem>);
         assert_eq!(pmmr.root(), sum2);
         assert_eq!(pmmr.unpruned_size(), 3);

@@ -910,9 +908,9 @@ mod test {
         // four elements
         pmmr.push(elems[3], None::<TestElem>).unwrap();
-        let sum4 = sum2
-            + (HashSum::from_summable(4, &elems[2], None::<TestElem>)
-                + HashSum::from_summable(5, &elems[3], None::<TestElem>));
+        let sum4 = sum2 +
+            (HashSum::from_summable(4, &elems[2], None::<TestElem>) +
+                HashSum::from_summable(5, &elems[3], None::<TestElem>));
         assert_eq!(pmmr.root(), sum4);
         assert_eq!(pmmr.unpruned_size(), 7);

@@ -924,9 +922,9 @@ mod test {
         // six elements
         pmmr.push(elems[5], None::<TestElem>).unwrap();
-        let sum6 = sum4.clone()
-            + (HashSum::from_summable(8, &elems[4], None::<TestElem>)
-                + HashSum::from_summable(9, &elems[5], None::<TestElem>));
+        let sum6 = sum4.clone() +
+            (HashSum::from_summable(8, &elems[4], None::<TestElem>) +
+                HashSum::from_summable(9, &elems[5], None::<TestElem>));
         assert_eq!(pmmr.root(), sum6.clone());
         assert_eq!(pmmr.unpruned_size(), 10);

@@ -938,11 +936,11 @@ mod test {
         // eight elements
         pmmr.push(elems[7], None::<TestElem>).unwrap();
-        let sum8 = sum4
-            + ((HashSum::from_summable(8, &elems[4], None::<TestElem>)
-                + HashSum::from_summable(9, &elems[5], None::<TestElem>))
-                + (HashSum::from_summable(11, &elems[6], None::<TestElem>)
-                    + HashSum::from_summable(12, &elems[7], None::<TestElem>)));
+        let sum8 = sum4 +
+            ((HashSum::from_summable(8, &elems[4], None::<TestElem>) +
+                HashSum::from_summable(9, &elems[5], None::<TestElem>)) +
+                (HashSum::from_summable(11, &elems[6], None::<TestElem>) +
+                    HashSum::from_summable(12, &elems[7], None::<TestElem>)));
         assert_eq!(pmmr.root(), sum8);
         assert_eq!(pmmr.unpruned_size(), 15);

@@ -991,8 +989,7 @@ mod test {
         let res = pmmr.get_last_n_insertions(19);
         assert!(
-            res[0].sum == 4 && res[1].sum == 3 && res[2].sum == 2 && res[3].sum == 1
-                && res.len() == 4
+            res[0].sum == 4 && res[1].sum == 3 && res[2].sum == 2 && res[3].sum == 1 && res.len() == 4
         );

         pmmr.push(elems[5], None::<TestElem>).unwrap();

@@ -1002,8 +999,7 @@ mod test {
         let res = pmmr.get_last_n_insertions(7);
         assert!(
-            res[0].sum == 9 && res[1].sum == 8 && res[2].sum == 7 && res[3].sum == 6
-                && res.len() == 7
+            res[0].sum == 9 && res[1].sum == 8 && res[2].sum == 7 && res[3].sum == 6 && res.len() == 7
         );
     }

View file

@@ -63,9 +63,7 @@ impl Difficulty {
         let mut in_vec = h.to_vec();
         in_vec.truncate(8);
         let num = BigEndian::read_u64(&in_vec);
-        Difficulty {
-            num: max_target / num,
-        }
+        Difficulty { num: max_target / num }
     }

     /// Converts the difficulty into a u64

@@ -83,36 +81,28 @@ impl fmt::Display for Difficulty {
 impl Add<Difficulty> for Difficulty {
     type Output = Difficulty;
     fn add(self, other: Difficulty) -> Difficulty {
-        Difficulty {
-            num: self.num + other.num,
-        }
+        Difficulty { num: self.num + other.num }
     }
 }

 impl Sub<Difficulty> for Difficulty {
     type Output = Difficulty;
     fn sub(self, other: Difficulty) -> Difficulty {
-        Difficulty {
-            num: self.num - other.num,
-        }
+        Difficulty { num: self.num - other.num }
     }
 }

 impl Mul<Difficulty> for Difficulty {
     type Output = Difficulty;
     fn mul(self, other: Difficulty) -> Difficulty {
-        Difficulty {
-            num: self.num * other.num,
-        }
+        Difficulty { num: self.num * other.num }
     }
 }

 impl Div<Difficulty> for Difficulty {
     type Output = Difficulty;
     fn div(self, other: Difficulty) -> Difficulty {
-        Difficulty {
-            num: self.num / other.num,
-        }
+        Difficulty { num: self.num / other.num }
     }
 }

@@ -167,8 +157,6 @@ impl<'de> de::Visitor<'de> for DiffVisitor {
                 &"a value number",
             ));
         };
-        Ok(Difficulty {
-            num: num_in.unwrap(),
-        })
+        Ok(Difficulty { num: num_in.unwrap() })
     }
 }

View file

@@ -129,8 +129,9 @@ impl Writeable for TxKernel {
 impl Readable for TxKernel {
     fn read(reader: &mut Reader) -> Result<TxKernel, ser::Error> {
-        let features =
-            KernelFeatures::from_bits(reader.read_u8()?).ok_or(ser::Error::CorruptedData)?;
+        let features = KernelFeatures::from_bits(reader.read_u8()?).ok_or(
+            ser::Error::CorruptedData,
+        )?;

         Ok(TxKernel {
             features: features,

@@ -483,8 +484,9 @@ impl Writeable for Output {
 /// an Output from a binary stream.
 impl Readable for Output {
     fn read(reader: &mut Reader) -> Result<Output, ser::Error> {
-        let features =
-            OutputFeatures::from_bits(reader.read_u8()?).ok_or(ser::Error::CorruptedData)?;
+        let features = OutputFeatures::from_bits(reader.read_u8()?).ok_or(
+            ser::Error::CorruptedData,
+        )?;

         Ok(Output {
             features: features,

@@ -520,11 +522,13 @@ impl Output {
     /// value from the range proof and the commitment
     pub fn recover_value(&self, keychain: &Keychain, key_id: &Identifier) -> Option<u64> {
         match keychain.rewind_range_proof(key_id, self.commit, self.proof) {
-            Ok(proof_info) => if proof_info.success {
-                Some(proof_info.value)
-            } else {
-                None
-            },
+            Ok(proof_info) => {
+                if proof_info.success {
+                    Some(proof_info.value)
+                } else {
+                    None
+                }
+            }
             Err(_) => None,
         }
     }

@@ -542,9 +546,7 @@ impl Summable for SumCommit {
     type Sum = SumCommit;
     fn sum(&self) -> SumCommit {
-        SumCommit {
-            commit: self.commit.clone(),
-        }
+        SumCommit { commit: self.commit.clone() }
     }

     fn sum_len() -> usize {

@@ -563,9 +565,7 @@ impl Readable for SumCommit {
     fn read(reader: &mut Reader) -> Result<SumCommit, ser::Error> {
         let commit = Commitment::read(reader)?;

-        Ok(SumCommit {
-            commit: commit,
-        })
+        Ok(SumCommit { commit: commit })
     }
 }

@@ -574,15 +574,17 @@ impl ops::Add for SumCommit {
     fn add(self, other: SumCommit) -> SumCommit {
         let secp = static_secp_instance();
-        let sum = match secp.lock().unwrap()
-            .commit_sum(vec![self.commit.clone(), other.commit.clone()], vec![])
-        {
+        let sum = match secp.lock().unwrap().commit_sum(
+            vec![
+                self.commit.clone(),
+                other.commit.clone(),
+            ],
+            vec![],
+        ) {
             Ok(s) => s,
             Err(_) => Commitment::from_vec(vec![1; 33]),
         };
-        SumCommit {
-            commit: sum,
-        }
+        SumCommit { commit: sum }
     }
 }

View file

@@ -201,7 +201,7 @@ fn main() {
                     provided, the command will attempt to contact the receiver at that \
                     address and send the transaction directly.")
                 .arg(Arg::with_name("amount")
-                    .help("Amount to send in the smallest denomination")
+                    .help("Number of coins to send with optional fraction, e.g. 12.423")
                     .index(1))
                 .arg(Arg::with_name("minimum_confirmations")
                     .help("Minimum number of confirmations required for an output to be spendable.")

@@ -220,7 +220,7 @@ fn main() {
                     key. Similar to send but burns an output to allow single-party \
                     transactions.")
                 .arg(Arg::with_name("amount")
-                    .help("Amount to burn in the smallest denomination")
+                    .help("Number of coins to burn")
                    .index(1))
                 .arg(Arg::with_name("minimum_confirmations")
                     .help("Minimum number of confirmations required for an output to be spendable.")

@@ -389,9 +389,9 @@ fn wallet_command(wallet_args: &ArgMatches) {
         ("send", Some(send_args)) => {
             let amount = send_args
                 .value_of("amount")
-                .expect("Amount to send required")
-                .parse()
-                .expect("Could not parse amount as a whole number.");
+                .expect("Amount to send required");
+            let amount = core::core::amount_from_hr_string(amount)
+                .expect("Could not parse amount as a number with optional decimal point.");
             let minimum_confirmations: u64 = send_args
                 .value_of("minimum_confirmations")
                 .unwrap_or("1")

@@ -401,20 +401,25 @@ fn wallet_command(wallet_args: &ArgMatches) {
             if let Some(d) = send_args.value_of("dest") {
                 dest = d;
             }
-            wallet::issue_send_tx(
+            let result=wallet::issue_send_tx(
                 &wallet_config,
                 &keychain,
                 amount,
                 minimum_confirmations,
                 dest.to_string(),
-            ).unwrap();
+            );
+            match result {
+                Ok(_) => {}, //success messaged logged internally
+                Err(wallet::Error::NotEnoughFunds(_)) => {},
+                Err(e) => panic!(e),
+            };
         }
         ("burn", Some(send_args)) => {
             let amount = send_args
                 .value_of("amount")
-                .expect("Amount to burn required")
-                .parse()
-                .expect("Could not parse amount as a whole number.");
+                .expect("Amount to burn required");
+            let amount = core::core::amount_from_hr_string(amount)
+                .expect("Could not parse amount as number with optional decimal point.");
             let minimum_confirmations: u64 = send_args
                 .value_of("minimum_confirmations")
                 .unwrap_or("1")

View file

@@ -69,7 +69,14 @@ fn single_send_partial_tx(url: &str, partial_tx: &JSONPartialTx) -> Result<(), E
     req.set_body(json);

     let work = client.request(req);
-    let _ = core.run(work)?;
+    let _ = core.run(work).and_then(|res|{
+        if res.status()==hyper::StatusCode::Ok {
+            info!(LOGGER, "Transaction sent successfully");
+        } else {
+            error!(LOGGER, "Error sending transaction - status: {}", res.status());
+        }
+        Ok(())
+    })?;
     Ok(())
 }

View file

@@ -14,6 +14,7 @@
 use checker;
 use keychain::Keychain;
+use core::core;
 use types::{WalletConfig, WalletData};

 pub fn show_info(config: &WalletConfig, keychain: &Keychain) {

@@ -51,7 +52,7 @@ pub fn show_info(config: &WalletConfig, keychain: &Keychain) {
                 out.status,
                 out.is_coinbase,
                 out.num_confirmations(current_height),
-                out.value,
+                core::amount_to_hr_string(out.value),
             );
         }
     });

View file

@@ -17,7 +17,7 @@ use serde_json;
 use api;
 use client;
 use checker;
-use core::core::{build, Transaction};
+use core::core::{build, Transaction, amount_to_hr_string};
 use core::ser;
 use keychain::{BlindingFactor, Identifier, Keychain};
 use receiver::TxWrapper;

@@ -88,10 +88,18 @@ fn build_send_tx(
     })?;

     // build transaction skeleton with inputs and change
-    let mut parts = inputs_and_change(&coins, config, keychain, key_id, amount)?;
+    let parts = inputs_and_change(&coins, config, keychain, key_id, amount);
+    if let Err(p) = parts {
+        let total: u64 = coins.iter().map(|c| c.value).sum();
+        error!(LOGGER, "Transaction not sent - Not enough funds (Max: {})", amount_to_hr_string(total));
+        return Err(p);
+    }
+    let mut parts=parts.unwrap();

     // This is more proof of concept than anything but here we set lock_height
     // on tx being sent (based on current chain height via api).
     parts.push(build::with_lock_height(lock_height));

     let (tx, blind) = build::transaction(parts, &keychain)?;