From ebd801f14e647fb98fb8ed2184b8f30149f23426 Mon Sep 17 00:00:00 2001
From: Antioch Peverell <30642645+antiochp@users.noreply.github.com>
Date: Tue, 13 Feb 2018 10:35:30 -0500
Subject: [PATCH] Transactions contain kernels. Transactions and blocks maintain
a kernel offset (split key). (#681)
* WIP - split the key in final tx step
store "offset" on transaction itself
* rebase
* commit
* tx with offset
* got a test tx validating successfully using a sig from a split key and the appropriate offset
* sum up the offset for the block_header
* fix size tests for blocks and compact blocks (header now includes offset)
* use txs with offsets in most of the core tests
some tests now failing
* build kernel from k1G (k2 stored on tx, sum stored on header)
* commit
* tx now has vec of kernels
rework tx and kernel validation
* add test for tx cut_through
* wip - working on splitting in aggsig
* split the key when creating the initial sender aggsig context
* cleanup
* cleanup
* code needs cleaning up but split keys working for sender/receiver aggsig flow
* cleanup debug logging
* fix tests
* fix merge and basic cleanup
* fix keychain tests to use new tx_id
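The scheme, in short: each transaction's blinding sum k is split as k = k1 + k2; the kernel commits to and is signed with k1, k2 is stored on the transaction as its "offset", and a block keeps only the sum of its transactions' offsets in the header. Below is a minimal sketch of the identity this relies on, assuming the keychain and util::secp APIs used in the diff that follows (the free function and use paths are illustrative, not part of the patch):
use rand::thread_rng;
use keychain::BlindingFactor;
use util::secp::key::SecretKey;
use util::static_secp_instance;
fn split_key_identity() {
    let secp = static_secp_instance();
    let secp = secp.lock().unwrap();
    // the full blinding sum k of some transaction
    let k = SecretKey::new(&secp, &mut thread_rng());
    // split k into k1 (signs the kernel) and k2 (the tx "offset")
    let split = BlindingFactor::from_secret_key(k).split(&secp).unwrap();
    let k1 = split.blind_1.secret_key(&secp).unwrap();
    let k2 = split.blind_2.secret_key(&secp).unwrap();
    // k = k1 + k2, so the kernel excess (k1*G) plus the offset
    // commitment (k2*G) still sums to the full excess (k*G)
    let excess = secp.commit(0, k1).unwrap();
    let offset_commit = secp.commit(0, k2).unwrap();
    let sum = secp.commit_sum(vec![excess, offset_commit], vec![]).unwrap();
    assert_eq!(sum, secp.commit(0, k).unwrap());
}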
---
chain/tests/mine_simple_chain.rs | 4 +-
chain/tests/test_coinbase_maturity.rs | 4 +-
core/src/core/block.rs | 110 +++++--
core/src/core/build.rs | 140 +++++++--
core/src/core/mod.rs | 165 ++++++++---
core/src/core/transaction.rs | 218 ++++++++------
core/src/ser.rs | 40 ++-
grin/src/miner.rs | 2 +-
keychain/src/blind.rs | 123 +++++++-
keychain/src/keychain.rs | 405 +++++++++++++++++++++++++-
pool/src/graph.rs | 12 +-
pool/src/pool.rs | 15 +-
wallet/src/receiver.rs | 135 ++++++---
wallet/src/sender.rs | 68 +++--
wallet/src/types.rs | 12 +-
15 files changed, 1160 insertions(+), 293 deletions(-)
diff --git a/chain/tests/mine_simple_chain.rs b/chain/tests/mine_simple_chain.rs
index 063746164..8fff8b7a6 100644
--- a/chain/tests/mine_simple_chain.rs
+++ b/chain/tests/mine_simple_chain.rs
@@ -263,7 +263,7 @@ fn spend_in_fork() {
let lock_height = 1 + global::coinbase_maturity();
assert_eq!(lock_height, 4);
- let (tx1, _) = build::transaction(
+ let tx1 = build::transaction(
vec![
build::coinbase_input(consensus::REWARD, block_hash, kc.derive_key_id(2).unwrap()),
build::output(consensus::REWARD - 20000, kc.derive_key_id(30).unwrap()),
@@ -276,7 +276,7 @@ fn spend_in_fork() {
let prev_main = next.header.clone();
chain.process_block(next.clone(), chain::Options::SKIP_POW).unwrap();
- let (tx2, _) = build::transaction(
+ let tx2 = build::transaction(
vec![
build::input(consensus::REWARD - 20000, next.hash(), kc.derive_key_id(30).unwrap()),
build::output(consensus::REWARD - 40000, kc.derive_key_id(31).unwrap()),
diff --git a/chain/tests/test_coinbase_maturity.rs b/chain/tests/test_coinbase_maturity.rs
index 99c0c6401..3d6d7d397 100644
--- a/chain/tests/test_coinbase_maturity.rs
+++ b/chain/tests/test_coinbase_maturity.rs
@@ -116,7 +116,7 @@ fn test_coinbase_maturity() {
// here we build a tx that attempts to spend the earlier coinbase output
// this is not a valid tx as the coinbase output cannot be spent yet
- let (coinbase_txn, _) = build::transaction(
+ let coinbase_txn = build::transaction(
vec![
build::coinbase_input(amount, block_hash, key_id1.clone()),
build::output(amount - 2, key_id2.clone()),
@@ -183,7 +183,7 @@ fn test_coinbase_maturity() {
let prev = chain.head_header().unwrap();
- let (coinbase_txn, _) = build::transaction(
+ let coinbase_txn = build::transaction(
vec![
build::coinbase_input(amount, block_hash, key_id1.clone()),
build::output(amount - 2, key_id2.clone()),
diff --git a/core/src/core/block.rs b/core/src/core/block.rs
index ce5ea24ae..47f030cb3 100644
--- a/core/src/core/block.rs
+++ b/core/src/core/block.rs
@@ -43,12 +43,13 @@ use util::kernel_sig_msg;
use util::LOGGER;
use global;
use keychain;
+use keychain::BlindingFactor;
/// Errors thrown by Block validation
#[derive(Debug, Clone, PartialEq)]
pub enum Error {
- /// The sum of output minus input commitments does not match the sum of
- /// kernel commitments
+ /// The sum of output minus input commitments does not
+ /// match the sum of kernel commitments
KernelSumMismatch,
/// Same as above but for the coinbase part of a block, including reward
CoinbaseSumMismatch,
@@ -126,6 +127,8 @@ pub struct BlockHeader {
pub difficulty: Difficulty,
/// Total accumulated difficulty since genesis block
pub total_difficulty: Difficulty,
+ /// The single aggregate "offset" that needs to be applied for all commitments to sum
+ pub kernel_offset: BlindingFactor,
}
impl Default for BlockHeader {
@@ -143,6 +146,7 @@ impl Default for BlockHeader {
kernel_root: ZERO_HASH,
nonce: 0,
pow: Proof::zero(proof_size),
+ kernel_offset: BlindingFactor::zero(),
}
}
}
@@ -164,6 +168,7 @@ impl Writeable for BlockHeader {
try!(writer.write_u64(self.nonce));
try!(self.difficulty.write(writer));
try!(self.total_difficulty.write(writer));
+ try!(self.kernel_offset.write(writer));
if writer.serialization_mode() != ser::SerializationMode::Hash {
try!(self.pow.write(writer));
@@ -184,6 +189,7 @@ impl Readable for BlockHeader {
let nonce = reader.read_u64()?;
let difficulty = Difficulty::read(reader)?;
let total_difficulty = Difficulty::read(reader)?;
+ let kernel_offset = BlindingFactor::read(reader)?;
let pow = Proof::read(reader)?;
Ok(BlockHeader {
@@ -201,6 +207,7 @@ impl Readable for BlockHeader {
nonce: nonce,
difficulty: difficulty,
total_difficulty: total_difficulty,
+ kernel_offset: kernel_offset,
})
}
}
@@ -284,7 +291,7 @@ pub struct Block {
pub inputs: Vec<Input>,
/// List of transaction outputs
pub outputs: Vec<Output>,
- /// List of transaction kernels and associated proofs
+ /// List of kernels with associated proofs (note these are offset from tx_kernels)
pub kernels: Vec<TxKernel>,
}
@@ -379,7 +386,7 @@ impl Block {
key_id: &keychain::Identifier,
difficulty: Difficulty,
) -> Result<Block, Error> {
- let fees = txs.iter().map(|tx| tx.fee).sum();
+ let fees = txs.iter().map(|tx| tx.fee()).sum();
let (reward_out, reward_proof) = Block::reward_output(
keychain,
key_id,
@@ -486,26 +493,33 @@ impl Block {
let mut inputs = vec![];
let mut outputs = vec![];
+ // we will sum these together at the end
+ // to give us the overall offset for the block
+ let mut kernel_offsets = vec![];
+
// iterate over the all the txs
// build the kernel for each
// and collect all the kernels, inputs and outputs
// to build the block (which we can sort of think of as one big tx?)
for tx in txs {
// validate each transaction and gather their kernels
- let excess = tx.validate()?;
- let kernel = tx.build_kernel(excess);
- kernels.push(kernel);
+ // tx has an offset k2 where k = k1 + k2
+ // and the tx is signed using k1
+ // the kernel excess is k1G
+ // we will sum all the offsets later and store the total offset
+ // on the block_header
+ tx.validate()?;
- for input in tx.inputs.clone() {
- inputs.push(input);
- }
+ // we will sum these later to give a single aggregate offset
+ kernel_offsets.push(tx.offset);
- for output in tx.outputs.clone() {
- outputs.push(output);
- }
+ // add all tx inputs/outputs/kernels to the block
+ kernels.extend(tx.kernels.iter().cloned());
+ inputs.extend(tx.inputs.iter().cloned());
+ outputs.extend(tx.outputs.iter().cloned());
}
- // also include the reward kernel and output
+ // include the reward kernel and output
kernels.push(reward_kern);
outputs.push(reward_out);
@@ -514,7 +528,28 @@ impl Block {
outputs.sort();
kernels.sort();
- // calculate the overall Merkle tree and fees (todo?)
+ // now sum the kernel_offsets up to give us
+ // an aggregate offset for the entire block
+ let kernel_offset = {
+ let secp = static_secp_instance();
+ let secp = secp.lock().unwrap();
+ let keys = kernel_offsets
+ .iter()
+ .cloned()
+ .filter(|x| *x != BlindingFactor::zero())
+ .filter_map(|x| {
+ x.secret_key(&secp).ok()
+ })
+ .collect::<Vec<_>>();
+ if keys.is_empty() {
+ BlindingFactor::zero()
+ } else {
+ let sum = secp.blind_sum(keys, vec![])?;
+
+ BlindingFactor::from_secret_key(sum)
+ }
+ };
+
Ok(
Block {
header: BlockHeader {
@@ -526,6 +561,7 @@ impl Block {
previous: prev.hash(),
total_difficulty: difficulty +
prev.total_difficulty.clone(),
+ kernel_offset: kernel_offset,
..Default::default()
},
inputs: inputs,
@@ -641,22 +677,34 @@ impl Block {
let io_sum = self.sum_commitments()?;
// sum all kernels commitments
- let proof_commits = map_vec!(self.kernels, |proof| proof.excess);
+ let kernel_sum = {
+ let mut kernel_commits = self.kernels
+ .iter()
+ .map(|x| x.excess)
+ .collect::<Vec<_>>();
- let proof_sum = {
let secp = static_secp_instance();
let secp = secp.lock().unwrap();
- secp.commit_sum(proof_commits, vec![])?
+
+ // add the kernel_offset in as necessary (unless offset is zero)
+ if self.header.kernel_offset != BlindingFactor::zero() {
+ let skey = self.header.kernel_offset.secret_key(&secp)?;
+ let offset_commit = secp.commit(0, skey)?;
+ kernel_commits.push(offset_commit);
+ }
+
+ secp.commit_sum(kernel_commits, vec![])?
};
- // both should be the same
- if proof_sum != io_sum {
+ // sum of kernel commitments (including kernel_offset) must match
+ // the sum of input/output commitments (minus fee)
+ if kernel_sum != io_sum {
return Err(Error::KernelSumMismatch);
}
// verify all signatures with the commitment as pk
- for proof in &self.kernels {
- proof.verify()?;
+ for kernel in &self.kernels {
+ kernel.verify()?;
}
Ok(())
@@ -839,8 +887,7 @@ mod test {
build::transaction(
vec![input(v, ZERO_HASH, key_id1), output(3, key_id2), with_fee(2)],
&keychain,
- ).map(|(tx, _)| tx)
- .unwrap()
+ ).unwrap()
}
// Too slow for now #[test]
@@ -863,7 +910,6 @@ mod test {
let now = Instant::now();
parts.append(&mut vec![input(500000, ZERO_HASH, pks.pop().unwrap()), with_fee(2)]);
let mut tx = build::transaction(parts, &keychain)
- .map(|(tx, _)| tx)
.unwrap();
println!("Build tx: {}", now.elapsed().as_secs());
@@ -898,7 +944,7 @@ mod test {
let key_id3 = keychain.derive_key_id(3).unwrap();
let mut btx1 = tx2i1o();
- let (mut btx2, _) = build::transaction(
+ let mut btx2 = build::transaction(
vec![input(7, ZERO_HASH, key_id1), output(5, key_id2.clone()), with_fee(2)],
&keychain,
).unwrap();
@@ -1010,7 +1056,7 @@ mod test {
ser::serialize(&mut vec, &b).expect("serialization failed");
assert_eq!(
vec.len(),
- 5_676
+ 5_708,
);
}
@@ -1023,7 +1069,7 @@ mod test {
ser::serialize(&mut vec, &b).expect("serialization failed");
assert_eq!(
vec.len(),
- 16_224
+ 16_256,
);
}
@@ -1035,7 +1081,7 @@ mod test {
ser::serialize(&mut vec, &b.as_compact_block()).expect("serialization failed");
assert_eq!(
vec.len(),
- 5_676
+ 5_708,
);
}
@@ -1048,7 +1094,7 @@ mod test {
ser::serialize(&mut vec, &b.as_compact_block()).expect("serialization failed");
assert_eq!(
vec.len(),
- 5_682
+ 5_714,
);
}
@@ -1070,7 +1116,7 @@ mod test {
ser::serialize(&mut vec, &b).expect("serialization failed");
assert_eq!(
vec.len(),
- 111_156
+ 111_188,
);
}
@@ -1092,7 +1138,7 @@ mod test {
ser::serialize(&mut vec, &b.as_compact_block()).expect("serialization failed");
assert_eq!(
vec.len(),
- 5_736
+ 5_768,
);
}
diff --git a/core/src/core/build.rs b/core/src/core/build.rs
index f833ae66f..252872297 100644
--- a/core/src/core/build.rs
+++ b/core/src/core/build.rs
@@ -27,7 +27,7 @@
use util::{secp, kernel_sig_msg};
-use core::{Transaction, Input, Output, OutputFeatures, SwitchCommitHash};
+use core::{Transaction, TxKernel, Input, Output, OutputFeatures, SwitchCommitHash};
use core::hash::Hash;
use keychain;
use keychain::{Keychain, BlindSum, BlindingFactor, Identifier};
@@ -40,7 +40,7 @@ pub struct Context<'a> {
/// Function type returned by the transaction combinators. Transforms a
/// (Transaction, BlindSum) pair into another, provided some context.
-pub type Append = for<'a> Fn(&'a mut Context, (Transaction, BlindSum)) -> (Transaction, BlindSum);
+pub type Append = for<'a> Fn(&'a mut Context, (Transaction, TxKernel, BlindSum)) -> (Transaction, TxKernel, BlindSum);
/// Adds an input with the provided value and blinding key to the transaction
/// being built.
@@ -50,14 +50,14 @@ fn build_input(
out_block: Option<Hash>,
key_id: Identifier,
) -> Box<Append> {
- Box::new(move |build, (tx, sum)| -> (Transaction, BlindSum) {
+ Box::new(move |build, (tx, kern, sum)| -> (Transaction, TxKernel, BlindSum) {
let commit = build.keychain.commit(value, &key_id).unwrap();
let input = Input::new(
features,
commit,
out_block,
);
- (tx.with_input(input), sum.sub_key_id(key_id.clone()))
+ (tx.with_input(input), kern, sum.sub_key_id(key_id.clone()))
})
}
@@ -86,7 +86,7 @@ pub fn coinbase_input(
/// Adds an output with the provided value and key identifier from the
/// keychain.
pub fn output(value: u64, key_id: Identifier) -> Box<Append> {
- Box::new(move |build, (tx, sum)| -> (Transaction, BlindSum) {
+ Box::new(move |build, (tx, kern, sum)| -> (Transaction, TxKernel, BlindSum) {
debug!(
LOGGER,
"Building an output: {}, {}",
@@ -125,6 +125,7 @@ pub fn output(value: u64, key_id: Identifier) -> Box<Append> {
switch_commit_hash: switch_commit_hash,
proof: rproof,
}),
+ kern,
sum.add_key_id(key_id.clone()),
)
})
@@ -132,31 +133,41 @@ pub fn output(value: u64, key_id: Identifier) -> Box<Append> {
/// Sets the fee on the transaction being built.
pub fn with_fee(fee: u64) -> Box<Append> {
- Box::new(move |_build, (tx, sum)| -> (Transaction, BlindSum) {
- (tx.with_fee(fee), sum)
+ Box::new(move |_build, (tx, kern, sum)| -> (Transaction, TxKernel, BlindSum) {
+ (tx, kern.with_fee(fee), sum)
})
}
/// Sets the lock_height on the transaction being built.
pub fn with_lock_height(lock_height: u64) -> Box<Append> {
- Box::new(move |_build, (tx, sum)| -> (Transaction, BlindSum) {
- (tx.with_lock_height(lock_height), sum)
+ Box::new(move |_build, (tx, kern, sum)| -> (Transaction, TxKernel, BlindSum) {
+ (tx, kern.with_lock_height(lock_height), sum)
})
}
-/// Sets a known excess value on the transaction being built. Usually used in
+/// Adds a known excess value on the transaction being built. Usually used in
/// combination with the initial_tx function when a new transaction is built
/// by adding to a pre-existing one.
pub fn with_excess(excess: BlindingFactor) -> Box<Append> {
- Box::new(move |_build, (tx, sum)| -> (Transaction, BlindSum) {
- (tx, sum.add_blinding_factor(excess.clone()))
+ Box::new(move |_build, (tx, kern, sum)| -> (Transaction, TxKernel, BlindSum) {
+ (tx, kern, sum.add_blinding_factor(excess.clone()))
+ })
+}
+
+/// Sets a known tx "offset". Used in final step of tx construction.
+pub fn with_offset(offset: BlindingFactor) -> Box<Append> {
+ Box::new(move |_build, (tx, kern, sum)| -> (Transaction, TxKernel, BlindSum) {
+ (tx.with_offset(offset), kern, sum)
})
}
/// Sets an initial transaction to add to when building a new transaction.
-pub fn initial_tx(tx: Transaction) -> Box<Append> {
- Box::new(move |_build, (_, sum)| -> (Transaction, BlindSum) {
- (tx.clone(), sum)
+/// We currently only support building a tx with a single kernel via build::transaction()
+pub fn initial_tx(mut tx: Transaction) -> Box<Append> {
+ assert_eq!(tx.kernels.len(), 1);
+ let kern = tx.kernels.remove(0);
+ Box::new(move |_build, (_, _, sum)| -> (Transaction, TxKernel, BlindSum) {
+ (tx.clone(), kern.clone(), sum)
})
}
@@ -170,21 +181,78 @@ pub fn initial_tx(tx: Transaction) -> Box<Append> {
/// let (tx2, _) = build::transaction(vec![initial_tx(tx1), with_excess(sum),
/// output_rand(2)], keychain).unwrap();
///
-pub fn transaction(
+pub fn partial_transaction(
elems: Vec<Box<Append>>,
keychain: &keychain::Keychain,
) -> Result<(Transaction, BlindingFactor), keychain::Error> {
let mut ctx = Context { keychain };
- let (mut tx, sum) = elems.iter().fold(
- (Transaction::empty(), BlindSum::new()),
+ let (mut tx, kern, sum) = elems.iter().fold(
+ (Transaction::empty(), TxKernel::empty(), BlindSum::new()),
|acc, elem| elem(&mut ctx, acc),
);
let blind_sum = ctx.keychain.blind_sum(&sum)?;
- let msg = secp::Message::from_slice(&kernel_sig_msg(tx.fee, tx.lock_height))?;
- tx.excess_sig = Keychain::aggsig_sign_with_blinding(&keychain.secp(), &msg, &blind_sum)?;
+
+ // we only support building a tx with a single kernel via build::transaction()
+ assert!(tx.kernels.is_empty());
+ tx.kernels.push(kern);
+
Ok((tx, blind_sum))
}
+/// Builds a complete transaction.
+pub fn transaction(
+ elems: Vec<Box<Append>>,
+ keychain: &keychain::Keychain,
+) -> Result<Transaction, keychain::Error> {
+ let (mut tx, blind_sum) = partial_transaction(elems, keychain)?;
+ assert_eq!(tx.kernels.len(), 1);
+
+ let mut kern = tx.kernels.remove(0);
+ let msg = secp::Message::from_slice(&kernel_sig_msg(kern.fee, kern.lock_height))?;
+
+ let skey = blind_sum.secret_key(&keychain.secp())?;
+ kern.excess = keychain.secp().commit(0, skey)?;
+ kern.excess_sig = Keychain::aggsig_sign_with_blinding(&keychain.secp(), &msg, &blind_sum)?;
+
+ tx.kernels.push(kern);
+
+ Ok(tx)
+}
+
+/// Builds a complete transaction, splitting the key and
+/// setting the excess, excess_sig and tx offset as necessary.
+pub fn transaction_with_offset(
+ elems: Vec<Box<Append>>,
+ keychain: &keychain::Keychain,
+) -> Result<Transaction, keychain::Error> {
+ let mut ctx = Context { keychain };
+ let (mut tx, mut kern, sum) = elems.iter().fold(
+ (Transaction::empty(), TxKernel::empty(), BlindSum::new()),
+ |acc, elem| elem(&mut ctx, acc),
+ );
+ let blind_sum = ctx.keychain.blind_sum(&sum)?;
+
+ let split = blind_sum.split(&keychain.secp())?;
+ let k1 = split.blind_1;
+ let k2 = split.blind_2;
+
+ let msg = secp::Message::from_slice(&kernel_sig_msg(kern.fee, kern.lock_height))?;
+
+ // generate kernel excess and excess_sig using the split key k1
+ let skey = k1.secret_key(&keychain.secp())?;
+ kern.excess = ctx.keychain.secp().commit(0, skey)?;
+ kern.excess_sig = Keychain::aggsig_sign_with_blinding(&keychain.secp(), &msg, &k1)?;
+
+ // store the kernel offset (k2) on the tx itself
+ // commitments will sum correctly when including the offset
+ tx.offset = k2.clone();
+
+ assert!(tx.kernels.is_empty());
+ tx.kernels.push(kern);
+
+ Ok(tx)
+}
+
// Just a simple test, most exhaustive tests in the core mod.rs.
#[cfg(test)]
mod test {
@@ -198,17 +266,37 @@ mod test {
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id3 = keychain.derive_key_id(3).unwrap();
- let (tx, _) = transaction(
+ let tx = transaction(
vec![
input(10, ZERO_HASH, key_id1),
- input(11, ZERO_HASH, key_id2),
+ input(12, ZERO_HASH, key_id2),
output(20, key_id3),
- with_fee(1),
+ with_fee(2),
],
&keychain,
).unwrap();
- tx.verify_sig().unwrap();
+ tx.validate().unwrap();
+ }
+
+ #[test]
+ fn blind_simple_tx_with_offset() {
+ let keychain = Keychain::from_random_seed().unwrap();
+ let key_id1 = keychain.derive_key_id(1).unwrap();
+ let key_id2 = keychain.derive_key_id(2).unwrap();
+ let key_id3 = keychain.derive_key_id(3).unwrap();
+
+ let tx = transaction_with_offset(
+ vec![
+ input(10, ZERO_HASH, key_id1),
+ input(12, ZERO_HASH, key_id2),
+ output(20, key_id3),
+ with_fee(2),
+ ],
+ &keychain,
+ ).unwrap();
+
+ tx.validate().unwrap();
}
#[test]
@@ -217,11 +305,11 @@ mod test {
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
- let (tx, _) = transaction(
+ let tx = transaction(
vec![input(6, ZERO_HASH, key_id1), output(2, key_id2), with_fee(4)],
&keychain,
).unwrap();
- tx.verify_sig().unwrap();
+ tx.validate().unwrap();
}
}
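A sketch of how the two builders above relate (editorial illustration; the helper function and exact use paths are assumptions, not part of the patch): building the same inputs/outputs with build::transaction() and build::transaction_with_offset() yields kernels whose excesses differ by exactly the offset commitment.
use keychain::Keychain;
use core::core::build::{self, input, output, with_fee};
use core::core::hash::ZERO_HASH;
use util::static_secp_instance;
fn offset_vs_no_offset() {
    let keychain = Keychain::from_random_seed().unwrap();
    let key_id1 = keychain.derive_key_id(1).unwrap();
    let key_id2 = keychain.derive_key_id(2).unwrap();
    // same elements for both builds (blinding factors are derived deterministically)
    let elems = || vec![
        input(10, ZERO_HASH, key_id1.clone()),
        output(8, key_id2.clone()),
        with_fee(2),
    ];
    let tx_plain = build::transaction(elems(), &keychain).unwrap();
    let tx_split = build::transaction_with_offset(elems(), &keychain).unwrap();
    let secp = static_secp_instance();
    let secp = secp.lock().unwrap();
    // excess(k1) + commit(0, offset k2) == excess(k)
    let offset_commit = secp
        .commit(0, tx_split.offset.secret_key(&secp).unwrap())
        .unwrap();
    let sum = secp
        .commit_sum(vec![tx_split.kernels[0].excess, offset_commit], vec![])
        .unwrap();
    assert_eq!(sum, tx_plain.kernels[0].excess);
}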
diff --git a/core/src/core/mod.rs b/core/src/core/mod.rs
index 18082fdac..7eeb5755e 100644
--- a/core/src/core/mod.rs
+++ b/core/src/core/mod.rs
@@ -263,8 +263,10 @@ mod test {
let tx = tx2i1o();
let mut vec = Vec::new();
ser::serialize(&mut vec, &tx).expect("serialization failed");
- println!("{}", vec.len());
- assert!(vec.len() == 5364);
+ assert_eq!(
+ vec.len(),
+ 5_438,
+ );
}
#[test]
@@ -273,7 +275,7 @@ mod test {
let mut vec = Vec::new();
ser::serialize(&mut vec, &tx).expect("serialization failed");
let dtx: Transaction = ser::deserialize(&mut &vec[..]).unwrap();
- assert_eq!(dtx.fee, 2);
+ assert_eq!(dtx.fee(), 2);
assert_eq!(dtx.inputs.len(), 2);
assert_eq!(dtx.outputs.len(), 1);
assert_eq!(tx.hash(), dtx.hash());
@@ -304,7 +306,7 @@ mod test {
let key_id3 = keychain.derive_key_id(3).unwrap();
// first build a valid tx with corresponding blinding factor
- let (tx, blind) = build::transaction(
+ let tx = build::transaction(
vec![
input(10, ZERO_HASH, key_id1),
output(5, key_id2),
@@ -314,14 +316,51 @@ mod test {
&keychain,
).unwrap();
- // confirm the tx validates and that we can construct a valid tx_kernel from it
- let excess = tx.validate().unwrap();
- let tx_kernel = tx.build_kernel(excess);
- let _ = tx_kernel.verify().unwrap();
+ // check the tx is valid
+ tx.validate().unwrap();
- assert_eq!(tx_kernel.features, KernelFeatures::DEFAULT_KERNEL);
- assert_eq!(tx_kernel.fee, tx.fee);
- assert_eq!(tx_kernel.excess, excess);
+ // check the kernel is also itself valid
+ assert_eq!(tx.kernels.len(), 1);
+ let kern = &tx.kernels[0];
+ kern.verify().unwrap();
+
+ assert_eq!(kern.features, KernelFeatures::DEFAULT_KERNEL);
+ assert_eq!(kern.fee, tx.fee());
+ }
+
+ // Combine two transactions into one big transaction (with multiple kernels)
+ // and check it still validates.
+ #[test]
+ fn transaction_cut_through() {
+ let tx1 = tx1i2o();
+ let tx2 = tx2i1o();
+
+ assert!(tx1.validate().is_ok());
+ assert!(tx2.validate().is_ok());
+
+ // now build a "cut_through" tx from tx1 and tx2
+ let mut tx3 = tx1.clone();
+ tx3.inputs.extend(tx2.inputs.iter().cloned());
+ tx3.outputs.extend(tx2.outputs.iter().cloned());
+ tx3.kernels.extend(tx2.kernels.iter().cloned());
+
+ // make sure everything is sorted
+ tx3.inputs.sort();
+ tx3.outputs.sort();
+ tx3.kernels.sort();
+
+ // finally sum the offsets up
+ // TODO - hide this in a convenience function somewhere
+ tx3.offset = {
+ let secp = static_secp_instance();
+ let secp = secp.lock().unwrap();
+ let skey1 = tx1.offset.secret_key(&secp).unwrap();
+ let skey2 = tx2.offset.secret_key(&secp).unwrap();
+ let skey3 = secp.blind_sum(vec![skey1, skey2], vec![]).unwrap();
+ BlindingFactor::from_secret_key(skey3)
+ };
+
+ assert!(tx3.validate().is_ok());
}
#[test]
@@ -331,7 +370,7 @@ mod test {
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id3 = keychain.derive_key_id(3).unwrap();
- let (tx, _) = build::transaction(
+ let tx = build::transaction(
vec![
input(75, ZERO_HASH, key_id1),
output(42, key_id2),
@@ -349,7 +388,7 @@ mod test {
#[test]
fn blind_tx() {
let btx = tx2i1o();
- btx.verify_sig().unwrap(); // unwrap will panic if invalid
+ assert!(btx.validate().is_ok());
// checks that the range proof on our blind output is sufficiently hiding
let Output { proof, .. } = btx.outputs[0];
@@ -372,6 +411,57 @@ mod test {
}
}
+ // #[test]
+ // fn tx_build_aggsig() {
+ // let keychain = Keychain::from_random_seed().unwrap();
+ // let key_id1 = keychain.derive_key_id(1).unwrap();
+ // let key_id2 = keychain.derive_key_id(2).unwrap();
+ // let key_id3 = keychain.derive_key_id(3).unwrap();
+ // let key_id4 = keychain.derive_key_id(4).unwrap();
+ //
+ // let (tx_alice, blind_sum) = {
+ // // Alice gets 2 of her pre-existing outputs to send 5 coins to Bob, they
+ // // become inputs in the new transaction
+ // let (in1, in2) = (input(4, ZERO_HASH, key_id1), input(3, ZERO_HASH, key_id2));
+ //
+ // // Alice builds her transaction, with change, which also produces the sum
+ // // of blinding factors before they're obscured.
+ // let (tx, sum) = build::partial_transaction(
+ // vec![in1, in2, output(1, key_id3),
+ // with_fee(2)],
+ // &keychain,
+ // ).unwrap();
+ //
+ // (tx, sum)
+ // };
+ //
+ // let blind = blind_sum.secret_key(&keychain.secp())?;
+ // keychain.aggsig_create_context(blind);
+ // let (pub_excess, pub_nonce) = keychain.aggsig_get_public_keys();
+ //
+ // let sig_part = keychain.aggsig_calculate_partial_sig(
+ // &pub_nonce,
+ // tx.fee(),
+ // tx.lock_height(),
+ // ).unwrap();
+ //
+ //
+ // // From now on, Bob only has the obscured transaction and the sum of
+ // // blinding factors. He adds his output, finalizes the transaction so it's
+ // // ready for broadcast.
+ // let tx_final = build::transaction(
+ // vec![
+ // initial_tx(tx_alice),
+ // with_excess(blind_sum),
+ // output(4, key_id4),
+ // ],
+ // &keychain,
+ // ).unwrap();
+ //
+ // tx_final.validate().unwrap();
+ //
+ // }
+
/// Simulate the standard exchange between 2 parties when creating a basic
/// 2 inputs, 2 outputs transaction.
#[test]
@@ -382,27 +472,26 @@ mod test {
let key_id3 = keychain.derive_key_id(3).unwrap();
let key_id4 = keychain.derive_key_id(4).unwrap();
- let tx_alice: Transaction;
- let blind_sum: BlindingFactor;
-
- {
+ let (tx_alice, blind_sum) = {
// Alice gets 2 of her pre-existing outputs to send 5 coins to Bob, they
// become inputs in the new transaction
let (in1, in2) = (input(4, ZERO_HASH, key_id1), input(3, ZERO_HASH, key_id2));
// Alice builds her transaction, with change, which also produces the sum
// of blinding factors before they're obscured.
- let (tx, sum) =
- build::transaction(vec![in1, in2, output(1, key_id3), with_fee(2)], &keychain)
- .unwrap();
- tx_alice = tx;
- blind_sum = sum;
- }
+ let (tx, sum) = build::partial_transaction(
+ vec![in1, in2, output(1, key_id3),
+ with_fee(2)],
+ &keychain,
+ ).unwrap();
+
+ (tx, sum)
+ };
// From now on, Bob only has the obscured transaction and the sum of
// blinding factors. He adds his output, finalizes the transaction so it's
// ready for broadcast.
- let (tx_final, _) = build::transaction(
+ let tx_final = build::transaction(
vec![
initial_tx(tx_alice),
with_excess(blind_sum),
@@ -435,7 +524,7 @@ mod test {
let key_id = keychain.derive_key_id(1).unwrap();
let mut tx1 = tx2i1o();
- tx1.verify_sig().unwrap();
+ tx1.validate().unwrap();
let b = Block::new(
&BlockHeader::default(),
@@ -483,8 +572,7 @@ mod test {
with_lock_height(1),
],
&keychain,
- ).map(|(tx, _)| tx)
- .unwrap();
+ ).unwrap();
let b = Block::new(
&BlockHeader::default(),
@@ -504,8 +592,7 @@ mod test {
with_lock_height(2),
],
&keychain,
- ).map(|(tx, _)| tx)
- .unwrap();
+ ).unwrap();
let b = Block::new(
&BlockHeader::default(),
@@ -525,13 +612,13 @@ mod test {
#[test]
pub fn test_verify_1i1o_sig() {
let tx = tx1i1o();
- tx.verify_sig().unwrap();
+ tx.validate().unwrap();
}
#[test]
pub fn test_verify_2i1o_sig() {
let tx = tx2i1o();
- tx.verify_sig().unwrap();
+ tx.validate().unwrap();
}
// utility producing a transaction with 2 inputs and a single output
@@ -541,7 +628,7 @@ mod test {
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id3 = keychain.derive_key_id(3).unwrap();
- build::transaction(
+ build::transaction_with_offset(
vec![
input(10, ZERO_HASH, key_id1),
input(11, ZERO_HASH, key_id2),
@@ -549,8 +636,7 @@ mod test {
with_fee(2),
],
&keychain,
- ).map(|(tx, _)| tx)
- .unwrap()
+ ).unwrap()
}
// utility producing a transaction with a single input and output
@@ -559,22 +645,22 @@ mod test {
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
- build::transaction(
+ build::transaction_with_offset(
vec![input(5, ZERO_HASH, key_id1), output(3, key_id2), with_fee(2)],
&keychain,
- ).map(|(tx, _)| tx)
- .unwrap()
+ ).unwrap()
}
// utility producing a transaction with a single input
// and two outputs (one change output)
+ // Note: this tx is built with a kernel "offset"
pub fn tx1i2o() -> Transaction {
let keychain = keychain::Keychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id3 = keychain.derive_key_id(3).unwrap();
- build::transaction(
+ build::transaction_with_offset(
vec![
input(6, ZERO_HASH, key_id1),
output(3, key_id2),
@@ -582,7 +668,6 @@ mod test {
with_fee(2),
],
&keychain,
- ).map(|(tx, _)| tx)
- .unwrap()
+ ).unwrap()
}
}
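The transaction_cut_through test above sums the two offsets inline and leaves a TODO to move that into a convenience function. One possible shape for such a helper, mirroring the kernel_offsets fold in block.rs (a sketch only, not something this patch adds):
use keychain::BlindingFactor;
use util::secp;
use util::static_secp_instance;
/// Sum a set of kernel offsets into a single aggregate offset, ignoring zero offsets.
fn sum_offsets(offsets: Vec<BlindingFactor>) -> Result<BlindingFactor, secp::Error> {
    let secp = static_secp_instance();
    let secp = secp.lock().unwrap();
    let keys = offsets
        .into_iter()
        .filter(|x| *x != BlindingFactor::zero())
        .filter_map(|x| x.secret_key(&secp).ok())
        .collect::<Vec<_>>();
    if keys.is_empty() {
        Ok(BlindingFactor::zero())
    } else {
        Ok(BlindingFactor::from_secret_key(secp.blind_sum(keys, vec![])?))
    }
}
// e.g. in the test: tx3.offset = sum_offsets(vec![tx1.offset, tx2.offset]).unwrap();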
diff --git a/core/src/core/transaction.rs b/core/src/core/transaction.rs
index 427c4a68e..3e697e88b 100644
--- a/core/src/core/transaction.rs
+++ b/core/src/core/transaction.rs
@@ -17,7 +17,7 @@ use blake2::blake2b::blake2b;
use util::secp::{self, Message, Signature};
use util::{static_secp_instance, kernel_sig_msg};
use util::secp::pedersen::{Commitment, RangeProof};
-use std::cmp::min;
+use std::cmp::{min, max};
use std::cmp::Ordering;
use std::ops;
@@ -26,7 +26,8 @@ use consensus::VerifySortOrder;
use core::Committed;
use core::hash::{Hash, Hashed, ZERO_HASH};
use core::pmmr::Summable;
-use keychain::{Identifier, Keychain};
+use keychain;
+use keychain::{Identifier, Keychain, BlindingFactor};
use ser::{self, read_and_verify_sorted, Readable, Reader, Writeable, WriteableSorted, Writer};
use util;
@@ -74,8 +75,15 @@ macro_rules! hashable_ord {
pub enum Error {
/// Transaction fee can't be odd, due to half fee burning
OddFee,
+ /// Kernel fee can't be odd, due to half fee burning
+ OddKernelFee,
/// Underlying Secp256k1 error (signature validation or invalid public key typically)
Secp(secp::Error),
+ /// Underlying keychain related error
+ Keychain(keychain::Error),
+ /// The sum of output minus input commitments does not
+ /// match the sum of kernel commitments
+ KernelSumMismatch,
/// Restrict number of incoming inputs
TooManyInputs,
/// Underlying consensus error (currently for sort order)
@@ -98,6 +106,13 @@ impl From<secp::Error> for Error {
}
}
+impl From<keychain::Error> for Error {
+ fn from(e: keychain::Error) -> Error {
+ Error::Keychain(e)
+ }
+}
+
+
/// A proof that a transaction sums to zero. Includes both the transaction's
/// Pedersen commitment and the signature, that guarantees that the commitments
/// amount to zero.
@@ -168,6 +183,30 @@ impl TxKernel {
Ok(())
}
+ /// Build an empty tx kernel with zero values.
+ pub fn empty() -> TxKernel {
+ TxKernel {
+ features: KernelFeatures::DEFAULT_KERNEL,
+ fee: 0,
+ lock_height: 0,
+ excess: Commitment::from_vec(vec![0; 33]),
+ excess_sig: Signature::from_raw_data(&[0; 64]).unwrap(),
+ }
+ }
+
+ /// Builds a new tx kernel with the provided fee.
+ pub fn with_fee(self, fee: u64) -> TxKernel {
+ TxKernel { fee: fee, ..self }
+ }
+
+ /// Builds a new tx kernel with the provided lock_height.
+ pub fn with_lock_height(self, lock_height: u64) -> TxKernel {
+ TxKernel {
+ lock_height: lock_height,
+ ..self
+ }
+ }
+
/// Size in bytes of a kernel, necessary for binary storage
pub fn size() -> usize {
17 + // features plus fee and lock_height
@@ -179,41 +218,37 @@ impl TxKernel {
/// A transaction
#[derive(Debug, Clone)]
pub struct Transaction {
- /// Set of inputs spent by the transaction.
+ /// List of inputs spent by the transaction.
pub inputs: Vec<Input>,
- /// Set of outputs the transaction produces.
+ /// List of outputs the transaction produces.
pub outputs: Vec<Output>,
- /// Fee paid by the transaction.
- pub fee: u64,
- /// Transaction is not valid before this chain height.
- pub lock_height: u64,
- /// The signature proving the excess is a valid public key, which signs
- /// the transaction fee.
- pub excess_sig: Signature,
+ /// List of kernels that make up this transaction (usually a single kernel).
+ pub kernels: Vec<TxKernel>,
+ /// The kernel "offset" k2
+ /// excess is k1G after splitting the key k = k1 + k2
+ pub offset: BlindingFactor,
}
/// Implementation of Writeable for a fully blinded transaction, defines how to
/// write the transaction as binary.
impl Writeable for Transaction {
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ser::Error> {
- ser_multiwrite!(
- writer,
- [write_u64, self.fee],
- [write_u64, self.lock_height]
- );
- self.excess_sig.write(writer)?;
+ self.offset.write(writer)?;
ser_multiwrite!(
writer,
[write_u64, self.inputs.len() as u64],
- [write_u64, self.outputs.len() as u64]
+ [write_u64, self.outputs.len() as u64],
+ [write_u64, self.kernels.len() as u64]
);
// Consensus rule that everything is sorted in lexicographical order on the wire.
let mut inputs = self.inputs.clone();
let mut outputs = self.outputs.clone();
+ let mut kernels = self.kernels.clone();
try!(inputs.write_sorted(writer));
try!(outputs.write_sorted(writer));
+ try!(kernels.write_sorted(writer));
Ok(())
}
@@ -223,23 +258,20 @@ impl Writeable for Transaction {
/// transaction from a binary stream.
impl Readable for Transaction {
fn read(reader: &mut Reader) -> Result<Transaction, ser::Error> {
- let (fee, lock_height) =
- ser_multiread!(reader, read_u64, read_u64);
+ let offset = BlindingFactor::read(reader)?;
- let excess_sig = Signature::read(reader)?;
-
- let (input_len, output_len) =
- ser_multiread!(reader, read_u64, read_u64);
+ let (input_len, output_len, kernel_len) =
+ ser_multiread!(reader, read_u64, read_u64, read_u64);
let inputs = read_and_verify_sorted(reader, input_len)?;
let outputs = read_and_verify_sorted(reader, output_len)?;
+ let kernels = read_and_verify_sorted(reader, kernel_len)?;
Ok(Transaction {
- fee: fee,
- lock_height: lock_height,
- excess_sig: excess_sig,
- inputs: inputs,
- outputs: outputs,
+ offset,
+ inputs,
+ outputs,
+ kernels,
..Default::default()
})
}
@@ -253,7 +285,7 @@ impl Committed for Transaction {
&self.outputs
}
fn overage(&self) -> i64 {
- (self.fee as i64)
+ (self.fee() as i64)
}
}
@@ -267,28 +299,34 @@ impl Transaction {
/// Creates a new empty transaction (no inputs or outputs, zero fee).
pub fn empty() -> Transaction {
Transaction {
- fee: 0,
- lock_height: 0,
- excess_sig: Signature::from_raw_data(&[0;64]).unwrap(),
+ offset: BlindingFactor::zero(),
inputs: vec![],
outputs: vec![],
+ kernels: vec![],
}
}
/// Creates a new transaction initialized with
- /// the provided inputs, outputs, fee and lock_height.
+ /// the provided inputs, outputs and kernels.
pub fn new(
inputs: Vec<Input>,
outputs: Vec<Output>,
- fee: u64,
- lock_height: u64,
+ kernels: Vec<TxKernel>,
) -> Transaction {
Transaction {
- fee: fee,
- lock_height: lock_height,
- excess_sig: Signature::from_raw_data(&[0;64]).unwrap(),
+ offset: BlindingFactor::zero(),
inputs: inputs,
outputs: outputs,
+ kernels: kernels,
+ }
+ }
+
+ /// Creates a new transaction using this transaction as a template
+ /// and with the specified offset.
+ pub fn with_offset(self, offset: BlindingFactor) -> Transaction {
+ Transaction {
+ offset: offset,
+ ..self
}
}
@@ -316,74 +354,92 @@ impl Transaction {
}
}
- /// Builds a new transaction with the provided fee.
- pub fn with_fee(self, fee: u64) -> Transaction {
- Transaction { fee: fee, ..self }
+ /// Total fee for a transaction is the sum of fees of all kernels.
+ pub fn fee(&self) -> u64 {
+ self.kernels.iter().fold(0, |acc, ref x| acc + x.fee)
}
- /// Builds a new transaction with the provided lock_height.
- pub fn with_lock_height(self, lock_height: u64) -> Transaction {
- Transaction {
- lock_height: lock_height,
- ..self
- }
+ /// Lock height of a transaction is the max lock height of the kernels.
+ pub fn lock_height(&self) -> u64 {
+ self.kernels.iter().fold(0, |acc, ref x| max(acc, x.lock_height))
}
- /// The verification for a MimbleWimble transaction involves getting the
- /// excess of summing all commitments and using it as a public key
- /// to verify the embedded signature. The rational is that if the values
- /// sum to zero as they should in r.G + v.H then only k.G the excess
- /// of the sum of r.G should be left. And r.G is the definition of a
- /// public key generated using r as a private key.
- pub fn verify_sig(&self) -> Result<Commitment, secp::Error> {
- let rsum = self.sum_commitments()?;
-
- let msg = Message::from_slice(&kernel_sig_msg(self.fee, self.lock_height))?;
-
- let secp = static_secp_instance();
- let secp = secp.lock().unwrap();
- let sig = self.excess_sig;
- // pretend the sum is a public key (which it is, being of the form r.G) and
- // verify the transaction sig with it
- let valid = Keychain::aggsig_verify_single_from_commit(&secp, &sig, &msg, &rsum);
- if !valid {
- return Err(secp::Error::IncorrectSignature);
+ /// To verify transaction kernels we check that -
+ /// * all kernels have an even fee
+ /// * sum of input/output commitments matches sum of kernel commitments after applying offset
+ /// * each kernel sig is valid (i.e. tx commitments sum to zero, given above is true)
+ fn verify_kernels(&self) -> Result<(), Error> {
+ // check that each individual kernel fee is even
+ // TODO - is this strictly necessary given that we check overall tx fee?
+ // TODO - move this into verify_fee() check or maybe kernel.verify()?
+ for k in &self.kernels {
+ if k.fee & 1 != 0 {
+ return Err(Error::OddKernelFee);
+ }
}
- Ok(rsum)
- }
- /// Builds a transaction kernel
- pub fn build_kernel(&self, excess: Commitment) -> TxKernel {
- TxKernel {
- features: KernelFeatures::DEFAULT_KERNEL,
- excess: excess,
- excess_sig: self.excess_sig.clone(),
- fee: self.fee,
- lock_height: self.lock_height,
+ // sum all input and output commitments
+ let io_sum = self.sum_commitments()?;
+
+ // sum all kernels commitments
+ let kernel_sum = {
+ let mut kernel_commits = self.kernels
+ .iter()
+ .map(|x| x.excess)
+ .collect::<Vec<_>>();
+
+ let secp = static_secp_instance();
+ let secp = secp.lock().unwrap();
+
+ // add the offset in as necessary (unless offset is zero)
+ if self.offset != BlindingFactor::zero() {
+ let skey = self.offset.secret_key(&secp)?;
+ let offset_commit = secp.commit(0, skey)?;
+ kernel_commits.push(offset_commit);
+ }
+
+ secp.commit_sum(kernel_commits, vec![])?
+ };
+
+ // sum of kernel commitments (including the offset) must match
+ // the sum of input/output commitments (minus fee)
+ if kernel_sum != io_sum {
+ return Err(Error::KernelSumMismatch);
}
+
+ // verify all signatures with the commitment as pk
+ for kernel in &self.kernels {
+ kernel.verify()?;
+ }
+
+ Ok(())
}
/// Validates all relevant parts of a fully built transaction. Checks the
/// excess value against the signature as well as range proofs for each
/// output.
- pub fn validate(&self) -> Result<Commitment, Error> {
- if self.fee & 1 != 0 {
+ pub fn validate(&self) -> Result<(), Error> {
+ if self.fee() & 1 != 0 {
return Err(Error::OddFee);
}
if self.inputs.len() > consensus::MAX_BLOCK_INPUTS {
return Err(Error::TooManyInputs);
}
self.verify_sorted()?;
+
for out in &self.outputs {
out.verify_proof()?;
}
- let excess = self.verify_sig()?;
- Ok(excess)
+
+ self.verify_kernels()?;
+
+ Ok(())
}
fn verify_sorted(&self) -> Result<(), Error> {
self.inputs.verify_sort_order()?;
self.outputs.verify_sort_order()?;
+ self.kernels.verify_sort_order()?;
Ok(())
}
}
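Spelled out, the balance that verify_kernels() checks for a transaction is (an illustrative summary of the code above, not additional patch content):
// io_sum     = sum(output commitments) - sum(input commitments),
//              with the fee folded in as the overage by sum_commitments()
// kernel_sum = sum(kernel.excess) + commit(0, tx.offset)
//              (the offset term is skipped when the offset is zero)
//
// The tx is balanced iff kernel_sum == io_sum, and every kernel.excess_sig
// must additionally verify against kernel.excess for the message built from
// (fee, lock_height).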
diff --git a/core/src/ser.rs b/core/src/ser.rs
index 6afde4039..b18f6ad96 100644
--- a/core/src/ser.rs
+++ b/core/src/ser.rs
@@ -22,7 +22,7 @@
use std::{cmp, error, fmt};
use std::io::{self, Read, Write};
use byteorder::{BigEndian, ByteOrder, ReadBytesExt};
-use keychain::{Identifier, IDENTIFIER_SIZE};
+use keychain::{Identifier, BlindingFactor, IDENTIFIER_SIZE};
use consensus;
use consensus::VerifySortOrder;
use core::hash::Hashed;
@@ -30,7 +30,12 @@ use core::transaction::{SWITCH_COMMIT_HASH_SIZE, SwitchCommitHash};
use util::secp::pedersen::Commitment;
use util::secp::pedersen::RangeProof;
use util::secp::Signature;
-use util::secp::constants::{MAX_PROOF_SIZE, PEDERSEN_COMMITMENT_SIZE, AGG_SIGNATURE_SIZE};
+use util::secp::constants::{
+ MAX_PROOF_SIZE,
+ PEDERSEN_COMMITMENT_SIZE,
+ AGG_SIGNATURE_SIZE,
+ SECRET_KEY_SIZE,
+};
/// Possible errors deriving from serializing or deserializing.
#[derive(Debug)]
@@ -325,6 +330,19 @@ impl Writeable for Commitment {
}
}
+impl Writeable for BlindingFactor {
+ fn write<W: Writer>(&self, writer: &mut W) -> Result<(), Error> {
+ writer.write_fixed_bytes(self)
+ }
+}
+
+impl Readable for BlindingFactor {
+ fn read(reader: &mut Reader) -> Result<BlindingFactor, Error> {
+ let bytes = reader.read_fixed_bytes(SECRET_KEY_SIZE)?;
+ Ok(BlindingFactor::from_slice(&bytes))
+ }
+}
+
impl Writeable for Identifier {
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), Error> {
writer.write_fixed_bytes(self)
@@ -590,11 +608,14 @@ impl AsFixedBytes for ::util::secp::pedersen::RangeProof {
return self.plen;
}
}
-impl AsFixedBytes for ::util::secp::key::SecretKey {
- fn len(&self) -> usize {
- return 1;
- }
-}
+// // TODO - is this (single byte) so we do not ever serialize a secret_key?
+// // Note: we *can* serialize a blinding_factor built from a secret_key
+// // but this needs to be done explicitly (tx kernel offset for example)
+// impl AsFixedBytes for ::util::secp::key::SecretKey {
+// fn len(&self) -> usize {
+// return 1;
+// }
+// }
impl AsFixedBytes for ::util::secp::Signature {
fn len(&self) -> usize {
return 64;
@@ -605,6 +626,11 @@ impl AsFixedBytes for ::util::secp::pedersen::Commitment {
return PEDERSEN_COMMITMENT_SIZE;
}
}
+impl AsFixedBytes for BlindingFactor {
+ fn len(&self) -> usize {
+ return SECRET_KEY_SIZE;
+ }
+}
impl AsFixedBytes for SwitchCommitHash {
fn len(&self) -> usize {
return SWITCH_COMMIT_HASH_SIZE;
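With the Writeable/Readable impls added above, a BlindingFactor serializes as a fixed SECRET_KEY_SIZE-byte value, so a round trip through the ser framework should be lossless. A small sketch, with module paths assumed (not part of the patch):
use rand::thread_rng;
use keychain::BlindingFactor;
use util::secp::constants::SECRET_KEY_SIZE;
use util::secp::key::SecretKey;
use util::static_secp_instance;
use ser;
fn blinding_factor_round_trip() {
    let skey = {
        let secp = static_secp_instance();
        let secp = secp.lock().unwrap();
        SecretKey::new(&secp, &mut thread_rng())
    };
    let bf = BlindingFactor::from_secret_key(skey);
    let mut vec = Vec::new();
    ser::serialize(&mut vec, &bf).expect("serialization failed");
    assert_eq!(vec.len(), SECRET_KEY_SIZE);
    let bf2: BlindingFactor = ser::deserialize(&mut &vec[..]).unwrap();
    assert_eq!(bf, bf2);
}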
diff --git a/grin/src/miner.rs b/grin/src/miner.rs
index 8efccded9..9e3a353a8 100644
--- a/grin/src/miner.rs
+++ b/grin/src/miner.rs
@@ -594,7 +594,7 @@ impl Miner {
let txs: Vec<&Transaction> = txs_box.iter().map(|tx| tx.as_ref()).collect();
// build the coinbase and the block itself
- let fees = txs.iter().map(|tx| tx.fee).sum();
+ let fees = txs.iter().map(|tx| tx.fee()).sum();
let height = head.height + 1;
let block_fees = BlockFees {
fees,
diff --git a/keychain/src/blind.rs b/keychain/src/blind.rs
index 6c915eb81..6de15e76d 100644
--- a/keychain/src/blind.rs
+++ b/keychain/src/blind.rs
@@ -13,28 +13,90 @@
// limitations under the License.
/// Encapsulate a secret key for the blind_sum operation
+use std::cmp::min;
+use rand::thread_rng;
-use util::secp::{self, Secp256k1};
use extkey::Identifier;
use keychain::Error;
+use util;
+use util::secp::{self, Secp256k1};
+use util::secp::constants::SECRET_KEY_SIZE;
-#[derive(Clone, Debug, PartialEq)]
-pub struct BlindingFactor(secp::key::SecretKey);
+
+#[derive(Clone, Copy, Debug, PartialEq, Serialize, Deserialize)]
+pub struct BlindingFactor([u8; SECRET_KEY_SIZE]);
+
+impl AsRef<[u8]> for BlindingFactor {
+ fn as_ref(&self) -> &[u8] {
+ &self.0
+ }
+}
impl BlindingFactor {
- pub fn new(secret_key: secp::key::SecretKey) -> BlindingFactor {
- BlindingFactor(secret_key)
+ pub fn from_secret_key(skey: secp::key::SecretKey) -> BlindingFactor {
+ BlindingFactor::from_slice(&skey.as_ref())
}
- pub fn secret_key(&self) -> secp::key::SecretKey {
- self.0
+ pub fn from_slice(data: &[u8]) -> BlindingFactor {
+ let mut blind = [0; SECRET_KEY_SIZE];
+ for i in 0..min(SECRET_KEY_SIZE, data.len()) {
+ blind[i] = data[i];
+ }
+ BlindingFactor(blind)
}
- pub fn from_slice(secp: &Secp256k1, data: &[u8]) -> Result<BlindingFactor, Error> {
- Ok(BlindingFactor(
- secp::key::SecretKey::from_slice(&secp, data)?,
- ))
+ pub fn zero() -> BlindingFactor {
+ BlindingFactor::from_secret_key(secp::key::ZERO_KEY)
}
+
+ pub fn to_hex(&self) -> String {
+ util::to_hex(self.0.to_vec())
+ }
+
+ pub fn from_hex(hex: &str) -> Result<BlindingFactor, Error> {
+ let bytes = util::from_hex(hex.to_string()).unwrap();
+ Ok(BlindingFactor::from_slice(&bytes))
+ }
+
+ pub fn secret_key(&self, secp: &Secp256k1) -> Result<secp::key::SecretKey, Error> {
+ if *self == BlindingFactor::zero() {
+ // TODO - need this currently for tx tests
+ // the "zero" secret key is not actually a valid secret_key
+ // and secp lib checks this
+ Ok(secp::key::ZERO_KEY)
+ } else {
+ secp::key::SecretKey::from_slice(secp, &self.0)
+ .map_err(|e| Error::Secp(e))
+ }
+ }
+
+ /// Split a blinding_factor (aka secret_key) into a pair of blinding_factors.
+ /// We use one of these (k1) to sign the tx_kernel (k1G)
+ /// and the other gets aggregated in the block_header as the "offset".
+ /// This prevents an actor from being able to sum a set of inputs, outputs and kernels
+ /// from a block to identify and reconstruct a particular tx from a block.
+ /// You would need both k1, k2 to do this.
+ pub fn split(&self, secp: &Secp256k1) -> Result<SplitBlindingFactor, Error> {
+ let skey_1 = secp::key::SecretKey::new(secp, &mut thread_rng());
+
+ // use blind_sum to subtract skey_1 from our key (to give k = k1 + k2)
+ let skey = self.secret_key(secp)?;
+ let skey_2 = secp.blind_sum(vec![skey], vec![skey_1])?;
+
+ let blind_1 = BlindingFactor::from_secret_key(skey_1);
+ let blind_2 = BlindingFactor::from_secret_key(skey_2);
+
+ Ok(SplitBlindingFactor {
+ blind_1,
+ blind_2,
+ })
+ }
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct SplitBlindingFactor {
+ pub blind_1: BlindingFactor,
+ pub blind_2: BlindingFactor,
}
/// Accumulator to compute the sum of blinding factors. Keeps track of each
@@ -74,9 +136,46 @@ impl BlindSum {
self
}
- /// Subtractss the provided key to the sum of blinding factors.
+ /// Subtracts the provided key from the sum of blinding factors.
pub fn sub_blinding_factor(mut self, blind: BlindingFactor) -> BlindSum {
self.negative_blinding_factors.push(blind);
self
}
}
+
+#[cfg(test)]
+mod test {
+ use rand::thread_rng;
+
+ use blind::BlindingFactor;
+ use util::secp::Secp256k1;
+ use util::secp::key::{SecretKey, ZERO_KEY};
+
+ #[test]
+ fn split_blinding_factor() {
+ let secp = Secp256k1::new();
+ let skey_in = SecretKey::new(&secp, &mut thread_rng());
+ let blind = BlindingFactor::from_secret_key(skey_in);
+ let split = blind.split(&secp).unwrap();
+
+ // split a key, sum the split keys and confirm the sum matches the original key
+ let mut skey_sum = split.blind_1.secret_key(&secp).unwrap();
+ let skey_2 = split.blind_2.secret_key(&secp).unwrap();
+ let _ = skey_sum.add_assign(&secp, &skey_2).unwrap();
+ assert_eq!(skey_in, skey_sum);
+ }
+
+ // Sanity check that we can add the zero key to a secret key and it is still
+ // the same key that we started with (k + 0 = k)
+ #[test]
+ fn zero_key_addition() {
+ let secp = Secp256k1::new();
+ let skey_in = SecretKey::new(&secp, &mut thread_rng());
+ let skey_zero = ZERO_KEY;
+
+ let mut skey_out = skey_in.clone();
+ let _ = skey_out.add_assign(&secp, &skey_zero).unwrap();
+
+ assert_eq!(skey_in, skey_out);
+ }
+}
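Why the split hides transactions inside a block, expanding on the split() doc comment above (an illustrative note, not part of the patch):
// Without an offset, every individual transaction satisfies
//     sum(outputs) - sum(inputs) + fee*H == kernel.excess
// so anyone holding a block could test subsets of its inputs/outputs against
// each kernel and recover the original per-transaction groupings. With the
// split k = k1 + k2, the per-transaction equation becomes
//     sum(outputs) - sum(inputs) + fee*H == kernel.excess + k2*G
// and only the aggregate of all the k2 values is published (in the block
// header), so there is no longer a per-transaction equation to test against.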
diff --git a/keychain/src/keychain.rs b/keychain/src/keychain.rs
index 2b24d62e9..0d4cd322c 100644
--- a/keychain/src/keychain.rs
+++ b/keychain/src/keychain.rs
@@ -258,20 +258,20 @@ impl Keychain {
pos_keys.extend(&blind_sum
.positive_blinding_factors
.iter()
- .map(|b| b.secret_key())
+ .filter_map(|b| b.secret_key(&self.secp).ok())
.collect::<Vec<_>>());
neg_keys.extend(&blind_sum
.negative_blinding_factors
.iter()
- .map(|b| b.secret_key())
+ .filter_map(|b| b.secret_key(&self.secp).ok())
.collect::<Vec<_>>());
- let blinding = self.secp.blind_sum(pos_keys, neg_keys)?;
- Ok(BlindingFactor::new(blinding))
+ let sum = self.secp.blind_sum(pos_keys, neg_keys)?;
+ Ok(BlindingFactor::from_secret_key(sum))
}
- pub fn aggsig_create_context(&self, transaction_id: &Uuid, sec_key:SecretKey)
+ pub fn aggsig_create_context(&self, transaction_id: &Uuid, sec_key:SecretKey)
-> Result<(), Error>{
let mut contexts = self.aggsig_contexts.write().unwrap();
if contexts.is_none() {
@@ -381,11 +381,12 @@ impl Keychain {
self.aggsig_verify_single(sig, &msg, Some(&nonce_sum), pubkey, true)
}
- pub fn aggsig_calculate_partial_sig(&self,
+ pub fn aggsig_calculate_partial_sig(
+ &self,
transaction_id: &Uuid,
- other_pub_nonce:&PublicKey,
- fee:u64,
- lock_height:u64) -> Result<Signature, Error>{
+ other_pub_nonce: &PublicKey,
+ fee: u64,
+ lock_height: u64) -> Result<Signature, Error>{
// Add public nonces kR*G + kS*G
let (_, sec_nonce) = self.aggsig_get_private_keys(transaction_id);
let mut nonce_sum = other_pub_nonce.clone();
@@ -396,8 +397,9 @@ impl Keychain {
self.aggsig_sign_single(transaction_id, &msg, Some(&sec_nonce), Some(&nonce_sum), Some(&nonce_sum))
}
- /// Helper function to calculate final singature
- pub fn aggsig_calculate_final_sig(&self,
+ /// Helper function to calculate final signature
+ pub fn aggsig_calculate_final_sig(
+ &self,
transaction_id: &Uuid,
their_sig: &Signature,
our_sig: &Signature,
@@ -459,7 +461,8 @@ impl Keychain {
msg: &Message,
blinding: &BlindingFactor,
) -> Result<Signature, Error> {
- let sig = aggsig::sign_single(secp, &msg, &blinding.secret_key(), None, None, None)?;
+ let skey = &blinding.secret_key(&secp)?;
+ let sig = aggsig::sign_single(secp, &msg, skey, None, None, None)?;
Ok(sig)
}
@@ -474,7 +477,8 @@ impl Keychain {
msg: &Message,
blinding: &BlindingFactor,
) -> Result<Signature, Error> {
- let sig = self.secp.sign(msg, &blinding.secret_key())?;
+ let skey = &blinding.secret_key(&self.secp)?;
+ let sig = self.secp.sign(msg, &skey)?;
Ok(sig)
}
@@ -485,7 +489,12 @@ impl Keychain {
#[cfg(test)]
mod test {
+ use rand::thread_rng;
+
+ use uuid::Uuid;
+
use keychain::{BlindSum, BlindingFactor, Keychain};
+ use util::kernel_sig_msg;
use util::secp;
use util::secp::pedersen::ProofMessage;
use util::secp::key::SecretKey;
@@ -608,10 +617,374 @@ mod test {
// in the same way (convenience function)
assert_eq!(
keychain.blind_sum(&BlindSum::new()
- .add_blinding_factor(BlindingFactor::new(skey1))
- .add_blinding_factor(BlindingFactor::new(skey2))
+ .add_blinding_factor(BlindingFactor::from_secret_key(skey1))
+ .add_blinding_factor(BlindingFactor::from_secret_key(skey2))
).unwrap(),
- BlindingFactor::new(skey3)
+ BlindingFactor::from_secret_key(skey3),
);
}
+
+ #[test]
+ fn aggsig_sender_receiver_interaction() {
+ let sender_keychain = Keychain::from_random_seed().unwrap();
+ let receiver_keychain = Keychain::from_random_seed().unwrap();
+
+ // tx identifier for wallet interaction
+ let tx_id = Uuid::new_v4();
+
+ // Calculate the kernel excess here for convenience.
+ // Normally this would happen during transaction building.
+ let kernel_excess = {
+ let skey1 = sender_keychain.derived_key(
+ &sender_keychain.derive_key_id(1).unwrap(),
+ ).unwrap();
+
+ let skey2 = receiver_keychain.derived_key(
+ &receiver_keychain.derive_key_id(1).unwrap(),
+ ).unwrap();
+
+ let keychain = Keychain::from_random_seed().unwrap();
+ let blinding_factor = keychain.blind_sum(
+ &BlindSum::new()
+ .sub_blinding_factor(BlindingFactor::from_secret_key(skey1))
+ .add_blinding_factor(BlindingFactor::from_secret_key(skey2))
+ ).unwrap();
+
+ keychain.secp.commit(
+ 0,
+ blinding_factor.secret_key(&keychain.secp).unwrap(),
+ ).unwrap()
+ };
+
+ // sender starts the tx interaction
+ let (sender_pub_excess, sender_pub_nonce) = {
+ let keychain = sender_keychain.clone();
+
+ let skey = keychain.derived_key(
+ &keychain.derive_key_id(1).unwrap(),
+ ).unwrap();
+
+ // dealing with an input here so we need to negate the blinding_factor
+ // rather than use it as is
+ let blinding_factor = keychain.blind_sum(
+ &BlindSum::new()
+ .sub_blinding_factor(BlindingFactor::from_secret_key(skey))
+ ).unwrap();
+
+ let blind = blinding_factor.secret_key(&keychain.secp()).unwrap();
+
+ keychain.aggsig_create_context(&tx_id, blind);
+ keychain.aggsig_get_public_keys(&tx_id)
+ };
+
+ // receiver receives partial tx
+ let (receiver_pub_excess, receiver_pub_nonce, sig_part) = {
+ let keychain = receiver_keychain.clone();
+ let key_id = keychain.derive_key_id(1).unwrap();
+
+ // let blind = blind_sum.secret_key(&keychain.secp())?;
+ let blind = keychain.derived_key(&key_id).unwrap();
+
+ keychain.aggsig_create_context(&tx_id, blind);
+ let (pub_excess, pub_nonce) = keychain.aggsig_get_public_keys(&tx_id);
+ keychain.aggsig_add_output(&tx_id, &key_id);
+
+ let sig_part = keychain.aggsig_calculate_partial_sig(
+ &tx_id,
+ &sender_pub_nonce,
+ 0,
+ 0,
+ ).unwrap();
+ (pub_excess, pub_nonce, sig_part)
+ };
+
+ // check the sender can verify the partial signature
+ // received in the response back from the receiver
+ {
+ let keychain = sender_keychain.clone();
+ let sig_verifies = keychain.aggsig_verify_partial_sig(
+ &tx_id,
+ &sig_part,
+ &receiver_pub_nonce,
+ &receiver_pub_excess,
+ 0,
+ 0,
+ );
+ assert!(sig_verifies);
+ }
+
+ // now sender signs with their key
+ let sender_sig_part = {
+ let keychain = sender_keychain.clone();
+ keychain.aggsig_calculate_partial_sig(
+ &tx_id,
+ &receiver_pub_nonce,
+ 0,
+ 0,
+ ).unwrap()
+ };
+
+ // check the receiver can verify the partial signature
+ // received by the sender
+ {
+ let keychain = receiver_keychain.clone();
+ let sig_verifies = keychain.aggsig_verify_partial_sig(
+ &tx_id,
+ &sender_sig_part,
+ &sender_pub_nonce,
+ &sender_pub_excess,
+ 0,
+ 0,
+ );
+ assert!(sig_verifies);
+ }
+
+ // Receiver now builds final signature from sender and receiver parts
+ let (final_sig, final_pubkey) = {
+ let keychain = receiver_keychain.clone();
+
+ // Receiver recreates their partial sig (we do not maintain state from earlier)
+ let our_sig_part = keychain.aggsig_calculate_partial_sig(
+ &tx_id,
+ &sender_pub_nonce,
+ 0,
+ 0,
+ ).unwrap();
+
+ // Receiver now generates final signature from the two parts
+ let final_sig = keychain.aggsig_calculate_final_sig(
+ &tx_id,
+ &sender_sig_part,
+ &our_sig_part,
+ &sender_pub_nonce,
+ ).unwrap();
+
+ // Receiver calculates the final public key (to verify sig later)
+ let final_pubkey = keychain.aggsig_calculate_final_pubkey(&tx_id, &sender_pub_excess).unwrap();
+
+ (final_sig, final_pubkey)
+ };
+
+ // Receiver checks the final signature verifies
+ {
+ let keychain = receiver_keychain.clone();
+
+ // Receiver checks the final signature verifies
+ let sig_verifies = keychain.aggsig_verify_final_sig_build_msg(
+ &final_sig,
+ &final_pubkey,
+ 0,
+ 0,
+ );
+ assert!(sig_verifies);
+ }
+
+ // Check we can verify the sig using the kernel excess
+ {
+ let keychain = Keychain::from_random_seed().unwrap();
+
+ let msg = secp::Message::from_slice(
+ &kernel_sig_msg(
+ 0,
+ 0,
+ ),
+ ).unwrap();
+
+ let sig_verifies = Keychain::aggsig_verify_single_from_commit(
+ &keychain.secp,
+ &final_sig,
+ &msg,
+ &kernel_excess,
+ );
+
+ assert!(sig_verifies);
+ }
+ }
+
+ #[test]
+ fn aggsig_sender_receiver_interaction_offset() {
+ let sender_keychain = Keychain::from_random_seed().unwrap();
+ let receiver_keychain = Keychain::from_random_seed().unwrap();
+
+ // tx identifier for wallet interaction
+ let tx_id = Uuid::new_v4();
+
+ // This is the kernel offset that we use to split the key
+ // Summing these at the block level prevents the
+ // kernels from being used to reconstruct (or identify) individual transactions
+ let kernel_offset = SecretKey::new(&sender_keychain.secp(), &mut thread_rng());
+
+ // Calculate the kernel excess here for convenience.
+ // Normally this would happen during transaction building.
+ let kernel_excess = {
+ let skey1 = sender_keychain.derived_key(
+ &sender_keychain.derive_key_id(1).unwrap(),
+ ).unwrap();
+
+ let skey2 = receiver_keychain.derived_key(
+ &receiver_keychain.derive_key_id(1).unwrap(),
+ ).unwrap();
+
+ let keychain = Keychain::from_random_seed().unwrap();
+ let blinding_factor = keychain.blind_sum(
+ &BlindSum::new()
+ .sub_blinding_factor(BlindingFactor::from_secret_key(skey1))
+ .add_blinding_factor(BlindingFactor::from_secret_key(skey2))
+ // subtract the kernel offset here, just as we would when
+ // verifying a kernel signature
+ .sub_blinding_factor(BlindingFactor::from_secret_key(kernel_offset))
+ ).unwrap();
+
+ keychain.secp.commit(
+ 0,
+ blinding_factor.secret_key(&keychain.secp).unwrap(),
+ ).unwrap()
+ };
+
+ // sender starts the tx interaction
+ let (sender_pub_excess, sender_pub_nonce) = {
+ let keychain = sender_keychain.clone();
+
+ let skey = keychain.derived_key(
+ &keychain.derive_key_id(1).unwrap(),
+ ).unwrap();
+
+ // dealing with an input here so we need to negate the blinding_factor
+ // rather than use it as is
+ let blinding_factor = keychain.blind_sum(
+ &BlindSum::new()
+ .sub_blinding_factor(BlindingFactor::from_secret_key(skey))
+ // subtract the kernel offset to create an aggsig context
+ // with our "split" key
+ .sub_blinding_factor(BlindingFactor::from_secret_key(kernel_offset))
+ ).unwrap();
+
+ let blind = blinding_factor.secret_key(&keychain.secp()).unwrap();
+
+ keychain.aggsig_create_context(&tx_id, blind);
+ keychain.aggsig_get_public_keys(&tx_id)
+ };
+
+ // receiver receives partial tx
+ let (receiver_pub_excess, receiver_pub_nonce, sig_part) = {
+ let keychain = receiver_keychain.clone();
+ let key_id = keychain.derive_key_id(1).unwrap();
+
+ let blind = keychain.derived_key(&key_id).unwrap();
+
+ keychain.aggsig_create_context(&tx_id, blind);
+ let (pub_excess, pub_nonce) = keychain.aggsig_get_public_keys(&tx_id);
+ keychain.aggsig_add_output(&tx_id, &key_id);
+
+ let sig_part = keychain.aggsig_calculate_partial_sig(
+ &tx_id,
+ &sender_pub_nonce,
+ 0,
+ 0,
+ ).unwrap();
+ (pub_excess, pub_nonce, sig_part)
+ };
+
+ // check the sender can verify the partial signature
+ // received in the response back from the receiver
+ {
+ let keychain = sender_keychain.clone();
+ let sig_verifies = keychain.aggsig_verify_partial_sig(
+ &tx_id,
+ &sig_part,
+ &receiver_pub_nonce,
+ &receiver_pub_excess,
+ 0,
+ 0,
+ );
+ assert!(sig_verifies);
+ }
+
+ // now sender signs with their key
+ let sender_sig_part = {
+ let keychain = sender_keychain.clone();
+ keychain.aggsig_calculate_partial_sig(
+ &tx_id,
+ &receiver_pub_nonce,
+ 0,
+ 0,
+ ).unwrap()
+ };
+
+ // check the receiver can verify the partial signature
+ // received by the sender
+ {
+ let keychain = receiver_keychain.clone();
+ let sig_verifies = keychain.aggsig_verify_partial_sig(
+ &tx_id,
+ &sender_sig_part,
+ &sender_pub_nonce,
+ &sender_pub_excess,
+ 0,
+ 0,
+ );
+ assert!(sig_verifies);
+ }
+
+ // Receiver now builds final signature from sender and receiver parts
+ let (final_sig, final_pubkey) = {
+ let keychain = receiver_keychain.clone();
+
+ // Receiver recreates their partial sig (we do not maintain state from earlier)
+ let our_sig_part = keychain.aggsig_calculate_partial_sig(
+ &tx_id,
+ &sender_pub_nonce,
+ 0,
+ 0,
+ ).unwrap();
+
+ // Receiver now generates final signature from the two parts
+ let final_sig = keychain.aggsig_calculate_final_sig(
+ &tx_id,
+ &sender_sig_part,
+ &our_sig_part,
+ &sender_pub_nonce,
+ ).unwrap();
+
+ // Receiver calculates the final public key (to verify sig later)
+ let final_pubkey = keychain.aggsig_calculate_final_pubkey(&tx_id, &sender_pub_excess).unwrap();
+
+ (final_sig, final_pubkey)
+ };
+
+ // Receiver checks the final signature verifies
+ {
+ let keychain = receiver_keychain.clone();
+
+ // Receiver checks the final signature verifies
+ let sig_verifies = keychain.aggsig_verify_final_sig_build_msg(
+ &final_sig,
+ &final_pubkey,
+ 0,
+ 0,
+ );
+ assert!(sig_verifies);
+ }
+
+ // Check we can verify the sig using the kernel excess
+ {
+ let keychain = Keychain::from_random_seed().unwrap();
+
+ let msg = secp::Message::from_slice(
+ &kernel_sig_msg(
+ 0,
+ 0,
+ ),
+ ).unwrap();
+
+ let sig_verifies = Keychain::aggsig_verify_single_from_commit(
+ &keychain.secp,
+ &final_sig,
+ &msg,
+ &kernel_excess,
+ );
+
+ assert!(sig_verifies);
+ }
+ }
}
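The test above walks the full interactive flow: the sender signs with a key that has the kernel offset subtracted, the receiver signs with its output key, and the aggregated signature is finally checked against the kernel excess `commit(0, skey2 - skey1 - offset)`. A toy scalar sketch of that algebra, using plain modular arithmetic as a stand-in for the real secp256k1 / aggsig machinery (all values are made up):

```rust
// Toy model: partial signatures from "split" keys sum to a signature that
// verifies against the kernel excess, i.e. the excess with the offset removed.
const P: i128 = 2_147_483_647; // small prime standing in for the curve order
fn md(x: i128) -> i128 { ((x % P) + P) % P }

fn main() {
    let skey1: i128 = 111_111;  // sender key (spent input)
    let skey2: i128 = 222_222;  // receiver key (new output)
    let offset: i128 = 55_555;  // random kernel offset
    let e: i128 = 42;           // challenge over kernel_sig_msg(fee, lock_height)

    // kernel excess = skey2 - skey1 - offset (what the final sig must cover)
    let kernel_excess = md(skey2 - skey1 - offset);

    // sender signs with its split key (-skey1 - offset), receiver with skey2
    let (nonce_s, nonce_r): (i128, i128) = (777, 888);
    let sig_s = md(nonce_s + e * md(-skey1 - offset));
    let sig_r = md(nonce_r + e * skey2);

    // the summed partials verify against the summed nonces and kernel excess
    let final_sig = md(sig_s + sig_r);
    assert_eq!(final_sig, md(nonce_s + nonce_r + e * kernel_excess));
}
```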
diff --git a/pool/src/graph.rs b/pool/src/graph.rs
index c216a6d9e..3e567cc00 100644
--- a/pool/src/graph.rs
+++ b/pool/src/graph.rs
@@ -337,8 +337,16 @@ mod tests {
.range_proof(100, &key_id1, output_commit, msg)
.unwrap(),
};
- let outputs = vec![output];
- let test_transaction = core::transaction::Transaction::new(inputs, outputs, 5, 0);
+
+ let kernel = core::transaction::TxKernel::empty()
+ .with_fee(5)
+ .with_lock_height(0);
+
+ let test_transaction = core::transaction::Transaction::new(
+ inputs,
+ vec![output],
+ vec![kernel],
+ );
let test_pool_entry = PoolEntry::new(&test_transaction);
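With fee and lock_height moving off the transaction body and onto its kernels, the pool test now builds an explicit TxKernel via the chained `with_fee` / `with_lock_height` setters and passes a kernel vec to `Transaction::new`. A minimal stand-in for that builder shape (the real TxKernel also carries features, the excess commitment and its signature):

```rust
// Simplified consuming-builder sketch of TxKernel::empty().with_fee(..).with_lock_height(..)
#[derive(Debug, Default, PartialEq)]
struct ToyKernel {
    fee: u64,
    lock_height: u64,
}

impl ToyKernel {
    fn empty() -> ToyKernel { ToyKernel::default() }
    fn with_fee(mut self, fee: u64) -> ToyKernel { self.fee = fee; self }
    fn with_lock_height(mut self, lock_height: u64) -> ToyKernel { self.lock_height = lock_height; self }
}

fn main() {
    let kernel = ToyKernel::empty().with_fee(5).with_lock_height(0);
    assert_eq!(kernel, ToyKernel { fee: 5, lock_height: 0 });
}
```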
diff --git a/pool/src/pool.rs b/pool/src/pool.rs
index 49c246f6b..e8cb85e9e 100644
--- a/pool/src/pool.rs
+++ b/pool/src/pool.rs
@@ -160,9 +160,9 @@ where
}
let head_header = self.blockchain.head_header()?;
- if head_header.height < tx.lock_height {
+ if head_header.height < tx.lock_height() {
return Err(PoolError::ImmatureTransaction {
- lock_height: tx.lock_height,
+ lock_height: tx.lock_height(),
});
}
@@ -583,7 +583,7 @@ where
tx_weight = 1;
}
let threshold = (tx_weight as u64) * self.config.accept_fee_base;
- if tx.fee < threshold {
+ if tx.fee() < threshold {
return Err(PoolError::LowFeeTransaction(threshold));
}
}
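The pool now reads the fee and lock height through accessors rather than struct fields. How those accessors aggregate over the kernel vec is not shown in this hunk; the sketch below assumes `fee()` sums the kernel fees and `lock_height()` takes the maximum, which is one plausible shape:

```rust
// Assumed aggregation over a kernel vec (sum of fees, max of lock heights).
struct ToyKernel { fee: u64, lock_height: u64 }
struct ToyTx { kernels: Vec<ToyKernel> }

impl ToyTx {
    fn fee(&self) -> u64 {
        self.kernels.iter().map(|k| k.fee).sum()
    }
    fn lock_height(&self) -> u64 {
        self.kernels.iter().map(|k| k.lock_height).max().unwrap_or(0)
    }
}

fn main() {
    let tx = ToyTx {
        kernels: vec![
            ToyKernel { fee: 4, lock_height: 0 },
            ToyKernel { fee: 6, lock_height: 100 },
        ],
    };

    let threshold = 8; // stand-in for tx_weight * accept_fee_base
    assert_eq!(tx.fee(), 10);
    assert_eq!(tx.lock_height(), 100);
    assert!(tx.fee() >= threshold); // the pool's LowFeeTransaction check
}
```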
@@ -1242,8 +1242,7 @@ mod tests {
}
tx_elements.push(build::with_fee(fees as u64));
- let (tx, _) = build::transaction(tx_elements, &keychain).unwrap();
- tx
+ build::transaction(tx_elements, &keychain).unwrap()
}
fn test_transaction_with_coinbase_input(
@@ -1272,8 +1271,7 @@ mod tests {
}
tx_elements.push(build::with_fee(fees as u64));
- let (tx, _) = build::transaction(tx_elements, &keychain).unwrap();
- tx
+ build::transaction(tx_elements, &keychain).unwrap()
}
/// Very un-dry way of building a vanilla tx and adding a lock_height to it.
@@ -1303,8 +1301,7 @@ mod tests {
tx_elements.push(build::with_fee(fees as u64));
tx_elements.push(build::with_lock_height(lock_height));
- let (tx, _) = build::transaction(tx_elements, &keychain).unwrap();
- tx
+ build::transaction(tx_elements, &keychain).unwrap()
}
/// Deterministically generate an output defined by our test scheme
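The test helpers drop the tuple destructuring because `build::transaction` now hands back a finished Transaction, while callers that still need the blinding sum (the wallet code later in this patch) switch to `build::partial_transaction`. A toy illustration of the two return shapes, with empty stand-in types:

```rust
// Illustrative only: the two builder entry points differ in what they return.
struct ToyTx;
struct ToyBlindSum;

// finished transaction, kernel(s) already built in
fn transaction() -> Result<ToyTx, ()> { Ok(ToyTx) }

// intermediate transaction plus the blinding sum the wallet still needs
fn partial_transaction() -> Result<(ToyTx, ToyBlindSum), ()> { Ok((ToyTx, ToyBlindSum)) }

fn main() -> Result<(), ()> {
    let _tx = transaction()?;                       // pool tests: no tuple any more
    let (_tx, _blind_sum) = partial_transaction()?; // wallet: keeps the blind sum
    Ok(())
}
```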
diff --git a/wallet/src/receiver.rs b/wallet/src/receiver.rs
index a0430ba98..bc9527f6f 100644
--- a/wallet/src/receiver.rs
+++ b/wallet/src/receiver.rs
@@ -25,9 +25,9 @@ use uuid::Uuid;
use api;
use core::consensus::reward;
-use core::core::{build, Block, Output, Transaction, TxKernel, amount_to_hr_string};
+use core::core::{build, Block, Committed, Output, Transaction, TxKernel, amount_to_hr_string};
use core::{global, ser};
-use keychain::{Identifier, Keychain};
+use keychain::{Identifier, Keychain, BlindingFactor};
use types::*;
use util::{LOGGER, to_hex, secp};
@@ -52,7 +52,7 @@ fn handle_sender_initiation(
keychain: &Keychain,
partial_tx: &PartialTx
) -> Result<PartialTx, Error> {
- let (amount, _sender_pub_blinding, sender_pub_nonce, _sig, tx) = read_partial_tx(keychain, partial_tx)?;
+ let (amount, _sender_pub_blinding, sender_pub_nonce, kernel_offset, _sig, tx) = read_partial_tx(keychain, partial_tx)?;
let root_key_id = keychain.root_key_id();
@@ -60,9 +60,9 @@ fn handle_sender_initiation(
// we don't necessarily want to just trust the sender
// we could just overwrite the fee here (but we won't) due to the ecdsa sig
let fee = tx_fee(tx.inputs.len(), tx.outputs.len() + 1, None);
- if fee != tx.fee {
+ if fee != tx.fee() {
return Err(Error::FeeDispute {
- sender_fee: tx.fee,
+ sender_fee: tx.fee(),
recipient_fee: fee,
});
}
@@ -82,8 +82,8 @@ fn handle_sender_initiation(
let out_amount = amount - fee;
- //First step is just to get the excess sum of the outputs we're participating in
- //Output and key needs to be stored until transaction finalisation time, somehow
+ // First step is just to get the excess sum of the outputs we're participating in
+ // Output and key needs to be stored until transaction finalisation time, somehow
let key_id = WalletData::with_wallet(&config.data_file_dir, |wallet_data| {
let (key_id, derivation) = next_available_key(&wallet_data, keychain);
@@ -104,7 +104,7 @@ fn handle_sender_initiation(
})?;
// Still handy for getting the blinding sum
- let (_, blind_sum) = build::transaction(
+ let (_, blind_sum) = build::partial_transaction(
vec![
build::output(out_amount, key_id.clone()),
],
@@ -114,16 +114,19 @@ fn handle_sender_initiation(
warn!(LOGGER, "Creating new aggsig context");
// Create a new aggsig context
// this will create a new blinding sum and nonce, and store them
- let result = keychain.aggsig_create_context(&partial_tx.id, blind_sum.secret_key());
- if let Err(_) = result {
- return Err(Error::DuplicateTransactionId);
- }
+ let blind = blind_sum.secret_key(&keychain.secp())?;
+ keychain.aggsig_create_context(&partial_tx.id, blind);
keychain.aggsig_add_output(&partial_tx.id, &key_id);
- let sig_part=keychain.aggsig_calculate_partial_sig(&partial_tx.id, &sender_pub_nonce, fee, tx.lock_height).unwrap();
+ let sig_part = keychain.aggsig_calculate_partial_sig(
+ &partial_tx.id,
+ &sender_pub_nonce,
+ fee,
+ tx.lock_height(),
+ ).unwrap();
// Build the response, which should contain sR, blinding excess xR * G, public nonce kR * G
- let mut partial_tx = build_partial_tx(&partial_tx.id, keychain, amount, Some(sig_part), tx);
+ let mut partial_tx = build_partial_tx(&partial_tx.id, keychain, amount, kernel_offset, Some(sig_part), tx);
partial_tx.phase = PartialTxPhase::ReceiverInitiation;
Ok(partial_tx)
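Both partial signatures are computed over a message derived from the fee and lock height, which is why the receiver recomputing the fee (and rejecting on mismatch) makes a fee dispute detectable. The exact byte layout of `kernel_sig_msg` is not shown in this patch; the sketch below assumes a 32-byte buffer with fee and lock_height packed big-endian, purely to illustrate that both sides must derive the identical message:

```rust
// Hypothetical layout: fee and lock_height packed into the 32-byte message
// both parties sign over (the real kernel_sig_msg layout may differ).
fn kernel_sig_msg(fee: u64, lock_height: u64) -> [u8; 32] {
    let mut msg = [0u8; 32];
    msg[16..24].copy_from_slice(&fee.to_be_bytes());
    msg[24..32].copy_from_slice(&lock_height.to_be_bytes());
    msg
}

fn main() {
    // the receiver recomputes the fee itself; if it differs from what the
    // sender signed over, the messages (and so the signatures) won't match
    let sender_msg = kernel_sig_msg(8_000_000, 100);
    let receiver_msg = kernel_sig_msg(8_000_000, 100);
    assert_eq!(sender_msg, receiver_msg);

    let disputed_msg = kernel_sig_msg(7_000_000, 100);
    assert_ne!(sender_msg, disputed_msg);
}
```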
@@ -146,33 +149,66 @@ fn handle_sender_confirmation(
keychain: &Keychain,
partial_tx: &PartialTx
) -> Result<PartialTx, Error> {
- let (amount, sender_pub_blinding, sender_pub_nonce, sender_sig_part, tx) = read_partial_tx(keychain, partial_tx)?;
- let sender_sig_part=sender_sig_part.unwrap();
- let res = keychain.aggsig_verify_partial_sig(&partial_tx.id, &sender_sig_part, &sender_pub_nonce, &sender_pub_blinding, tx.fee, tx.lock_height);
+ let (amount, sender_pub_blinding, sender_pub_nonce, kernel_offset, sender_sig_part, tx) = read_partial_tx(keychain, partial_tx)?;
+ let sender_sig_part = sender_sig_part.unwrap();
+ let res = keychain.aggsig_verify_partial_sig(
+ &partial_tx.id,
+ &sender_sig_part,
+ &sender_pub_nonce,
+ &sender_pub_blinding,
+ tx.fee(), tx.lock_height(),
+ );
if !res {
error!(LOGGER, "Partial Sig from sender invalid.");
return Err(Error::Signature(String::from("Partial Sig from sender invalid.")));
}
- //Just calculate our sig part again instead of storing
- let our_sig_part=keychain.aggsig_calculate_partial_sig(&partial_tx.id, &sender_pub_nonce, tx.fee, tx.lock_height).unwrap();
+ // Just calculate our sig part again instead of storing
+ let our_sig_part = keychain.aggsig_calculate_partial_sig(
+ &partial_tx.id,
+ &sender_pub_nonce,
+ tx.fee(),
+ tx.lock_height(),
+ ).unwrap();
// And the final signature
- let final_sig=keychain.aggsig_calculate_final_sig(&partial_tx.id, &sender_sig_part, &our_sig_part, &sender_pub_nonce).unwrap();
+ let final_sig = keychain.aggsig_calculate_final_sig(
+ &partial_tx.id,
+ &sender_sig_part,
+ &our_sig_part,
+ &sender_pub_nonce,
+ ).unwrap();
// Calculate the final public key (for our own sanity check)
- let final_pubkey=keychain.aggsig_calculate_final_pubkey(&partial_tx.id, &sender_pub_blinding).unwrap();
+ let final_pubkey = keychain.aggsig_calculate_final_pubkey(
+ &partial_tx.id,
+ &sender_pub_blinding,
+ ).unwrap();
- //Check our final sig verifies
- let res = keychain.aggsig_verify_final_sig_build_msg(&final_sig, &final_pubkey, tx.fee, tx.lock_height);
+ // Check our final sig verifies
+ let res = keychain.aggsig_verify_final_sig_build_msg(
+ &final_sig,
+ &final_pubkey,
+ tx.fee(),
+ tx.lock_height(),
+ );
if !res {
error!(LOGGER, "Final aggregated signature invalid.");
return Err(Error::Signature(String::from("Final aggregated signature invalid.")));
}
- let final_tx = build_final_transaction(&partial_tx.id, config, keychain, amount, &final_sig, tx.clone())?;
+ let final_tx = build_final_transaction(
+ &partial_tx.id,
+ config,
+ keychain,
+ amount,
+ kernel_offset,
+ &final_sig,
+ tx.clone(),
+ )?;
+
let tx_hex = to_hex(ser::ser_vec(&final_tx).unwrap());
let url = format!("{}/v1/pool/push", config.check_node_api_http_addr.as_str());
@@ -180,7 +216,8 @@ fn handle_sender_confirmation(
.map_err(|e| Error::Node(e))?;
// Return what we've actually posted
- let mut partial_tx = build_partial_tx(&partial_tx.id, keychain, amount, Some(final_sig), tx);
+ // TODO - why build_partial_tx here? Just a naming issue?
+ let mut partial_tx = build_partial_tx(&partial_tx.id, keychain, amount, kernel_offset, Some(final_sig), tx);
partial_tx.phase = PartialTxPhase::ReceiverConfirmation;
Ok(partial_tx)
}
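handle_sender_confirmation is now mostly reformatting, but the ordering it preserves matters: verify the sender's partial signature first, aggregate, sanity-check the final signature, and only then build and push the final transaction. A skeleton with the crypto calls replaced by booleans, just to make that ordering explicit:

```rust
// Control-flow skeleton of the receiver-side confirmation step.
fn handle_sender_confirmation(sender_partial_ok: bool, final_sig_ok: bool) -> Result<(), String> {
    // 1. verify the sender's partial sig against our stored aggsig context
    if !sender_partial_ok {
        return Err("Partial Sig from sender invalid.".to_string());
    }
    // 2. recompute our own partial sig, aggregate the two into the final sig
    // 3. sanity-check the final sig against the final public key
    if !final_sig_ok {
        return Err("Final aggregated signature invalid.".to_string());
    }
    // 4. only now build the final transaction (offset applied) and push it to the pool
    Ok(())
}

fn main() {
    assert!(handle_sender_confirmation(true, true).is_ok());
    assert!(handle_sender_confirmation(false, true).is_err());
    assert!(handle_sender_confirmation(true, false).is_err());
}
```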
@@ -200,7 +237,7 @@ impl Handler for WalletReceiver {
if let Ok(Some(partial_tx)) = struct_body {
match partial_tx.phase {
PartialTxPhase::SenderInitiation => {
- let resp_tx=handle_sender_initiation(&self.config, &self.keychain, &partial_tx)
+ let resp_tx = handle_sender_initiation(&self.config, &self.keychain, &partial_tx)
.map_err(|e| {
error!(LOGGER, "Phase 1 Sender Initiation -> Problematic partial tx, looks like this: {:?}", partial_tx);
api::Error::Internal(
@@ -211,7 +248,7 @@ impl Handler for WalletReceiver {
Ok(Response::with((status::Ok, json)))
},
PartialTxPhase::SenderConfirmation => {
- let resp_tx=handle_sender_confirmation(&self.config, &self.keychain, &partial_tx)
+ let resp_tx = handle_sender_confirmation(&self.config, &self.keychain, &partial_tx)
.map_err(|e| {
error!(LOGGER, "Phase 3 Sender Confirmation -> Problematic partial tx, looks like this: {:?}", partial_tx);
api::Error::Internal(
@@ -317,35 +354,35 @@ fn build_final_transaction(
config: &WalletConfig,
keychain: &Keychain,
amount: u64,
+ kernel_offset: BlindingFactor,
excess_sig: &secp::Signature,
tx: Transaction,
) -> Result<Transaction, Error> {
-
let root_key_id = keychain.root_key_id();
// double check the fee amount included in the partial tx
// we don't necessarily want to just trust the sender
// we could just overwrite the fee here (but we won't) due to the ecdsa sig
let fee = tx_fee(tx.inputs.len(), tx.outputs.len() + 1, None);
- if fee != tx.fee {
+ if fee != tx.fee() {
return Err(Error::FeeDispute {
- sender_fee: tx.fee,
+ sender_fee: tx.fee(),
recipient_fee: fee,
});
}
- if fee > amount {
+ if fee > amount {
info!(
LOGGER,
"Rejected the transfer because transaction fee ({}) exceeds received amount ({}).",
amount_to_hr_string(fee),
amount_to_hr_string(amount)
);
- return Err(Error::FeeExceedsAmount {
- sender_amount: amount,
- recipient_fee: fee,
- });
- }
+ return Err(Error::FeeExceedsAmount {
+ sender_amount: amount,
+ recipient_fee: fee,
+ });
+ }
let out_amount = amount - fee;
@@ -374,19 +411,35 @@ fn build_final_transaction(
// Build final transaction, the sum of which should
// be the same as the exchanged excess values
- let (mut final_tx, _) = build::transaction(
+ let mut final_tx = build::transaction(
vec![
build::initial_tx(tx),
build::output(out_amount, key_id.clone()),
+ build::with_offset(kernel_offset),
],
keychain,
)?;
- final_tx.excess_sig = excess_sig.clone();
+ // build the final excess based on final tx and offset
+ let final_excess = {
+ // sum the input/output commitments on the final tx
+ let tx_excess = final_tx.sum_commitments()?;
- // make sure the resulting transaction is valid (could have been lied to on
- // excess).
- let _ = final_tx.validate()?;
+ // subtract the kernel_excess (built from kernel_offset)
+ let offset_excess = keychain.secp().commit(0, kernel_offset.secret_key(&keychain.secp()).unwrap()).unwrap();
+ keychain.secp().commit_sum(vec![tx_excess], vec![offset_excess])?
+ };
+
+ // update the tx kernel to reflect the offset excess and sig
+ assert_eq!(final_tx.kernels.len(), 1);
+ final_tx.kernels[0].excess = final_excess.clone();
+ final_tx.kernels[0].excess_sig = excess_sig.clone();
+
+ // confirm the kernel verifies successfully before proceeding
+ final_tx.kernels[0].verify()?;
+
+ // confirm the overall transaction is valid (including the updated kernel)
+ final_tx.validate()?;
debug!(
LOGGER,
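build_final_transaction no longer trusts an excess handed to it; it recomputes the kernel excess by summing the final transaction's commitments and subtracting the offset commitment `commit(0, kernel_offset)`, then writes the excess and excess_sig into the single kernel and verifies that kernel before validating the whole transaction. A scalar-only sketch of that excess arithmetic (blinding factors only; the value/fee generator is ignored here):

```rust
// Scalar model of: kernel_excess = sum_commitments(tx) - commit(0, offset)
const P: i128 = 2_147_483_647;
fn md(x: i128) -> i128 { ((x % P) + P) % P }

fn main() {
    let k_in: i128 = 1_000;     // sender's spent input
    let k_change: i128 = 300;   // sender's change output
    let k_recv: i128 = 7_000;   // receiver's output (added in this wallet)
    let offset: i128 = 2_500;   // kernel offset the sender picked

    // sum_commitments(): outputs minus inputs, blinding part only
    let tx_excess = md(k_change + k_recv - k_in);

    // subtract commit(0, offset) to recover the kernel excess
    let kernel_excess = md(tx_excess - offset);

    // this is exactly the key the two aggsig parties signed with:
    // sender used (k_change - k_in - offset), receiver used k_recv
    let signing_key = md(md(k_change - k_in - offset) + k_recv);
    assert_eq!(kernel_excess, signing_key);
}
```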
diff --git a/wallet/src/sender.rs b/wallet/src/sender.rs
index c6118f83a..155064df4 100644
--- a/wallet/src/sender.rs
+++ b/wallet/src/sender.rs
@@ -12,16 +12,19 @@
// See the License for the specific language governing permissions and
// limitations under the License.
+use rand::thread_rng;
use uuid::Uuid;
+
use api;
use client;
use checker;
use core::core::{build, Transaction, amount_to_hr_string};
use core::ser;
-use keychain::{BlindingFactor, Identifier, Keychain};
+use keychain::{BlindingFactor, BlindSum, Identifier, Keychain};
use receiver::TxWrapper;
use types::*;
use util::LOGGER;
+use util::secp::key::SecretKey;
use util;
/// Issue a new transaction to the provided sender by spending some of our
@@ -45,7 +48,7 @@ pub fn issue_send_tx(
// proof of concept - set lock_height on the tx
let lock_height = chain_tip.height;
- let (tx, blind_sum, coins, change_key, amount_with_fee) = build_send_tx(
+ let (tx, blind, coins, change_key, amount_with_fee) = build_send_tx(
config,
keychain,
amount,
@@ -55,16 +58,32 @@ pub fn issue_send_tx(
max_outputs,
selection_strategy_is_use_all,
)?;
- /*
- * -Sender picks random blinding factors for all outputs it participates in, computes total blinding excess xS
- * -Sender picks random nonce kS
- * -Sender posts inputs, outputs, Message M=fee, xS * G and kS * G to Receiver
- */
-// Create a new aggsig context
+ // TODO - wrap this up in build_send_tx or even the build() call?
+ // Generate a random kernel offset here
+ // and subtract it from the blind_sum so we create
+ // the aggsig context with the "split" key
+ let kernel_offset = BlindingFactor::from_secret_key(
+ SecretKey::new(&keychain.secp(), &mut thread_rng())
+ );
+
+ let blind_offset = keychain.blind_sum(
+ &BlindSum::new()
+ .add_blinding_factor(blind)
+ .sub_blinding_factor(kernel_offset)
+ ).unwrap();
+
+ //
+ // -Sender picks random blinding factors for all outputs it participates in, computes total blinding excess xS
+ // -Sender picks random nonce kS
+ // -Sender posts inputs, outputs, Message M=fee, xS * G and kS * G to Receiver
+ //
+ // Create a new aggsig context
let tx_id = Uuid::new_v4();
- let _ = keychain.aggsig_create_context(&tx_id, blind_sum.secret_key());
- let partial_tx = build_partial_tx(&tx_id, keychain, amount_with_fee, None, tx);
+ let skey = blind_offset.secret_key(&keychain.secp())?;
+ keychain.aggsig_create_context(&tx_id, skey);
+
+ let partial_tx = build_partial_tx(&tx_id, keychain, amount_with_fee, kernel_offset, None, tx);
// Closure to acquire wallet lock and lock the coins being spent
// so we avoid accidental double spend attempt.
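On the sender side the offset is drawn before the aggsig context is created, the context is keyed with blind minus offset, and the offset itself rides along in the PartialTx so the receiver can reconstruct the kernel excess at finalisation. A toy skeleton of that ordering, with integers standing in for the real keys:

```rust
// Toy sketch of the sender-side setup order; the real calls are the
// keychain/aggsig ones in the hunk above.
struct ToyAggsigContext { signing_key: i64 }

fn setup_sender(blind_sum: i64, kernel_offset: i64) -> (ToyAggsigContext, i64) {
    // the context is keyed with the "split" key...
    let ctx = ToyAggsigContext { signing_key: blind_sum - kernel_offset };
    // ...and the offset is handed back so it can travel in the PartialTx
    (ctx, kernel_offset)
}

fn main() {
    let blind_sum = 10_000;     // sum of the sender's input/output blinds
    let kernel_offset = 1_234;  // random in the real code

    let (ctx, carried_offset) = setup_sender(blind_sum, kernel_offset);

    // only signing key + offset adds back up to the full blinding excess,
    // which is why the offset has to survive until finalisation
    assert_eq!(ctx.signing_key + carried_offset, blind_sum);
}
```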
@@ -117,17 +136,25 @@ pub fn issue_send_tx(
* -Sender computes their part of signature, sS = kS + e * xS
* -Sender posts sS to receiver
*/
- let (_amount, recp_pub_blinding, recp_pub_nonce, sig, tx) = read_partial_tx(keychain, &res.unwrap())?;
- let res = keychain.aggsig_verify_partial_sig(&tx_id, &sig.unwrap(), &recp_pub_nonce, &recp_pub_blinding, tx.fee, lock_height);
+ let (_amount, recp_pub_blinding, recp_pub_nonce, kernel_offset, sig, tx) = read_partial_tx(keychain, &res.unwrap())?;
+ let res = keychain.aggsig_verify_partial_sig(
+ &tx_id,
+ &sig.unwrap(),
+ &recp_pub_nonce,
+ &recp_pub_blinding,
+ tx.fee(),
+ lock_height,
+ );
if !res {
error!(LOGGER, "Partial Sig from recipient invalid.");
return Err(Error::Signature(String::from("Partial Sig from recipient invalid.")));
}
- let sig_part=keychain.aggsig_calculate_partial_sig(&tx_id, &recp_pub_nonce, tx.fee, tx.lock_height).unwrap();
+ let sig_part = keychain.aggsig_calculate_partial_sig(&tx_id, &recp_pub_nonce, tx.fee(), tx.lock_height()).unwrap();
// Build the next stage, containing sS (and our pubkeys again, for the recipient's convenience)
- let mut partial_tx = build_partial_tx(&tx_id, keychain, amount_with_fee, Some(sig_part), tx);
+ // offset has not been modified during tx building, so pass it back in
+ let mut partial_tx = build_partial_tx(&tx_id, keychain, amount_with_fee, kernel_offset, Some(sig_part), tx);
partial_tx.phase = PartialTxPhase::SenderConfirmation;
// And send again
@@ -146,7 +173,8 @@ pub fn issue_send_tx(
rollback_wallet()?;
return Err(e);
}
- //All good so
+
+ // All good so update the wallet
update_wallet()?;
Ok(())
}
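Taken together with receiver.rs, issue_send_tx drives a four-phase exchange; each hop sets `partial_tx.phase` before the message goes back over the wire. A small enum sketch of that sequence (the variant names mirror the PartialTxPhase values used in this patch; the helper is illustrative only):

```rust
// Phase sequence of the interactive tx flow exercised by this patch.
#[derive(Debug, Clone, Copy, PartialEq)]
enum PartialTxPhase {
    SenderInitiation,     // sender: tx, offset, public excess + nonce
    ReceiverInitiation,   // receiver: adds its output, returns partial sig
    SenderConfirmation,   // sender: verifies, adds its own partial sig
    ReceiverConfirmation, // receiver: aggregates, finalizes, pushes to pool
}

fn next_phase(phase: PartialTxPhase) -> Option<PartialTxPhase> {
    match phase {
        PartialTxPhase::SenderInitiation => Some(PartialTxPhase::ReceiverInitiation),
        PartialTxPhase::ReceiverInitiation => Some(PartialTxPhase::SenderConfirmation),
        PartialTxPhase::SenderConfirmation => Some(PartialTxPhase::ReceiverConfirmation),
        PartialTxPhase::ReceiverConfirmation => None, // final tx already posted
    }
}

fn main() {
    let mut phase = Some(PartialTxPhase::SenderInitiation);
    let mut hops = 0;
    while let Some(p) = phase {
        hops += 1;
        phase = next_phase(p);
    }
    assert_eq!(hops, 4);
}
```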
@@ -228,7 +256,7 @@ fn build_send_tx(
// on tx being sent (based on current chain height via api).
parts.push(build::with_lock_height(lock_height));
- let (tx, blind) = build::transaction(parts, &keychain)?;
+ let (tx, blind) = build::partial_transaction(parts, &keychain)?;
Ok((tx, blind, coins, change_key, amount_with_fee))
}
@@ -270,7 +298,7 @@ pub fn issue_burn_tx(
parts.push(build::output(amount - fee, Identifier::zero()));
// finalize the burn transaction and send
- let (tx_burn, _) = build::transaction(parts, &keychain)?;
+ let tx_burn = build::transaction(parts, &keychain)?;
tx_burn.validate()?;
let tx_hex = util::to_hex(ser::ser_vec(&tx_burn).unwrap());
@@ -336,7 +364,7 @@ fn inputs_and_change(
#[cfg(test)]
mod test {
- use core::core::build::{input, output, transaction};
+ use core::core::build;
use core::core::hash::ZERO_HASH;
use keychain::Keychain;
@@ -348,8 +376,8 @@ mod test {
let keychain = Keychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
- let (tx1, _) = transaction(vec![output(105, key_id1.clone())], &keychain).unwrap();
- let (tx2, _) = transaction(vec![input(105, ZERO_HASH, key_id1.clone())], &keychain).unwrap();
+ let tx1 = build::transaction(vec![build::output(105, key_id1.clone())], &keychain).unwrap();
+ let tx2 = build::transaction(vec![build::input(105, ZERO_HASH, key_id1.clone())], &keychain).unwrap();
assert_eq!(tx1.outputs[0].features, tx2.inputs[0].features);
assert_eq!(tx1.outputs[0].commitment(), tx2.inputs[0].commitment());
diff --git a/wallet/src/types.rs b/wallet/src/types.rs
index ff4c027fd..c37bb0e70 100644
--- a/wallet/src/types.rs
+++ b/wallet/src/types.rs
@@ -38,6 +38,7 @@ use core::core::{transaction, Transaction};
use core::core::hash::Hash;
use core::ser;
use keychain;
+use keychain::BlindingFactor;
use util;
use util::secp;
use util::secp::Signature;
@@ -717,6 +718,7 @@ pub struct PartialTx {
pub amount: u64,
pub public_blind_excess: String,
pub public_nonce: String,
+ pub kernel_offset: String,
pub part_sig: String,
pub tx: String,
}
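PartialTx gains a kernel_offset field, carried as a hex string like the public excess and nonce. A toy round trip for that encoding (local to_hex/from_hex helpers stand in for the util ones):

```rust
// Hex round trip for the new kernel_offset field on PartialTx.
fn to_hex(bytes: &[u8]) -> String {
    bytes.iter().map(|b| format!("{:02x}", b)).collect()
}

fn from_hex(s: &str) -> Vec<u8> {
    (0..s.len())
        .step_by(2)
        .map(|i| u8::from_str_radix(&s[i..i + 2], 16).unwrap())
        .collect()
}

struct ToyPartialTx {
    // hex string, matching the style of public_blind_excess / public_nonce
    kernel_offset: String,
}

fn main() {
    let offset = [7u8; 32]; // stand-in for the random 32-byte secret key
    let partial_tx = ToyPartialTx { kernel_offset: to_hex(&offset) };

    assert_eq!(partial_tx.kernel_offset.len(), 64);
    assert_eq!(from_hex(&partial_tx.kernel_offset), offset.to_vec());
}
```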
@@ -728,6 +730,7 @@ pub fn build_partial_tx(
transaction_id : &Uuid,
keychain: &keychain::Keychain,
receive_amount: u64,
+ kernel_offset: BlindingFactor,
part_sig: Option<Signature>,
tx: Transaction,
) -> PartialTx {
@@ -747,6 +750,7 @@ pub fn build_partial_tx(
amount: receive_amount,
public_blind_excess: util::to_hex(pub_excess),
public_nonce: util::to_hex(pub_nonce),
+ kernel_offset: kernel_offset.to_hex(),
part_sig: match part_sig {
None => String::from("00"),
Some(p) => util::to_hex(p.serialize_der(&keychain.secp())),
@@ -760,11 +764,15 @@ pub fn build_partial_tx(
pub fn read_partial_tx(
keychain: &keychain::Keychain,
partial_tx: &PartialTx,
-) -> Result<(u64, PublicKey, PublicKey, Option<Signature>, Transaction), Error> {
+) -> Result<(u64, PublicKey, PublicKey, BlindingFactor, Option<Signature>, Transaction), Error> {
let blind_bin = util::from_hex(partial_tx.public_blind_excess.clone())?;
let blinding = PublicKey::from_slice(keychain.secp(), &blind_bin[..])?;
+
let nonce_bin = util::from_hex(partial_tx.public_nonce.clone())?;
let nonce = PublicKey::from_slice(keychain.secp(), &nonce_bin[..])?;
+
+ let kernel_offset = BlindingFactor::from_hex(&partial_tx.kernel_offset.clone())?;
+
let sig_bin = util::from_hex(partial_tx.part_sig.clone())?;
let sig = match sig_bin.len() {
1 => None,
@@ -774,7 +782,7 @@ pub fn read_partial_tx(
let tx = ser::deserialize(&mut &tx_bin[..]).map_err(|_| {
Error::Format("Could not deserialize transaction, invalid format.".to_string())
})?;
- Ok((partial_tx.amount, blinding, nonce, sig, tx))
+ Ok((partial_tx.amount, blinding, nonce, kernel_offset, sig, tx))
}
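read_partial_tx keeps the existing sentinel for the optional partial signature: part_sig is the string "00" when absent, so a decoded length of one byte maps to None. A small sketch of that convention (raw bytes, hex decoding elided):

```rust
// "00" decodes to a single zero byte, which is the "no signature yet" marker;
// anything longer is treated as a DER-encoded partial signature.
fn part_sig_from_bytes(sig_bin: Vec<u8>) -> Option<Vec<u8>> {
    if sig_bin.len() == 1 {
        None
    } else {
        Some(sig_bin)
    }
}

fn main() {
    assert_eq!(part_sig_from_bytes(vec![0x00]), None);

    let der = vec![0x30, 0x44, 0x02, 0x20]; // first bytes of a DER sig, say
    assert_eq!(part_sig_from_bytes(der.clone()), Some(der));
}
```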
/// Amount in request to build a coinbase output.