Mirror of https://github.com/mimblewimble/grin.git (synced 2025-02-01 17:01:09 +03:00)
Transactions contain kernels. Transactions and blocks maintain a kernel offset (split key). (#681)
* WIP - split the key in final tx step, store "offset" on transaction itself
* rebase
* commit
* tx with offset
* got a test tx validating successfully using a sig from a split key and the appropriate offset
* sum up the offset for the block_header
* fix size tests for blocks and compact blocks (header now includes offset)
* use txs with offsets in most of the core tests; some tests now failing
* build kernel from k1G (k2 stored on tx, sum stored on header)
* commit
* tx now has vec of kernels; rework tx and kernel validation
* add test for tx cut_through
* wip - working on splitting in aggsig
* split the key when creating the initial sender aggsig context
* cleanup
* cleanup
* code needs cleaning up but split keys working for sender/receiver aggsig flow
* cleanup debug logging
* fix tests
* fix merge and basic cleanup
* fix keychain tests to use new tx_id
This commit is contained in:
parent 76796738c1
commit ebd801f14e

15 changed files with 1160 additions and 293 deletions
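The commit message above describes a split-key scheme: a transaction's overall excess k is split as k = k1 + k2, the kernel is built and signed from k1 (so its excess is k1G), and k2 is published as the transaction "offset" and later summed into the block header. A minimal sketch of that bookkeeping, with plain integers standing in for secp256k1 scalars (no curve math, all names made up):

```rust
// Toy sketch of the split-key bookkeeping described in the commit message.
// Plain i64 values stand in for secp256k1 blinding factors.
fn main() {
    // overall excess of a transaction: sum(output blinding) - sum(input blinding)
    let r_in: i64 = 5;
    let r_out: i64 = 9;
    let k = r_out - r_in;

    // split k into k1 (signed inside the kernel) and k2 (the tx "offset")
    let k2: i64 = 3; // chosen at random in the real protocol
    let k1 = k - k2;

    // a verifier never sees k, only the kernel excess (k1) and the offset (k2);
    // the balance check becomes: outputs - inputs == kernel_excess + offset
    assert_eq!(r_out - r_in, k1 + k2);
    println!("k1 = {}, k2 = {}", k1, k2);
}
```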
@@ -263,7 +263,7 @@ fn spend_in_fork() {
 	let lock_height = 1 + global::coinbase_maturity();
 	assert_eq!(lock_height, 4);

-	let (tx1, _) = build::transaction(
+	let tx1 = build::transaction(
 		vec![
 			build::coinbase_input(consensus::REWARD, block_hash, kc.derive_key_id(2).unwrap()),
 			build::output(consensus::REWARD - 20000, kc.derive_key_id(30).unwrap()),

@@ -276,7 +276,7 @@ fn spend_in_fork() {
 	let prev_main = next.header.clone();
 	chain.process_block(next.clone(), chain::Options::SKIP_POW).unwrap();

-	let (tx2, _) = build::transaction(
+	let tx2 = build::transaction(
 		vec![
 			build::input(consensus::REWARD - 20000, next.hash(), kc.derive_key_id(30).unwrap()),
 			build::output(consensus::REWARD - 40000, kc.derive_key_id(31).unwrap()),

@@ -116,7 +116,7 @@ fn test_coinbase_maturity() {

 	// here we build a tx that attempts to spend the earlier coinbase output
 	// this is not a valid tx as the coinbase output cannot be spent yet
-	let (coinbase_txn, _) = build::transaction(
+	let coinbase_txn = build::transaction(
 		vec![
 			build::coinbase_input(amount, block_hash, key_id1.clone()),
 			build::output(amount - 2, key_id2.clone()),

@@ -183,7 +183,7 @@ fn test_coinbase_maturity() {

 	let prev = chain.head_header().unwrap();

-	let (coinbase_txn, _) = build::transaction(
+	let coinbase_txn = build::transaction(
 		vec![
 			build::coinbase_input(amount, block_hash, key_id1.clone()),
 			build::output(amount - 2, key_id2.clone()),
@@ -43,12 +43,13 @@ use util::kernel_sig_msg;
 use util::LOGGER;
 use global;
 use keychain;
+use keychain::BlindingFactor;

 /// Errors thrown by Block validation
 #[derive(Debug, Clone, PartialEq)]
 pub enum Error {
-	/// The sum of output minus input commitments does not match the sum of
-	/// kernel commitments
+	/// The sum of output minus input commitments does not
+	/// match the sum of kernel commitments
 	KernelSumMismatch,
 	/// Same as above but for the coinbase part of a block, including reward
 	CoinbaseSumMismatch,

@@ -126,6 +127,8 @@ pub struct BlockHeader {
 	pub difficulty: Difficulty,
 	/// Total accumulated difficulty since genesis block
 	pub total_difficulty: Difficulty,
+	/// The single aggregate "offset" that needs to be applied for all commitments to sum
+	pub kernel_offset: BlindingFactor,
 }

 impl Default for BlockHeader {

@@ -143,6 +146,7 @@ impl Default for BlockHeader {
 			kernel_root: ZERO_HASH,
 			nonce: 0,
 			pow: Proof::zero(proof_size),
+			kernel_offset: BlindingFactor::zero(),
 		}
 	}
 }

@@ -164,6 +168,7 @@ impl Writeable for BlockHeader {
 		try!(writer.write_u64(self.nonce));
 		try!(self.difficulty.write(writer));
 		try!(self.total_difficulty.write(writer));
+		try!(self.kernel_offset.write(writer));

 		if writer.serialization_mode() != ser::SerializationMode::Hash {
 			try!(self.pow.write(writer));

@@ -184,6 +189,7 @@ impl Readable for BlockHeader {
 		let nonce = reader.read_u64()?;
 		let difficulty = Difficulty::read(reader)?;
 		let total_difficulty = Difficulty::read(reader)?;
+		let kernel_offset = BlindingFactor::read(reader)?;
 		let pow = Proof::read(reader)?;

 		Ok(BlockHeader {

@@ -201,6 +207,7 @@ impl Readable for BlockHeader {
 			nonce: nonce,
 			difficulty: difficulty,
 			total_difficulty: total_difficulty,
+			kernel_offset: kernel_offset,
 		})
 	}
 }
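The header now serializes one extra BlindingFactor, so every encoded header grows by 32 bytes. That is consistent with the block and compact-block size assertions later in this diff, which all move up by exactly 32 (for example 5_676 to 5_708). A quick check of that arithmetic, assuming the 32-byte SECRET_KEY_SIZE constant imported at the end of the diff:

```rust
// The size-test deltas in this diff all equal the serialized size of the new
// header field (assumed here to be a 32-byte blinding factor).
fn main() {
    const SECRET_KEY_SIZE: usize = 32;
    let old_sizes = [5_676usize, 16_224, 5_682, 111_156, 5_736];
    let new_sizes = [5_708usize, 16_256, 5_714, 111_188, 5_768];
    for (old, new) in old_sizes.iter().zip(new_sizes.iter()) {
        assert_eq!(new - old, SECRET_KEY_SIZE); // each serialized block grows by one offset
    }
}
```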
@@ -284,7 +291,7 @@ pub struct Block {
 	pub inputs: Vec<Input>,
 	/// List of transaction outputs
 	pub outputs: Vec<Output>,
-	/// List of transaction kernels and associated proofs
+	/// List of kernels with associated proofs (note these are offset from tx_kernels)
 	pub kernels: Vec<TxKernel>,
 }

@@ -379,7 +386,7 @@ impl Block {
 		key_id: &keychain::Identifier,
 		difficulty: Difficulty,
 	) -> Result<Block, Error> {
-		let fees = txs.iter().map(|tx| tx.fee).sum();
+		let fees = txs.iter().map(|tx| tx.fee()).sum();
 		let (reward_out, reward_proof) = Block::reward_output(
 			keychain,
 			key_id,
@@ -486,26 +493,33 @@ impl Block {
 		let mut inputs = vec![];
 		let mut outputs = vec![];

+		// we will sum these together at the end
+		// to give us the overall offset for the block
+		let mut kernel_offsets = vec![];
+
 		// iterate over the all the txs
 		// build the kernel for each
 		// and collect all the kernels, inputs and outputs
 		// to build the block (which we can sort of think of as one big tx?)
 		for tx in txs {
 			// validate each transaction and gather their kernels
-			let excess = tx.validate()?;
-			let kernel = tx.build_kernel(excess);
-			kernels.push(kernel);
-
-			for input in tx.inputs.clone() {
-				inputs.push(input);
-			}
-
-			for output in tx.outputs.clone() {
-				outputs.push(output);
-			}
-		}
-
-		// also include the reward kernel and output
+			// tx has an offset k2 where k = k1 + k2
+			// and the tx is signed using k1
+			// the kernel excess is k1G
+			// we will sum all the offsets later and store the total offset
+			// on the block_header
+			tx.validate()?;
+
+			// we will summ these later to give a single aggregate offset
+			kernel_offsets.push(tx.offset);
+
+			// add all tx inputs/outputs/kernels to the block
+			kernels.extend(tx.kernels.iter().cloned());
+			inputs.extend(tx.inputs.iter().cloned());
+			outputs.extend(tx.outputs.iter().cloned());
+		}
+
+		// include the reward kernel and output
 		kernels.push(reward_kern);
 		outputs.push(reward_out);
@@ -514,7 +528,28 @@ impl Block {
 		outputs.sort();
 		kernels.sort();

-		// calculate the overall Merkle tree and fees (todo?)
+		// now sum the kernel_offsets up to give us
+		// an aggregate offset for the entire block
+		let kernel_offset = {
+			let secp = static_secp_instance();
+			let secp = secp.lock().unwrap();
+			let keys = kernel_offsets
+				.iter()
+				.cloned()
+				.filter(|x| *x != BlindingFactor::zero())
+				.filter_map(|x| {
+					x.secret_key(&secp).ok()
+				})
+				.collect::<Vec<_>>();
+			if keys.is_empty() {
+				BlindingFactor::zero()
+			} else {
+				let sum = secp.blind_sum(keys, vec![])?;
+				BlindingFactor::from_secret_key(sum)
+			}
+		};

 		Ok(
 			Block {
 				header: BlockHeader {

@@ -526,6 +561,7 @@ impl Block {
 					previous: prev.hash(),
 					total_difficulty: difficulty +
 						prev.total_difficulty.clone(),
+					kernel_offset: kernel_offset,
 					..Default::default()
 				},
 				inputs: inputs,
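Block construction above folds every non-zero per-transaction offset into a single aggregate stored once on the header. A toy version of that fold, with plain integers in place of secret keys and a made-up helper name:

```rust
// Toy aggregation mirroring the kernel_offset fold above: non-zero per-tx
// offsets are summed into one value stored on the block header.
// `aggregate_offsets` is a hypothetical helper, not part of the real code.
fn aggregate_offsets(tx_offsets: &[i64]) -> i64 {
    tx_offsets.iter().filter(|&&o| o != 0).sum()
}

fn main() {
    assert_eq!(aggregate_offsets(&[3, 0, 7]), 10);
    assert_eq!(aggregate_offsets(&[]), 0); // a block with no txs keeps a zero offset
}
```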
@@ -641,22 +677,34 @@ impl Block {
 		let io_sum = self.sum_commitments()?;

 		// sum all kernels commitments
-		let proof_commits = map_vec!(self.kernels, |proof| proof.excess);
-
-		let proof_sum = {
+		let kernel_sum = {
+			let mut kernel_commits = self.kernels
+				.iter()
+				.map(|x| x.excess)
+				.collect::<Vec<_>>();
+
 			let secp = static_secp_instance();
 			let secp = secp.lock().unwrap();
-			secp.commit_sum(proof_commits, vec![])?
+
+			// add the kernel_offset in as necessary (unless offset is zero)
+			if self.header.kernel_offset != BlindingFactor::zero() {
+				let skey = self.header.kernel_offset.secret_key(&secp)?;
+				let offset_commit = secp.commit(0, skey)?;
+				kernel_commits.push(offset_commit);
+			}
+
+			secp.commit_sum(kernel_commits, vec![])?
 		};

-		// both should be the same
-		if proof_sum != io_sum {
+		// sum of kernel commitments (including kernel_offset) must match
+		// the sum of input/output commitments (minus fee)
+		if kernel_sum != io_sum {
 			return Err(Error::KernelSumMismatch);
 		}

 		// verify all signatures with the commitment as pk
-		for proof in &self.kernels {
-			proof.verify()?;
+		for kernel in &self.kernels {
+			kernel.verify()?;
 		}

 		Ok(())
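With the offset turned back into a commitment to a zero value, block verification reduces to a single sum check. In toy scalar form:

```rust
// Toy form of the block kernel-sum check above: the input/output sum must equal
// the sum of kernel excesses plus the header's kernel_offset.
fn main() {
    let io_sum: i64 = 11;              // sum(outputs) - sum(inputs), as a scalar stand-in
    let kernel_excesses = [4i64, 4];   // k1 parts, one per kernel
    let kernel_offset: i64 = 3;        // aggregate k2 from the block header

    let kernel_sum: i64 = kernel_excesses.iter().sum::<i64>() + kernel_offset;
    assert_eq!(kernel_sum, io_sum);    // otherwise Error::KernelSumMismatch
}
```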
@ -839,8 +887,7 @@ mod test {
|
||||||
build::transaction(
|
build::transaction(
|
||||||
vec![input(v, ZERO_HASH, key_id1), output(3, key_id2), with_fee(2)],
|
vec![input(v, ZERO_HASH, key_id1), output(3, key_id2), with_fee(2)],
|
||||||
&keychain,
|
&keychain,
|
||||||
).map(|(tx, _)| tx)
|
).unwrap()
|
||||||
.unwrap()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Too slow for now #[test]
|
// Too slow for now #[test]
|
||||||
|
@ -863,7 +910,6 @@ mod test {
|
||||||
let now = Instant::now();
|
let now = Instant::now();
|
||||||
parts.append(&mut vec![input(500000, ZERO_HASH, pks.pop().unwrap()), with_fee(2)]);
|
parts.append(&mut vec![input(500000, ZERO_HASH, pks.pop().unwrap()), with_fee(2)]);
|
||||||
let mut tx = build::transaction(parts, &keychain)
|
let mut tx = build::transaction(parts, &keychain)
|
||||||
.map(|(tx, _)| tx)
|
|
||||||
.unwrap();
|
.unwrap();
|
||||||
println!("Build tx: {}", now.elapsed().as_secs());
|
println!("Build tx: {}", now.elapsed().as_secs());
|
||||||
|
|
||||||
|
@ -898,7 +944,7 @@ mod test {
|
||||||
let key_id3 = keychain.derive_key_id(3).unwrap();
|
let key_id3 = keychain.derive_key_id(3).unwrap();
|
||||||
|
|
||||||
let mut btx1 = tx2i1o();
|
let mut btx1 = tx2i1o();
|
||||||
let (mut btx2, _) = build::transaction(
|
let mut btx2 = build::transaction(
|
||||||
vec![input(7, ZERO_HASH, key_id1), output(5, key_id2.clone()), with_fee(2)],
|
vec![input(7, ZERO_HASH, key_id1), output(5, key_id2.clone()), with_fee(2)],
|
||||||
&keychain,
|
&keychain,
|
||||||
).unwrap();
|
).unwrap();
|
||||||
|
@ -1010,7 +1056,7 @@ mod test {
|
||||||
ser::serialize(&mut vec, &b).expect("serialization failed");
|
ser::serialize(&mut vec, &b).expect("serialization failed");
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
vec.len(),
|
vec.len(),
|
||||||
5_676
|
5_708,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1023,7 +1069,7 @@ mod test {
|
||||||
ser::serialize(&mut vec, &b).expect("serialization failed");
|
ser::serialize(&mut vec, &b).expect("serialization failed");
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
vec.len(),
|
vec.len(),
|
||||||
16_224
|
16_256,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1035,7 +1081,7 @@ mod test {
|
||||||
ser::serialize(&mut vec, &b.as_compact_block()).expect("serialization failed");
|
ser::serialize(&mut vec, &b.as_compact_block()).expect("serialization failed");
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
vec.len(),
|
vec.len(),
|
||||||
5_676
|
5_708,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1048,7 +1094,7 @@ mod test {
|
||||||
ser::serialize(&mut vec, &b.as_compact_block()).expect("serialization failed");
|
ser::serialize(&mut vec, &b.as_compact_block()).expect("serialization failed");
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
vec.len(),
|
vec.len(),
|
||||||
5_682
|
5_714,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1070,7 +1116,7 @@ mod test {
|
||||||
ser::serialize(&mut vec, &b).expect("serialization failed");
|
ser::serialize(&mut vec, &b).expect("serialization failed");
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
vec.len(),
|
vec.len(),
|
||||||
111_156
|
111_188,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1092,7 +1138,7 @@ mod test {
|
||||||
ser::serialize(&mut vec, &b.as_compact_block()).expect("serialization failed");
|
ser::serialize(&mut vec, &b.as_compact_block()).expect("serialization failed");
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
vec.len(),
|
vec.len(),
|
||||||
5_736
|
5_768,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -27,7 +27,7 @@
|
||||||
|
|
||||||
use util::{secp, kernel_sig_msg};
|
use util::{secp, kernel_sig_msg};
|
||||||
|
|
||||||
use core::{Transaction, Input, Output, OutputFeatures, SwitchCommitHash};
|
use core::{Transaction, TxKernel, Input, Output, OutputFeatures, SwitchCommitHash};
|
||||||
use core::hash::Hash;
|
use core::hash::Hash;
|
||||||
use keychain;
|
use keychain;
|
||||||
use keychain::{Keychain, BlindSum, BlindingFactor, Identifier};
|
use keychain::{Keychain, BlindSum, BlindingFactor, Identifier};
|
||||||
|
@ -40,7 +40,7 @@ pub struct Context<'a> {
|
||||||
|
|
||||||
/// Function type returned by the transaction combinators. Transforms a
|
/// Function type returned by the transaction combinators. Transforms a
|
||||||
/// (Transaction, BlindSum) pair into another, provided some context.
|
/// (Transaction, BlindSum) pair into another, provided some context.
|
||||||
pub type Append = for<'a> Fn(&'a mut Context, (Transaction, BlindSum)) -> (Transaction, BlindSum);
|
pub type Append = for<'a> Fn(&'a mut Context, (Transaction, TxKernel, BlindSum)) -> (Transaction, TxKernel, BlindSum);
|
||||||
|
|
||||||
/// Adds an input with the provided value and blinding key to the transaction
|
/// Adds an input with the provided value and blinding key to the transaction
|
||||||
/// being built.
|
/// being built.
|
||||||
|
@ -50,14 +50,14 @@ fn build_input(
|
||||||
out_block: Option<Hash>,
|
out_block: Option<Hash>,
|
||||||
key_id: Identifier,
|
key_id: Identifier,
|
||||||
) -> Box<Append> {
|
) -> Box<Append> {
|
||||||
Box::new(move |build, (tx, sum)| -> (Transaction, BlindSum) {
|
Box::new(move |build, (tx, kern, sum)| -> (Transaction, TxKernel, BlindSum) {
|
||||||
let commit = build.keychain.commit(value, &key_id).unwrap();
|
let commit = build.keychain.commit(value, &key_id).unwrap();
|
||||||
let input = Input::new(
|
let input = Input::new(
|
||||||
features,
|
features,
|
||||||
commit,
|
commit,
|
||||||
out_block,
|
out_block,
|
||||||
);
|
);
|
||||||
(tx.with_input(input), sum.sub_key_id(key_id.clone()))
|
(tx.with_input(input), kern, sum.sub_key_id(key_id.clone()))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -86,7 +86,7 @@ pub fn coinbase_input(
|
||||||
/// Adds an output with the provided value and key identifier from the
|
/// Adds an output with the provided value and key identifier from the
|
||||||
/// keychain.
|
/// keychain.
|
||||||
pub fn output(value: u64, key_id: Identifier) -> Box<Append> {
|
pub fn output(value: u64, key_id: Identifier) -> Box<Append> {
|
||||||
Box::new(move |build, (tx, sum)| -> (Transaction, BlindSum) {
|
Box::new(move |build, (tx, kern, sum)| -> (Transaction, TxKernel, BlindSum) {
|
||||||
debug!(
|
debug!(
|
||||||
LOGGER,
|
LOGGER,
|
||||||
"Building an output: {}, {}",
|
"Building an output: {}, {}",
|
||||||
|
@ -125,6 +125,7 @@ pub fn output(value: u64, key_id: Identifier) -> Box<Append> {
|
||||||
switch_commit_hash: switch_commit_hash,
|
switch_commit_hash: switch_commit_hash,
|
||||||
proof: rproof,
|
proof: rproof,
|
||||||
}),
|
}),
|
||||||
|
kern,
|
||||||
sum.add_key_id(key_id.clone()),
|
sum.add_key_id(key_id.clone()),
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
@@ -132,31 +133,41 @@ pub fn output(value: u64, key_id: Identifier) -> Box<Append> {

 /// Sets the fee on the transaction being built.
 pub fn with_fee(fee: u64) -> Box<Append> {
-	Box::new(move |_build, (tx, sum)| -> (Transaction, BlindSum) {
-		(tx.with_fee(fee), sum)
+	Box::new(move |_build, (tx, kern, sum)| -> (Transaction, TxKernel, BlindSum) {
+		(tx, kern.with_fee(fee), sum)
 	})
 }

 /// Sets the lock_height on the transaction being built.
 pub fn with_lock_height(lock_height: u64) -> Box<Append> {
-	Box::new(move |_build, (tx, sum)| -> (Transaction, BlindSum) {
-		(tx.with_lock_height(lock_height), sum)
+	Box::new(move |_build, (tx, kern, sum)| -> (Transaction, TxKernel, BlindSum) {
+		(tx, kern.with_lock_height(lock_height), sum)
 	})
 }

-/// Sets a known excess value on the transaction being built. Usually used in
+/// Adds a known excess value on the transaction being built. Usually used in
 /// combination with the initial_tx function when a new transaction is built
 /// by adding to a pre-existing one.
 pub fn with_excess(excess: BlindingFactor) -> Box<Append> {
-	Box::new(move |_build, (tx, sum)| -> (Transaction, BlindSum) {
-		(tx, sum.add_blinding_factor(excess.clone()))
+	Box::new(move |_build, (tx, kern, sum)| -> (Transaction, TxKernel, BlindSum) {
+		(tx, kern, sum.add_blinding_factor(excess.clone()))
+	})
+}
+
+/// Sets a known tx "offset". Used in final step of tx construction.
+pub fn with_offset(offset: BlindingFactor) -> Box<Append> {
+	Box::new(move |_build, (tx, kern, sum)| -> (Transaction, TxKernel, BlindSum) {
+		(tx.with_offset(offset), kern, sum)
 	})
 }

 /// Sets an initial transaction to add to when building a new transaction.
-pub fn initial_tx(tx: Transaction) -> Box<Append> {
-	Box::new(move |_build, (_, sum)| -> (Transaction, BlindSum) {
-		(tx.clone(), sum)
+/// We currently only support building a tx with a single kernel with build::transaction()
+pub fn initial_tx(mut tx: Transaction) -> Box<Append> {
+	assert_eq!(tx.kernels.len(), 1);
+	let kern = tx.kernels.remove(0);
+	Box::new(move |_build, (_, _, sum)| -> (Transaction, TxKernel, BlindSum) {
+		(tx.clone(), kern.clone(), sum)
 	})
 }

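The builder combinators now thread a (Transaction, TxKernel, BlindSum) triple through a fold, so with_fee and with_lock_height act on the kernel while with_offset acts on the transaction. A stripped-down sketch of that fold pattern, with stand-in types rather than the real Transaction/TxKernel/BlindSum:

```rust
// Minimal sketch of the combinator fold used by the build functions below.
struct Tx { offset: i64 }
struct Kern { fee: u64 }
type Append = Box<dyn Fn((Tx, Kern)) -> (Tx, Kern)>;

fn with_fee(fee: u64) -> Append {
    Box::new(move |(tx, _kern)| (tx, Kern { fee }))     // fee lands on the kernel
}

fn with_offset(offset: i64) -> Append {
    Box::new(move |(_tx, kern)| (Tx { offset }, kern))  // offset lands on the tx
}

fn main() {
    let elems: Vec<Append> = vec![with_fee(2), with_offset(7)];
    let built = elems.iter().fold((Tx { offset: 0 }, Kern { fee: 0 }), |acc, f| f(acc));
    assert_eq!(built.0.offset, 7);
    assert_eq!(built.1.fee, 2);
}
```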
@@ -170,21 +181,78 @@ pub fn initial_tx(tx: Transaction) -> Box<Append> {
 /// let (tx2, _) = build::transaction(vec![initial_tx(tx1), with_excess(sum),
 /// output_rand(2)], keychain).unwrap();
 ///
-pub fn transaction(
+pub fn partial_transaction(
 	elems: Vec<Box<Append>>,
 	keychain: &keychain::Keychain,
 ) -> Result<(Transaction, BlindingFactor), keychain::Error> {
 	let mut ctx = Context { keychain };
-	let (mut tx, sum) = elems.iter().fold(
-		(Transaction::empty(), BlindSum::new()),
+	let (mut tx, kern, sum) = elems.iter().fold(
+		(Transaction::empty(), TxKernel::empty(), BlindSum::new()),
 		|acc, elem| elem(&mut ctx, acc),
 	);
 	let blind_sum = ctx.keychain.blind_sum(&sum)?;
-	let msg = secp::Message::from_slice(&kernel_sig_msg(tx.fee, tx.lock_height))?;
-	tx.excess_sig = Keychain::aggsig_sign_with_blinding(&keychain.secp(), &msg, &blind_sum)?;
+
+	// we only support building a tx with a single kernel via build::transaction()
+	assert!(tx.kernels.is_empty());
+	tx.kernels.push(kern);
+
 	Ok((tx, blind_sum))
 }
+
+/// Builds a complete transaction.
+pub fn transaction(
+	elems: Vec<Box<Append>>,
+	keychain: &keychain::Keychain,
+) -> Result<Transaction, keychain::Error> {
+	let (mut tx, blind_sum) = partial_transaction(elems, keychain)?;
+	assert_eq!(tx.kernels.len(), 1);
+
+	let mut kern = tx.kernels.remove(0);
+	let msg = secp::Message::from_slice(&kernel_sig_msg(kern.fee, kern.lock_height))?;
+
+	let skey = blind_sum.secret_key(&keychain.secp())?;
+	kern.excess = keychain.secp().commit(0, skey)?;
+	kern.excess_sig = Keychain::aggsig_sign_with_blinding(&keychain.secp(), &msg, &blind_sum)?;
+
+	tx.kernels.push(kern);
+
+	Ok(tx)
+}
+
+/// Builds a complete transaction, splitting the key and
+/// setting the excess, excess_sig and tx offset as necessary.
+pub fn transaction_with_offset(
+	elems: Vec<Box<Append>>,
+	keychain: &keychain::Keychain,
+) -> Result<Transaction, keychain::Error> {
+	let mut ctx = Context { keychain };
+	let (mut tx, mut kern, sum) = elems.iter().fold(
+		(Transaction::empty(), TxKernel::empty(), BlindSum::new()),
+		|acc, elem| elem(&mut ctx, acc),
+	);
+	let blind_sum = ctx.keychain.blind_sum(&sum)?;
+
+	let split = blind_sum.split(&keychain.secp())?;
+	let k1 = split.blind_1;
+	let k2 = split.blind_2;
+
+	let msg = secp::Message::from_slice(&kernel_sig_msg(kern.fee, kern.lock_height))?;
+
+	// generate kernel excess and excess_sig using the split key k1
+	let skey = k1.secret_key(&keychain.secp())?;
+	kern.excess = ctx.keychain.secp().commit(0, skey)?;
+	kern.excess_sig = Keychain::aggsig_sign_with_blinding(&keychain.secp(), &msg, &k1)?;
+
+	// store the kernel offset (k2) on the tx itself
+	// commitments will sum correctly when including the offset
+	tx.offset = k2.clone();
+
+	assert!(tx.kernels.is_empty());
+	tx.kernels.push(kern);
+
+	Ok(tx)
+}

 // Just a simple test, most exhaustive tests in the core mod.rs.
 #[cfg(test)]
 mod test {
@ -198,17 +266,37 @@ mod test {
|
||||||
let key_id2 = keychain.derive_key_id(2).unwrap();
|
let key_id2 = keychain.derive_key_id(2).unwrap();
|
||||||
let key_id3 = keychain.derive_key_id(3).unwrap();
|
let key_id3 = keychain.derive_key_id(3).unwrap();
|
||||||
|
|
||||||
let (tx, _) = transaction(
|
let tx = transaction(
|
||||||
vec![
|
vec![
|
||||||
input(10, ZERO_HASH, key_id1),
|
input(10, ZERO_HASH, key_id1),
|
||||||
input(11, ZERO_HASH, key_id2),
|
input(12, ZERO_HASH, key_id2),
|
||||||
output(20, key_id3),
|
output(20, key_id3),
|
||||||
with_fee(1),
|
with_fee(2),
|
||||||
],
|
],
|
||||||
&keychain,
|
&keychain,
|
||||||
).unwrap();
|
).unwrap();
|
||||||
|
|
||||||
tx.verify_sig().unwrap();
|
tx.validate().unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn blind_simple_tx_with_offset() {
|
||||||
|
let keychain = Keychain::from_random_seed().unwrap();
|
||||||
|
let key_id1 = keychain.derive_key_id(1).unwrap();
|
||||||
|
let key_id2 = keychain.derive_key_id(2).unwrap();
|
||||||
|
let key_id3 = keychain.derive_key_id(3).unwrap();
|
||||||
|
|
||||||
|
let tx = transaction_with_offset(
|
||||||
|
vec![
|
||||||
|
input(10, ZERO_HASH, key_id1),
|
||||||
|
input(12, ZERO_HASH, key_id2),
|
||||||
|
output(20, key_id3),
|
||||||
|
with_fee(2),
|
||||||
|
],
|
||||||
|
&keychain,
|
||||||
|
).unwrap();
|
||||||
|
|
||||||
|
tx.validate().unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
@ -217,11 +305,11 @@ mod test {
|
||||||
let key_id1 = keychain.derive_key_id(1).unwrap();
|
let key_id1 = keychain.derive_key_id(1).unwrap();
|
||||||
let key_id2 = keychain.derive_key_id(2).unwrap();
|
let key_id2 = keychain.derive_key_id(2).unwrap();
|
||||||
|
|
||||||
let (tx, _) = transaction(
|
let tx = transaction(
|
||||||
vec![input(6, ZERO_HASH, key_id1), output(2, key_id2), with_fee(4)],
|
vec![input(6, ZERO_HASH, key_id1), output(2, key_id2), with_fee(4)],
|
||||||
&keychain,
|
&keychain,
|
||||||
).unwrap();
|
).unwrap();
|
||||||
|
|
||||||
tx.verify_sig().unwrap();
|
tx.validate().unwrap();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -263,8 +263,10 @@ mod test {
|
||||||
let tx = tx2i1o();
|
let tx = tx2i1o();
|
||||||
let mut vec = Vec::new();
|
let mut vec = Vec::new();
|
||||||
ser::serialize(&mut vec, &tx).expect("serialization failed");
|
ser::serialize(&mut vec, &tx).expect("serialization failed");
|
||||||
println!("{}", vec.len());
|
assert_eq!(
|
||||||
assert!(vec.len() == 5364);
|
vec.len(),
|
||||||
|
5_438,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
@ -273,7 +275,7 @@ mod test {
|
||||||
let mut vec = Vec::new();
|
let mut vec = Vec::new();
|
||||||
ser::serialize(&mut vec, &tx).expect("serialization failed");
|
ser::serialize(&mut vec, &tx).expect("serialization failed");
|
||||||
let dtx: Transaction = ser::deserialize(&mut &vec[..]).unwrap();
|
let dtx: Transaction = ser::deserialize(&mut &vec[..]).unwrap();
|
||||||
assert_eq!(dtx.fee, 2);
|
assert_eq!(dtx.fee(), 2);
|
||||||
assert_eq!(dtx.inputs.len(), 2);
|
assert_eq!(dtx.inputs.len(), 2);
|
||||||
assert_eq!(dtx.outputs.len(), 1);
|
assert_eq!(dtx.outputs.len(), 1);
|
||||||
assert_eq!(tx.hash(), dtx.hash());
|
assert_eq!(tx.hash(), dtx.hash());
|
||||||
|
@ -304,7 +306,7 @@ mod test {
|
||||||
let key_id3 = keychain.derive_key_id(3).unwrap();
|
let key_id3 = keychain.derive_key_id(3).unwrap();
|
||||||
|
|
||||||
// first build a valid tx with corresponding blinding factor
|
// first build a valid tx with corresponding blinding factor
|
||||||
let (tx, blind) = build::transaction(
|
let tx = build::transaction(
|
||||||
vec![
|
vec![
|
||||||
input(10, ZERO_HASH, key_id1),
|
input(10, ZERO_HASH, key_id1),
|
||||||
output(5, key_id2),
|
output(5, key_id2),
|
||||||
|
@@ -314,14 +316,51 @@ mod test {
 			&keychain,
 		).unwrap();

-		// confirm the tx validates and that we can construct a valid tx_kernel from it
-		let excess = tx.validate().unwrap();
-		let tx_kernel = tx.build_kernel(excess);
-		let _ = tx_kernel.verify().unwrap();
+		// check the tx is valid
+		tx.validate().unwrap();

-		assert_eq!(tx_kernel.features, KernelFeatures::DEFAULT_KERNEL);
-		assert_eq!(tx_kernel.fee, tx.fee);
-		assert_eq!(tx_kernel.excess, excess);
+		// check the kernel is also itself valid
+		assert_eq!(tx.kernels.len(), 1);
+		let kern = &tx.kernels[0];
+		kern.verify().unwrap();
+
+		assert_eq!(kern.features, KernelFeatures::DEFAULT_KERNEL);
+		assert_eq!(kern.fee, tx.fee());
+	}
+
+	// Combine two transactions into one big transaction (with multiple kernels)
+	// and check it still validates.
+	#[test]
+	fn transaction_cut_through() {
+		let tx1 = tx1i2o();
+		let tx2 = tx2i1o();
+
+		assert!(tx1.validate().is_ok());
+		assert!(tx2.validate().is_ok());
+
+		// now build a "cut_through" tx from tx1 and tx2
+		let mut tx3 = tx1.clone();
+		tx3.inputs.extend(tx2.inputs.iter().cloned());
+		tx3.outputs.extend(tx2.outputs.iter().cloned());
+		tx3.kernels.extend(tx2.kernels.iter().cloned());
+
+		// make sure everything is sorted
+		tx3.inputs.sort();
+		tx3.outputs.sort();
+		tx3.kernels.sort();
+
+		// finally sum the offsets up
+		// TODO - hide this in a convenience function somewhere
+		tx3.offset = {
+			let secp = static_secp_instance();
+			let secp = secp.lock().unwrap();
+			let skey1 = tx1.offset.secret_key(&secp).unwrap();
+			let skey2 = tx2.offset.secret_key(&secp).unwrap();
+			let skey3 = secp.blind_sum(vec![skey1, skey2], vec![]).unwrap();
+			BlindingFactor::from_secret_key(skey3)
+		};
+
+		assert!(tx3.validate().is_ok());
 	}

 	#[test]
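The cut-through test above merges two transactions by concatenating inputs, outputs and kernels and summing the two offsets. In toy scalar terms the merged transaction still balances because the per-transaction equations simply add:

```rust
// Toy of why the merged ("cut-through") tx above still validates: each tx
// satisfies io_sum == excess + offset, and both sides of the equation add up.
fn main() {
    let (io1, excess1, offset1) = (6i64, 4, 2);
    let (io2, excess2, offset2) = (9i64, 5, 4);
    assert_eq!(io1, excess1 + offset1);
    assert_eq!(io2, excess2 + offset2);

    // merged tx: kernels are concatenated, offsets are summed
    let merged_io = io1 + io2;
    let merged_excesses = [excess1, excess2];
    let merged_offset = offset1 + offset2;
    assert_eq!(merged_io, merged_excesses.iter().sum::<i64>() + merged_offset);
}
```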
@ -331,7 +370,7 @@ mod test {
|
||||||
let key_id2 = keychain.derive_key_id(2).unwrap();
|
let key_id2 = keychain.derive_key_id(2).unwrap();
|
||||||
let key_id3 = keychain.derive_key_id(3).unwrap();
|
let key_id3 = keychain.derive_key_id(3).unwrap();
|
||||||
|
|
||||||
let (tx, _) = build::transaction(
|
let tx = build::transaction(
|
||||||
vec![
|
vec![
|
||||||
input(75, ZERO_HASH, key_id1),
|
input(75, ZERO_HASH, key_id1),
|
||||||
output(42, key_id2),
|
output(42, key_id2),
|
||||||
|
@ -349,7 +388,7 @@ mod test {
|
||||||
#[test]
|
#[test]
|
||||||
fn blind_tx() {
|
fn blind_tx() {
|
||||||
let btx = tx2i1o();
|
let btx = tx2i1o();
|
||||||
btx.verify_sig().unwrap(); // unwrap will panic if invalid
|
assert!(btx.validate().is_ok());
|
||||||
|
|
||||||
// checks that the range proof on our blind output is sufficiently hiding
|
// checks that the range proof on our blind output is sufficiently hiding
|
||||||
let Output { proof, .. } = btx.outputs[0];
|
let Output { proof, .. } = btx.outputs[0];
|
||||||
|
@ -372,6 +411,57 @@ mod test {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// #[test]
|
||||||
|
// fn tx_build_aggsig() {
|
||||||
|
// let keychain = Keychain::from_random_seed().unwrap();
|
||||||
|
// let key_id1 = keychain.derive_key_id(1).unwrap();
|
||||||
|
// let key_id2 = keychain.derive_key_id(2).unwrap();
|
||||||
|
// let key_id3 = keychain.derive_key_id(3).unwrap();
|
||||||
|
// let key_id4 = keychain.derive_key_id(4).unwrap();
|
||||||
|
//
|
||||||
|
// let (tx_alice, blind_sum) = {
|
||||||
|
// // Alice gets 2 of her pre-existing outputs to send 5 coins to Bob, they
|
||||||
|
// // become inputs in the new transaction
|
||||||
|
// let (in1, in2) = (input(4, ZERO_HASH, key_id1), input(3, ZERO_HASH, key_id2));
|
||||||
|
//
|
||||||
|
// // Alice builds her transaction, with change, which also produces the sum
|
||||||
|
// // of blinding factors before they're obscured.
|
||||||
|
// let (tx, sum) = build::partial_transaction(
|
||||||
|
// vec![in1, in2, output(1, key_id3),
|
||||||
|
// with_fee(2)],
|
||||||
|
// &keychain,
|
||||||
|
// ).unwrap();
|
||||||
|
//
|
||||||
|
// (tx, sum)
|
||||||
|
// };
|
||||||
|
//
|
||||||
|
// let blind = blind_sum.secret_key(&keychain.secp())?;
|
||||||
|
// keychain.aggsig_create_context(blind);
|
||||||
|
// let (pub_excess, pub_nonce) = keychain.aggsig_get_public_keys();
|
||||||
|
//
|
||||||
|
// let sig_part = keychain.aggsig_calculate_partial_sig(
|
||||||
|
// &pub_nonce,
|
||||||
|
// tx.fee(),
|
||||||
|
// tx.lock_height(),
|
||||||
|
// ).unwrap();
|
||||||
|
//
|
||||||
|
//
|
||||||
|
// // From now on, Bob only has the obscured transaction and the sum of
|
||||||
|
// // blinding factors. He adds his output, finalizes the transaction so it's
|
||||||
|
// // ready for broadcast.
|
||||||
|
// let tx_final = build::transaction(
|
||||||
|
// vec![
|
||||||
|
// initial_tx(tx_alice),
|
||||||
|
// with_excess(blind_sum),
|
||||||
|
// output(4, key_id4),
|
||||||
|
// ],
|
||||||
|
// &keychain,
|
||||||
|
// ).unwrap();
|
||||||
|
//
|
||||||
|
// tx_final.validate().unwrap();
|
||||||
|
//
|
||||||
|
// }
|
||||||
|
|
||||||
/// Simulate the standard exchange between 2 parties when creating a basic
|
/// Simulate the standard exchange between 2 parties when creating a basic
|
||||||
/// 2 inputs, 2 outputs transaction.
|
/// 2 inputs, 2 outputs transaction.
|
||||||
#[test]
|
#[test]
|
||||||
|
@ -382,27 +472,26 @@ mod test {
|
||||||
let key_id3 = keychain.derive_key_id(3).unwrap();
|
let key_id3 = keychain.derive_key_id(3).unwrap();
|
||||||
let key_id4 = keychain.derive_key_id(4).unwrap();
|
let key_id4 = keychain.derive_key_id(4).unwrap();
|
||||||
|
|
||||||
let tx_alice: Transaction;
|
let (tx_alice, blind_sum) = {
|
||||||
let blind_sum: BlindingFactor;
|
|
||||||
|
|
||||||
{
|
|
||||||
// Alice gets 2 of her pre-existing outputs to send 5 coins to Bob, they
|
// Alice gets 2 of her pre-existing outputs to send 5 coins to Bob, they
|
||||||
// become inputs in the new transaction
|
// become inputs in the new transaction
|
||||||
let (in1, in2) = (input(4, ZERO_HASH, key_id1), input(3, ZERO_HASH, key_id2));
|
let (in1, in2) = (input(4, ZERO_HASH, key_id1), input(3, ZERO_HASH, key_id2));
|
||||||
|
|
||||||
// Alice builds her transaction, with change, which also produces the sum
|
// Alice builds her transaction, with change, which also produces the sum
|
||||||
// of blinding factors before they're obscured.
|
// of blinding factors before they're obscured.
|
||||||
let (tx, sum) =
|
let (tx, sum) = build::partial_transaction(
|
||||||
build::transaction(vec![in1, in2, output(1, key_id3), with_fee(2)], &keychain)
|
vec![in1, in2, output(1, key_id3),
|
||||||
.unwrap();
|
with_fee(2)],
|
||||||
tx_alice = tx;
|
&keychain,
|
||||||
blind_sum = sum;
|
).unwrap();
|
||||||
}
|
|
||||||
|
(tx, sum)
|
||||||
|
};
|
||||||
|
|
||||||
// From now on, Bob only has the obscured transaction and the sum of
|
// From now on, Bob only has the obscured transaction and the sum of
|
||||||
// blinding factors. He adds his output, finalizes the transaction so it's
|
// blinding factors. He adds his output, finalizes the transaction so it's
|
||||||
// ready for broadcast.
|
// ready for broadcast.
|
||||||
let (tx_final, _) = build::transaction(
|
let tx_final = build::transaction(
|
||||||
vec![
|
vec![
|
||||||
initial_tx(tx_alice),
|
initial_tx(tx_alice),
|
||||||
with_excess(blind_sum),
|
with_excess(blind_sum),
|
||||||
|
@ -435,7 +524,7 @@ mod test {
|
||||||
let key_id = keychain.derive_key_id(1).unwrap();
|
let key_id = keychain.derive_key_id(1).unwrap();
|
||||||
|
|
||||||
let mut tx1 = tx2i1o();
|
let mut tx1 = tx2i1o();
|
||||||
tx1.verify_sig().unwrap();
|
tx1.validate().unwrap();
|
||||||
|
|
||||||
let b = Block::new(
|
let b = Block::new(
|
||||||
&BlockHeader::default(),
|
&BlockHeader::default(),
|
||||||
|
@ -483,8 +572,7 @@ mod test {
|
||||||
with_lock_height(1),
|
with_lock_height(1),
|
||||||
],
|
],
|
||||||
&keychain,
|
&keychain,
|
||||||
).map(|(tx, _)| tx)
|
).unwrap();
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
let b = Block::new(
|
let b = Block::new(
|
||||||
&BlockHeader::default(),
|
&BlockHeader::default(),
|
||||||
|
@ -504,8 +592,7 @@ mod test {
|
||||||
with_lock_height(2),
|
with_lock_height(2),
|
||||||
],
|
],
|
||||||
&keychain,
|
&keychain,
|
||||||
).map(|(tx, _)| tx)
|
).unwrap();
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
let b = Block::new(
|
let b = Block::new(
|
||||||
&BlockHeader::default(),
|
&BlockHeader::default(),
|
||||||
|
@ -525,13 +612,13 @@ mod test {
|
||||||
#[test]
|
#[test]
|
||||||
pub fn test_verify_1i1o_sig() {
|
pub fn test_verify_1i1o_sig() {
|
||||||
let tx = tx1i1o();
|
let tx = tx1i1o();
|
||||||
tx.verify_sig().unwrap();
|
tx.validate().unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
pub fn test_verify_2i1o_sig() {
|
pub fn test_verify_2i1o_sig() {
|
||||||
let tx = tx2i1o();
|
let tx = tx2i1o();
|
||||||
tx.verify_sig().unwrap();
|
tx.validate().unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
// utility producing a transaction with 2 inputs and a single outputs
|
// utility producing a transaction with 2 inputs and a single outputs
|
||||||
|
@ -541,7 +628,7 @@ mod test {
|
||||||
let key_id2 = keychain.derive_key_id(2).unwrap();
|
let key_id2 = keychain.derive_key_id(2).unwrap();
|
||||||
let key_id3 = keychain.derive_key_id(3).unwrap();
|
let key_id3 = keychain.derive_key_id(3).unwrap();
|
||||||
|
|
||||||
build::transaction(
|
build::transaction_with_offset(
|
||||||
vec![
|
vec![
|
||||||
input(10, ZERO_HASH, key_id1),
|
input(10, ZERO_HASH, key_id1),
|
||||||
input(11, ZERO_HASH, key_id2),
|
input(11, ZERO_HASH, key_id2),
|
||||||
|
@ -549,8 +636,7 @@ mod test {
|
||||||
with_fee(2),
|
with_fee(2),
|
||||||
],
|
],
|
||||||
&keychain,
|
&keychain,
|
||||||
).map(|(tx, _)| tx)
|
).unwrap()
|
||||||
.unwrap()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// utility producing a transaction with a single input and output
|
// utility producing a transaction with a single input and output
|
||||||
|
@ -559,22 +645,22 @@ mod test {
|
||||||
let key_id1 = keychain.derive_key_id(1).unwrap();
|
let key_id1 = keychain.derive_key_id(1).unwrap();
|
||||||
let key_id2 = keychain.derive_key_id(2).unwrap();
|
let key_id2 = keychain.derive_key_id(2).unwrap();
|
||||||
|
|
||||||
build::transaction(
|
build::transaction_with_offset(
|
||||||
vec![input(5, ZERO_HASH, key_id1), output(3, key_id2), with_fee(2)],
|
vec![input(5, ZERO_HASH, key_id1), output(3, key_id2), with_fee(2)],
|
||||||
&keychain,
|
&keychain,
|
||||||
).map(|(tx, _)| tx)
|
).unwrap()
|
||||||
.unwrap()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// utility producing a transaction with a single input
|
// utility producing a transaction with a single input
|
||||||
// and two outputs (one change output)
|
// and two outputs (one change output)
|
||||||
|
// Note: this tx has an "offset" kernel
|
||||||
pub fn tx1i2o() -> Transaction {
|
pub fn tx1i2o() -> Transaction {
|
||||||
let keychain = keychain::Keychain::from_random_seed().unwrap();
|
let keychain = keychain::Keychain::from_random_seed().unwrap();
|
||||||
let key_id1 = keychain.derive_key_id(1).unwrap();
|
let key_id1 = keychain.derive_key_id(1).unwrap();
|
||||||
let key_id2 = keychain.derive_key_id(2).unwrap();
|
let key_id2 = keychain.derive_key_id(2).unwrap();
|
||||||
let key_id3 = keychain.derive_key_id(3).unwrap();
|
let key_id3 = keychain.derive_key_id(3).unwrap();
|
||||||
|
|
||||||
build::transaction(
|
build::transaction_with_offset(
|
||||||
vec![
|
vec![
|
||||||
input(6, ZERO_HASH, key_id1),
|
input(6, ZERO_HASH, key_id1),
|
||||||
output(3, key_id2),
|
output(3, key_id2),
|
||||||
|
@ -582,7 +668,6 @@ mod test {
|
||||||
with_fee(2),
|
with_fee(2),
|
||||||
],
|
],
|
||||||
&keychain,
|
&keychain,
|
||||||
).map(|(tx, _)| tx)
|
).unwrap()
|
||||||
.unwrap()
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -17,7 +17,7 @@ use blake2::blake2b::blake2b;
|
||||||
use util::secp::{self, Message, Signature};
|
use util::secp::{self, Message, Signature};
|
||||||
use util::{static_secp_instance, kernel_sig_msg};
|
use util::{static_secp_instance, kernel_sig_msg};
|
||||||
use util::secp::pedersen::{Commitment, RangeProof};
|
use util::secp::pedersen::{Commitment, RangeProof};
|
||||||
use std::cmp::min;
|
use std::cmp::{min, max};
|
||||||
use std::cmp::Ordering;
|
use std::cmp::Ordering;
|
||||||
use std::ops;
|
use std::ops;
|
||||||
|
|
||||||
|
@ -26,7 +26,8 @@ use consensus::VerifySortOrder;
|
||||||
use core::Committed;
|
use core::Committed;
|
||||||
use core::hash::{Hash, Hashed, ZERO_HASH};
|
use core::hash::{Hash, Hashed, ZERO_HASH};
|
||||||
use core::pmmr::Summable;
|
use core::pmmr::Summable;
|
||||||
use keychain::{Identifier, Keychain};
|
use keychain;
|
||||||
|
use keychain::{Identifier, Keychain, BlindingFactor};
|
||||||
use ser::{self, read_and_verify_sorted, Readable, Reader, Writeable, WriteableSorted, Writer};
|
use ser::{self, read_and_verify_sorted, Readable, Reader, Writeable, WriteableSorted, Writer};
|
||||||
use util;
|
use util;
|
||||||
|
|
||||||
|
@ -74,8 +75,15 @@ macro_rules! hashable_ord {
|
||||||
pub enum Error {
|
pub enum Error {
|
||||||
/// Transaction fee can't be odd, due to half fee burning
|
/// Transaction fee can't be odd, due to half fee burning
|
||||||
OddFee,
|
OddFee,
|
||||||
|
/// Kernel fee can't be odd, due to half fee burning
|
||||||
|
OddKernelFee,
|
||||||
/// Underlying Secp256k1 error (signature validation or invalid public key typically)
|
/// Underlying Secp256k1 error (signature validation or invalid public key typically)
|
||||||
Secp(secp::Error),
|
Secp(secp::Error),
|
||||||
|
/// Underlying keychain related error
|
||||||
|
Keychain(keychain::Error),
|
||||||
|
/// The sum of output minus input commitments does not
|
||||||
|
/// match the sum of kernel commitments
|
||||||
|
KernelSumMismatch,
|
||||||
/// Restrict number of incoming inputs
|
/// Restrict number of incoming inputs
|
||||||
TooManyInputs,
|
TooManyInputs,
|
||||||
/// Underlying consensus error (currently for sort order)
|
/// Underlying consensus error (currently for sort order)
|
||||||
|
@ -98,6 +106,13 @@ impl From<consensus::Error> for Error {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl From<keychain::Error> for Error {
|
||||||
|
fn from(e: keychain::Error) -> Error {
|
||||||
|
Error::Keychain(e)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
/// A proof that a transaction sums to zero. Includes both the transaction's
|
/// A proof that a transaction sums to zero. Includes both the transaction's
|
||||||
/// Pedersen commitment and the signature, that guarantees that the commitments
|
/// Pedersen commitment and the signature, that guarantees that the commitments
|
||||||
/// amount to zero.
|
/// amount to zero.
|
||||||
|
@ -168,6 +183,30 @@ impl TxKernel {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Build an empty tx kernel with zero values.
|
||||||
|
pub fn empty() -> TxKernel {
|
||||||
|
TxKernel {
|
||||||
|
features: KernelFeatures::DEFAULT_KERNEL,
|
||||||
|
fee: 0,
|
||||||
|
lock_height: 0,
|
||||||
|
excess: Commitment::from_vec(vec![0; 33]),
|
||||||
|
excess_sig: Signature::from_raw_data(&[0; 64]).unwrap(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Builds a new tx kernel with the provided fee.
|
||||||
|
pub fn with_fee(self, fee: u64) -> TxKernel {
|
||||||
|
TxKernel { fee: fee, ..self }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Builds a new tx kernel with the provided lock_height.
|
||||||
|
pub fn with_lock_height(self, lock_height: u64) -> TxKernel {
|
||||||
|
TxKernel {
|
||||||
|
lock_height: lock_height,
|
||||||
|
..self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Size in bytes of a kernel, necessary for binary storage
|
/// Size in bytes of a kernel, necessary for binary storage
|
||||||
pub fn size() -> usize {
|
pub fn size() -> usize {
|
||||||
17 + // features plus fee and lock_height
|
17 + // features plus fee and lock_height
|
||||||
|
@@ -179,41 +218,37 @@ impl TxKernel {
 /// A transaction
 #[derive(Debug, Clone)]
 pub struct Transaction {
-	/// Set of inputs spent by the transaction.
+	/// List of inputs spent by the transaction.
 	pub inputs: Vec<Input>,
-	/// Set of outputs the transaction produces.
+	/// List of outputs the transaction produces.
 	pub outputs: Vec<Output>,
-	/// Fee paid by the transaction.
-	pub fee: u64,
-	/// Transaction is not valid before this chain height.
-	pub lock_height: u64,
-	/// The signature proving the excess is a valid public key, which signs
-	/// the transaction fee.
-	pub excess_sig: Signature,
+	/// List of kernels that make up this transaction (usually a single kernel).
+	pub kernels: Vec<TxKernel>,
+	/// The kernel "offset" k2
+	/// excess is k1G after splitting the key k = k1 + k2
+	pub offset: BlindingFactor,
 }

 /// Implementation of Writeable for a fully blinded transaction, defines how to
 /// write the transaction as binary.
 impl Writeable for Transaction {
 	fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ser::Error> {
-		ser_multiwrite!(
-			writer,
-			[write_u64, self.fee],
-			[write_u64, self.lock_height]
-		);
-		self.excess_sig.write(writer)?;
+		self.offset.write(writer)?;
+
 		ser_multiwrite!(
 			writer,
 			[write_u64, self.inputs.len() as u64],
-			[write_u64, self.outputs.len() as u64]
+			[write_u64, self.outputs.len() as u64],
+			[write_u64, self.kernels.len() as u64]
 		);

 		// Consensus rule that everything is sorted in lexicographical order on the wire.
 		let mut inputs = self.inputs.clone();
 		let mut outputs = self.outputs.clone();
+		let mut kernels = self.kernels.clone();

 		try!(inputs.write_sorted(writer));
 		try!(outputs.write_sorted(writer));
+		try!(kernels.write_sorted(writer));

 		Ok(())
 	}
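The new wire layout writes the offset first, then the three collection counts, then the sorted inputs, outputs and kernels. A rough sketch of that ordering with stand-in types (plain u64s, not the real ser::Writer):

```rust
// Rough sketch of the Transaction wire order defined above: offset bytes,
// then input/output/kernel counts, then the (sorted) collections.
fn main() {
    let offset = [7u8; 32];                  // tx "offset" (k2)
    let inputs = vec![2u64, 1];              // u64s stand in for Input/Output/TxKernel
    let outputs = vec![3u64];
    let kernels = vec![9u64];

    let mut buf: Vec<u8> = Vec::new();
    buf.extend_from_slice(&offset);          // offset is written first
    for len in [inputs.len(), outputs.len(), kernels.len()] {
        buf.extend_from_slice(&(len as u64).to_be_bytes());
    }
    let mut sorted_inputs = inputs.clone();
    sorted_inputs.sort();                    // consensus rule: sorted on the wire
    for v in sorted_inputs.iter().chain(&outputs).chain(&kernels) {
        buf.extend_from_slice(&v.to_be_bytes());
    }
    assert_eq!(buf.len(), 32 + 3 * 8 + 4 * 8);
}
```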
@ -223,23 +258,20 @@ impl Writeable for Transaction {
|
||||||
/// transaction from a binary stream.
|
/// transaction from a binary stream.
|
||||||
impl Readable for Transaction {
|
impl Readable for Transaction {
|
||||||
fn read(reader: &mut Reader) -> Result<Transaction, ser::Error> {
|
fn read(reader: &mut Reader) -> Result<Transaction, ser::Error> {
|
||||||
let (fee, lock_height) =
|
let offset = BlindingFactor::read(reader)?;
|
||||||
ser_multiread!(reader, read_u64, read_u64);
|
|
||||||
|
|
||||||
let excess_sig = Signature::read(reader)?;
|
let (input_len, output_len, kernel_len) =
|
||||||
|
ser_multiread!(reader, read_u64, read_u64, read_u64);
|
||||||
let (input_len, output_len) =
|
|
||||||
ser_multiread!(reader, read_u64, read_u64);
|
|
||||||
|
|
||||||
let inputs = read_and_verify_sorted(reader, input_len)?;
|
let inputs = read_and_verify_sorted(reader, input_len)?;
|
||||||
let outputs = read_and_verify_sorted(reader, output_len)?;
|
let outputs = read_and_verify_sorted(reader, output_len)?;
|
||||||
|
let kernels = read_and_verify_sorted(reader, kernel_len)?;
|
||||||
|
|
||||||
Ok(Transaction {
|
Ok(Transaction {
|
||||||
fee: fee,
|
offset,
|
||||||
lock_height: lock_height,
|
inputs,
|
||||||
excess_sig: excess_sig,
|
outputs,
|
||||||
inputs: inputs,
|
kernels,
|
||||||
outputs: outputs,
|
|
||||||
..Default::default()
|
..Default::default()
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -253,7 +285,7 @@ impl Committed for Transaction {
|
||||||
&self.outputs
|
&self.outputs
|
||||||
}
|
}
|
||||||
fn overage(&self) -> i64 {
|
fn overage(&self) -> i64 {
|
||||||
(self.fee as i64)
|
(self.fee() as i64)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -267,28 +299,34 @@ impl Transaction {
|
||||||
/// Creates a new empty transaction (no inputs or outputs, zero fee).
|
/// Creates a new empty transaction (no inputs or outputs, zero fee).
|
||||||
pub fn empty() -> Transaction {
|
pub fn empty() -> Transaction {
|
||||||
Transaction {
|
Transaction {
|
||||||
fee: 0,
|
offset: BlindingFactor::zero(),
|
||||||
lock_height: 0,
|
|
||||||
excess_sig: Signature::from_raw_data(&[0;64]).unwrap(),
|
|
||||||
inputs: vec![],
|
inputs: vec![],
|
||||||
outputs: vec![],
|
outputs: vec![],
|
||||||
|
kernels: vec![],
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Creates a new transaction initialized with
|
/// Creates a new transaction initialized with
|
||||||
/// the provided inputs, outputs, fee and lock_height.
|
/// the provided inputs, outputs, kernels
|
||||||
pub fn new(
|
pub fn new(
|
||||||
inputs: Vec<Input>,
|
inputs: Vec<Input>,
|
||||||
outputs: Vec<Output>,
|
outputs: Vec<Output>,
|
||||||
fee: u64,
|
kernels: Vec<TxKernel>,
|
||||||
lock_height: u64,
|
|
||||||
) -> Transaction {
|
) -> Transaction {
|
||||||
Transaction {
|
Transaction {
|
||||||
fee: fee,
|
offset: BlindingFactor::zero(),
|
||||||
lock_height: lock_height,
|
|
||||||
excess_sig: Signature::from_raw_data(&[0;64]).unwrap(),
|
|
||||||
inputs: inputs,
|
inputs: inputs,
|
||||||
outputs: outputs,
|
outputs: outputs,
|
||||||
|
kernels: kernels,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Creates a new transaction using this transaction as a template
/// and with the specified offset.
pub fn with_offset(self, offset: BlindingFactor) -> Transaction {
    Transaction {
        offset: offset,
        ..self
    }
}

@@ -316,74 +354,92 @@ impl Transaction {
    }
}

/// Builds a new transaction with the provided fee.
/// Total fee for a transaction is the sum of fees of all kernels.
pub fn with_fee(self, fee: u64) -> Transaction {
pub fn fee(&self) -> u64 {
    Transaction { fee: fee, ..self }
    self.kernels.iter().fold(0, |acc, ref x| acc + x.fee)
}

/// Builds a new transaction with the provided lock_height.
/// Lock height of a transaction is the max lock height of the kernels.
pub fn with_lock_height(self, lock_height: u64) -> Transaction {
pub fn lock_height(&self) -> u64 {
    Transaction {
    self.kernels.iter().fold(0, |acc, ref x| max(acc, x.lock_height))
        lock_height: lock_height,
    }
        ..self

/// To verify transaction kernels we check that -
/// * all kernels have an even fee
/// * sum of input/output commitments matches sum of kernel commitments after applying offset
/// * each kernel sig is valid (i.e. tx commitments sum to zero, given above is true)
fn verify_kernels(&self) -> Result<(), Error> {
    // check that each individual kernel fee is even
    // TODO - is this strictly necessary given that we check overall tx fee?
    // TODO - move this into verify_fee() check or maybe kernel.verify()?
    for k in &self.kernels {
        if k.fee & 1 != 0 {
            return Err(Error::OddKernelFee);
        }
    }

    /// The verification for a MimbleWimble transaction involves getting the
    // sum all input and output commitments
    /// excess of summing all commitments and using it as a public key
    let io_sum = self.sum_commitments()?;
    /// to verify the embedded signature. The rational is that if the values
    /// sum to zero as they should in r.G + v.H then only k.G the excess
    /// of the sum of r.G should be left. And r.G is the definition of a
    /// public key generated using r as a private key.
    pub fn verify_sig(&self) -> Result<Commitment, secp::Error> {
        let rsum = self.sum_commitments()?;
        let msg = Message::from_slice(&kernel_sig_msg(self.fee, self.lock_height))?;
    // sum all kernels commitments
    let kernel_sum = {
        let mut kernel_commits = self.kernels
            .iter()
            .map(|x| x.excess)
            .collect::<Vec<_>>();

        let secp = static_secp_instance();
        let secp = secp.lock().unwrap();
        let sig = self.excess_sig;
        // pretend the sum is a public key (which it is, being of the form r.G) and
        // add the offset in as necessary (unless offset is zero)
        // verify the transaction sig with it
        if self.offset != BlindingFactor::zero() {
            let valid = Keychain::aggsig_verify_single_from_commit(&secp, &sig, &msg, &rsum);
            let skey = self.offset.secret_key(&secp)?;
            if !valid {
            let offset_commit = secp.commit(0, skey)?;
                return Err(secp::Error::IncorrectSignature);
            kernel_commits.push(offset_commit);
        }
        Ok(rsum)
    }

    /// Builds a transaction kernel
        secp.commit_sum(kernel_commits, vec![])?
    pub fn build_kernel(&self, excess: Commitment) -> TxKernel {
    };
        TxKernel {
            features: KernelFeatures::DEFAULT_KERNEL,
    // sum of kernel commitments (including the offset) must match
            excess: excess,
    // the sum of input/output commitments (minus fee)
            excess_sig: self.excess_sig.clone(),
    if kernel_sum != io_sum {
            fee: self.fee,
        return Err(Error::KernelSumMismatch);
            lock_height: self.lock_height,
    }

    // verify all signatures with the commitment as pk
    for kernel in &self.kernels {
        kernel.verify()?;
    }

    Ok(())
}
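The check above is a balance of Pedersen commitments: the sum of all kernel excess commitments plus a commitment to the offset must equal the sum of output commitments minus input commitments. Below is a minimal, self-contained sketch of that balance using plain integers in place of curve points; all names and values are illustrative, not grin's API.

    // Toy version of the kernel-sum check in verify_kernels(), with i64 values
    // standing in for Pedersen commitments. Illustrative only.
    fn main() {
        let input_blind = 40i64;
        let output_blind = 65i64;
        let kernel_excess = 20i64; // k1, committed to in the kernel
        let offset = 5i64;         // k2, carried on the tx as the "offset"

        // plays the role of self.sum_commitments()
        let io_sum = output_blind - input_blind;

        // plays the role of commit_sum(kernel_commits + offset_commit)
        let kernel_sum = kernel_excess + offset;

        // the check performed by verify_kernels()
        assert_eq!(kernel_sum, io_sum, "KernelSumMismatch");
        println!("kernel sum matches input/output sum");
    }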
/// Validates all relevant parts of a fully built transaction. Checks the
/// excess value against the signature as well as range proofs for each
/// output.
pub fn validate(&self) -> Result<Commitment, Error> {
pub fn validate(&self) -> Result<(), Error> {
    if self.fee & 1 != 0 {
    if self.fee() & 1 != 0 {
        return Err(Error::OddFee);
    }
    if self.inputs.len() > consensus::MAX_BLOCK_INPUTS {
        return Err(Error::TooManyInputs);
    }
    self.verify_sorted()?;

    for out in &self.outputs {
        out.verify_proof()?;
    }
    let excess = self.verify_sig()?;
    Ok(excess)
    self.verify_kernels()?;

    Ok(())
}

fn verify_sorted(&self) -> Result<(), Error> {
    self.inputs.verify_sort_order()?;
    self.outputs.verify_sort_order()?;
    self.kernels.verify_sort_order()?;
    Ok(())
}
}
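Since fee and lock height now live on the kernels, `fee()` and `lock_height()` above aggregate over the kernel list (sum of fees, max of lock heights). A small self-contained sketch of the same folds over a stand-in kernel type; the struct and function names are illustrative, not grin's types.

    use std::cmp::max;

    // Stand-in for TxKernel, just enough to show the aggregation used by
    // Transaction::fee() and Transaction::lock_height() above.
    struct Kernel { fee: u64, lock_height: u64 }

    fn total_fee(kernels: &[Kernel]) -> u64 {
        kernels.iter().fold(0, |acc, k| acc + k.fee)
    }

    fn max_lock_height(kernels: &[Kernel]) -> u64 {
        kernels.iter().fold(0, |acc, k| max(acc, k.lock_height))
    }

    fn main() {
        let kernels = vec![
            Kernel { fee: 4, lock_height: 0 },
            Kernel { fee: 6, lock_height: 150 },
        ];
        assert_eq!(total_fee(&kernels), 10);
        assert_eq!(max_lock_height(&kernels), 150);
    }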
@@ -22,7 +22,7 @@
use std::{cmp, error, fmt};
use std::io::{self, Read, Write};
use byteorder::{BigEndian, ByteOrder, ReadBytesExt};
use keychain::{Identifier, IDENTIFIER_SIZE};
use keychain::{Identifier, BlindingFactor, IDENTIFIER_SIZE};
use consensus;
use consensus::VerifySortOrder;
use core::hash::Hashed;

@@ -30,7 +30,12 @@ use core::transaction::{SWITCH_COMMIT_HASH_SIZE, SwitchCommitHash};
use util::secp::pedersen::Commitment;
use util::secp::pedersen::RangeProof;
use util::secp::Signature;
use util::secp::constants::{MAX_PROOF_SIZE, PEDERSEN_COMMITMENT_SIZE, AGG_SIGNATURE_SIZE};
use util::secp::constants::{
    MAX_PROOF_SIZE,
    PEDERSEN_COMMITMENT_SIZE,
    AGG_SIGNATURE_SIZE,
    SECRET_KEY_SIZE,
};

/// Possible errors deriving from serializing or deserializing.
#[derive(Debug)]

@@ -325,6 +330,19 @@ impl Writeable for Commitment {
    }
}

impl Writeable for BlindingFactor {
    fn write<W: Writer>(&self, writer: &mut W) -> Result<(), Error> {
        writer.write_fixed_bytes(self)
    }
}

impl Readable for BlindingFactor {
    fn read(reader: &mut Reader) -> Result<BlindingFactor, Error> {
        let bytes = reader.read_fixed_bytes(SECRET_KEY_SIZE)?;
        Ok(BlindingFactor::from_slice(&bytes))
    }
}

impl Writeable for Identifier {
    fn write<W: Writer>(&self, writer: &mut W) -> Result<(), Error> {
        writer.write_fixed_bytes(self)

@@ -590,11 +608,14 @@ impl AsFixedBytes for ::util::secp::pedersen::RangeProof {
        return self.plen;
    }
}
impl AsFixedBytes for ::util::secp::key::SecretKey {
// // TODO - is this (single byte) so we do not ever serialize a secret_key?
    fn len(&self) -> usize {
// // Note: we *can* serialize a blinding_factor built from a secret_key
        return 1;
// // but this needs to be done explicitly (tx kernel offset for example)
    }
// impl AsFixedBytes for ::util::secp::key::SecretKey {
}
// 	fn len(&self) -> usize {
// 		return 1;
// 	}
// }
impl AsFixedBytes for ::util::secp::Signature {
    fn len(&self) -> usize {
        return 64;

@@ -605,6 +626,11 @@ impl AsFixedBytes for ::util::secp::pedersen::Commitment {
        return PEDERSEN_COMMITMENT_SIZE;
    }
}
impl AsFixedBytes for BlindingFactor {
    fn len(&self) -> usize {
        return SECRET_KEY_SIZE;
    }
}
impl AsFixedBytes for SwitchCommitHash {
    fn len(&self) -> usize {
        return SWITCH_COMMIT_HASH_SIZE;
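The ser.rs changes above serialize a BlindingFactor as SECRET_KEY_SIZE (32) fixed bytes rather than ever serializing a raw SecretKey. A sketch of the same fixed-width round trip using only std::io; the helper names here are illustrative and are not grin's ser API.

    use std::io::{Cursor, Read, Write};

    const SECRET_KEY_SIZE: usize = 32;

    // Write the 32 raw bytes, then read them back - the same shape as the
    // Writeable/Readable impls for BlindingFactor above, using std::io only.
    fn main() {
        let blind = [7u8; SECRET_KEY_SIZE];

        let mut buf = Vec::new();
        buf.write_all(&blind).unwrap();

        let mut rdr = Cursor::new(buf);
        let mut out = [0u8; SECRET_KEY_SIZE];
        rdr.read_exact(&mut out).unwrap();

        assert_eq!(blind, out);
    }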
@@ -594,7 +594,7 @@ impl Miner {
    let txs: Vec<&Transaction> = txs_box.iter().map(|tx| tx.as_ref()).collect();

    // build the coinbase and the block itself
    let fees = txs.iter().map(|tx| tx.fee).sum();
    let fees = txs.iter().map(|tx| tx.fee()).sum();
    let height = head.height + 1;
    let block_fees = BlockFees {
        fees,
@@ -13,28 +13,90 @@
// limitations under the License.
/// Encapsulate a secret key for the blind_sum operation

use std::cmp::min;
use rand::thread_rng;

use util::secp::{self, Secp256k1};
use extkey::Identifier;
use keychain::Error;
use util;
use util::secp::{self, Secp256k1};
use util::secp::constants::SECRET_KEY_SIZE;

#[derive(Clone, Debug, PartialEq)]
pub struct BlindingFactor(secp::key::SecretKey);
#[derive(Clone, Copy, Debug, PartialEq, Serialize, Deserialize)]
pub struct BlindingFactor([u8; SECRET_KEY_SIZE]);

impl AsRef<[u8]> for BlindingFactor {
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl BlindingFactor {
    pub fn new(secret_key: secp::key::SecretKey) -> BlindingFactor {
    pub fn from_secret_key(skey: secp::key::SecretKey) -> BlindingFactor {
        BlindingFactor(secret_key)
        BlindingFactor::from_slice(&skey.as_ref())
    }

    pub fn secret_key(&self) -> secp::key::SecretKey {
    pub fn from_slice(data: &[u8]) -> BlindingFactor {
        self.0
        let mut blind = [0; SECRET_KEY_SIZE];
        for i in 0..min(SECRET_KEY_SIZE, data.len()) {
            blind[i] = data[i];
        }
        BlindingFactor(blind)
    }

    pub fn from_slice(secp: &Secp256k1, data: &[u8]) -> Result<BlindingFactor, Error> {
    pub fn zero() -> BlindingFactor {
        Ok(BlindingFactor(
        BlindingFactor::from_secret_key(secp::key::ZERO_KEY)
            secp::key::SecretKey::from_slice(&secp, data)?,
        ))
    }

    pub fn to_hex(&self) -> String {
        util::to_hex(self.0.to_vec())
    }

    pub fn from_hex(hex: &str) -> Result<BlindingFactor, Error> {
        let bytes = util::from_hex(hex.to_string()).unwrap();
        Ok(BlindingFactor::from_slice(&bytes))
    }

    pub fn secret_key(&self, secp: &Secp256k1) -> Result<secp::key::SecretKey, Error> {
        if *self == BlindingFactor::zero() {
            // TODO - need this currently for tx tests
            // the "zero" secret key is not actually a valid secret_key
            // and secp lib checks this
            Ok(secp::key::ZERO_KEY)
        } else {
            secp::key::SecretKey::from_slice(secp, &self.0)
                .map_err(|e| Error::Secp(e))
        }
    }

    /// Split a blinding_factor (aka secret_key) into a pair of blinding_factors.
    /// We use one of these (k1) to sign the tx_kernel (k1G)
    /// and the other gets aggregated in the block_header as the "offset".
    /// This prevents an actor from being able to sum a set of inputs, outputs and kernels
    /// from a block to identify and reconstruct a particular tx from a block.
    /// You would need both k1, k2 to do this.
    pub fn split(&self, secp: &Secp256k1) -> Result<SplitBlindingFactor, Error> {
        let skey_1 = secp::key::SecretKey::new(secp, &mut thread_rng());

        // use blind_sum to subtract skey_1 from our key (to give k = k1 + k2)
        let skey = self.secret_key(secp)?;
        let skey_2 = secp.blind_sum(vec![skey], vec![skey_1])?;

        let blind_1 = BlindingFactor::from_secret_key(skey_1);
        let blind_2 = BlindingFactor::from_secret_key(skey_2);

        Ok(SplitBlindingFactor {
            blind_1,
            blind_2,
        })
    }
}

#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct SplitBlindingFactor {
    pub blind_1: BlindingFactor,
    pub blind_2: BlindingFactor,
}
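split() above draws a random k1 and derives k2 = k - k1, so that k1 + k2 recombine to the original excess: k1 signs the kernel while k2 travels as the offset. A toy sketch of that relationship with integers modulo a small prime standing in for secp256k1 scalars; everything here is illustrative only.

    // Toy modular arithmetic standing in for secp256k1 scalars, to show the
    // k = k1 + k2 relationship behind BlindingFactor::split(). Illustrative only.
    const ORDER: u64 = 1_000_003; // small prime, not the real curve order

    fn split(k: u64, k1: u64) -> (u64, u64) {
        // k1 is "random"; k2 = k - k1 (mod ORDER), so k1 + k2 = k
        let k2 = (k + ORDER - (k1 % ORDER)) % ORDER;
        (k1 % ORDER, k2)
    }

    fn main() {
        let k = 123_456u64;              // original excess
        let (k1, k2) = split(k, 98_765); // k1 signs the kernel, k2 becomes the offset
        assert_eq!((k1 + k2) % ORDER, k % ORDER);
        // knowing only k1 (kernel) or only k2 (offset) reveals nothing about k
    }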
/// Accumulator to compute the sum of blinding factors. Keeps track of each

@@ -74,9 +136,46 @@ impl BlindSum {
        self
    }

    /// Subtractss the provided key to the sum of blinding factors.
    /// Subtracts the provided key from the sum of blinding factors.
    pub fn sub_blinding_factor(mut self, blind: BlindingFactor) -> BlindSum {
        self.negative_blinding_factors.push(blind);
        self
    }
}

#[cfg(test)]
mod test {
    use rand::thread_rng;

    use blind::BlindingFactor;
    use util::secp::Secp256k1;
    use util::secp::key::{SecretKey, ZERO_KEY};

    #[test]
    fn split_blinding_factor() {
        let secp = Secp256k1::new();
        let skey_in = SecretKey::new(&secp, &mut thread_rng());
        let blind = BlindingFactor::from_secret_key(skey_in);
        let split = blind.split(&secp).unwrap();

        // split a key, sum the split keys and confirm the sum matches the original key
        let mut skey_sum = split.blind_1.secret_key(&secp).unwrap();
        let skey_2 = split.blind_2.secret_key(&secp).unwrap();
        let _ = skey_sum.add_assign(&secp, &skey_2).unwrap();
        assert_eq!(skey_in, skey_sum);
    }

    // Sanity check that we can add the zero key to a secret key and it is still
    // the same key that we started with (k + 0 = k)
    #[test]
    fn zero_key_addition() {
        let secp = Secp256k1::new();
        let skey_in = SecretKey::new(&secp, &mut thread_rng());
        let skey_zero = ZERO_KEY;

        let mut skey_out = skey_in.clone();
        let _ = skey_out.add_assign(&secp, &skey_zero).unwrap();

        assert_eq!(skey_in, skey_out);
    }
}
@@ -258,17 +258,17 @@ impl Keychain {
    pos_keys.extend(&blind_sum
        .positive_blinding_factors
        .iter()
        .map(|b| b.secret_key())
        .filter_map(|b| b.secret_key(&self.secp).ok())
        .collect::<Vec<SecretKey>>());

    neg_keys.extend(&blind_sum
        .negative_blinding_factors
        .iter()
        .map(|b| b.secret_key())
        .filter_map(|b| b.secret_key(&self.secp).ok())
        .collect::<Vec<SecretKey>>());

    let blinding = self.secp.blind_sum(pos_keys, neg_keys)?;
    let sum = self.secp.blind_sum(pos_keys, neg_keys)?;
    Ok(BlindingFactor::new(blinding))
    Ok(BlindingFactor::from_secret_key(sum))
}

pub fn aggsig_create_context(&self, transaction_id: &Uuid, sec_key:SecretKey)

@@ -381,7 +381,8 @@ impl Keychain {
    self.aggsig_verify_single(sig, &msg, Some(&nonce_sum), pubkey, true)
}

pub fn aggsig_calculate_partial_sig(&self,
pub fn aggsig_calculate_partial_sig(
    &self,
    transaction_id: &Uuid,
    other_pub_nonce: &PublicKey,
    fee: u64,

@@ -396,8 +397,9 @@ impl Keychain {
    self.aggsig_sign_single(transaction_id, &msg, Some(&sec_nonce), Some(&nonce_sum), Some(&nonce_sum))
}

/// Helper function to calculate final singature
/// Helper function to calculate final signature
pub fn aggsig_calculate_final_sig(&self,
pub fn aggsig_calculate_final_sig(
    &self,
    transaction_id: &Uuid,
    their_sig: &Signature,
    our_sig: &Signature,

@@ -459,7 +461,8 @@ impl Keychain {
    msg: &Message,
    blinding: &BlindingFactor,
) -> Result<Signature, Error> {
    let sig = aggsig::sign_single(secp, &msg, &blinding.secret_key(), None, None, None)?;
    let skey = &blinding.secret_key(&secp)?;
    let sig = aggsig::sign_single(secp, &msg, skey, None, None, None)?;
    Ok(sig)
}

@@ -474,7 +477,8 @@ impl Keychain {
    msg: &Message,
    blinding: &BlindingFactor,
) -> Result<Signature, Error> {
    let sig = self.secp.sign(msg, &blinding.secret_key())?;
    let skey = &blinding.secret_key(&self.secp)?;
    let sig = self.secp.sign(msg, &skey)?;
    Ok(sig)
}

@@ -485,7 +489,12 @@ impl Keychain {

#[cfg(test)]
mod test {
    use rand::thread_rng;

    use uuid::Uuid;

    use keychain::{BlindSum, BlindingFactor, Keychain};
    use util::kernel_sig_msg;
    use util::secp;
    use util::secp::pedersen::ProofMessage;
    use util::secp::key::SecretKey;
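blind_sum() above switches from map to filter_map so that blinding factors whose secret_key() conversion fails are skipped rather than aborting the whole sum. A tiny self-contained sketch of that pattern with stand-in types, not grin's API.

    // filter_map keeps only the Ok conversions, mirroring how blind_sum()
    // collects secret keys from blinding factors above. Illustrative only.
    fn to_scalar(b: &str) -> Result<u64, String> {
        b.parse::<u64>().map_err(|e| e.to_string())
    }

    fn main() {
        let blinds = vec!["42", "7", "not-a-key"];
        let keys: Vec<u64> = blinds.iter().filter_map(|b| to_scalar(b).ok()).collect();
        assert_eq!(keys, vec![42, 7]); // the invalid entry is silently dropped
    }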
@@ -608,10 +617,374 @@ mod test {
    // in the same way (convenience function)
    assert_eq!(
        keychain.blind_sum(&BlindSum::new()
            .add_blinding_factor(BlindingFactor::new(skey1))
            .add_blinding_factor(BlindingFactor::from_secret_key(skey1))
            .add_blinding_factor(BlindingFactor::new(skey2))
            .add_blinding_factor(BlindingFactor::from_secret_key(skey2))
        ).unwrap(),
        BlindingFactor::new(skey3)
        BlindingFactor::from_secret_key(skey3),
    );
}

#[test]
fn aggsig_sender_receiver_interaction() {
    let sender_keychain = Keychain::from_random_seed().unwrap();
    let receiver_keychain = Keychain::from_random_seed().unwrap();

    // tx identifier for wallet interaction
    let tx_id = Uuid::new_v4();

    // Calculate the kernel excess here for convenience.
    // Normally this would happen during transaction building.
    let kernel_excess = {
        let skey1 = sender_keychain.derived_key(
            &sender_keychain.derive_key_id(1).unwrap(),
        ).unwrap();

        let skey2 = receiver_keychain.derived_key(
            &receiver_keychain.derive_key_id(1).unwrap(),
        ).unwrap();

        let keychain = Keychain::from_random_seed().unwrap();
        let blinding_factor = keychain.blind_sum(
            &BlindSum::new()
                .sub_blinding_factor(BlindingFactor::from_secret_key(skey1))
                .add_blinding_factor(BlindingFactor::from_secret_key(skey2))
        ).unwrap();

        keychain.secp.commit(
            0,
            blinding_factor.secret_key(&keychain.secp).unwrap(),
        ).unwrap()
    };

    // sender starts the tx interaction
    let (sender_pub_excess, sender_pub_nonce) = {
        let keychain = sender_keychain.clone();

        let skey = keychain.derived_key(
            &keychain.derive_key_id(1).unwrap(),
        ).unwrap();

        // dealing with an input here so we need to negate the blinding_factor
        // rather than use it as is
        let blinding_factor = keychain.blind_sum(
            &BlindSum::new()
                .sub_blinding_factor(BlindingFactor::from_secret_key(skey))
        ).unwrap();

        let blind = blinding_factor.secret_key(&keychain.secp()).unwrap();

        keychain.aggsig_create_context(&tx_id, blind);
        keychain.aggsig_get_public_keys(&tx_id)
    };

    // receiver receives partial tx
    let (receiver_pub_excess, receiver_pub_nonce, sig_part) = {
        let keychain = receiver_keychain.clone();
        let key_id = keychain.derive_key_id(1).unwrap();

        // let blind = blind_sum.secret_key(&keychain.secp())?;
        let blind = keychain.derived_key(&key_id).unwrap();

        keychain.aggsig_create_context(&tx_id, blind);
        let (pub_excess, pub_nonce) = keychain.aggsig_get_public_keys(&tx_id);
        keychain.aggsig_add_output(&tx_id, &key_id);

        let sig_part = keychain.aggsig_calculate_partial_sig(
            &tx_id,
            &sender_pub_nonce,
            0,
            0,
        ).unwrap();
        (pub_excess, pub_nonce, sig_part)
    };

    // check the sender can verify the partial signature
    // received in the response back from the receiver
    {
        let keychain = sender_keychain.clone();
        let sig_verifies = keychain.aggsig_verify_partial_sig(
            &tx_id,
            &sig_part,
            &receiver_pub_nonce,
            &receiver_pub_excess,
            0,
            0,
        );
        assert!(sig_verifies);
    }

    // now sender signs with their key
    let sender_sig_part = {
        let keychain = sender_keychain.clone();
        keychain.aggsig_calculate_partial_sig(
            &tx_id,
            &receiver_pub_nonce,
            0,
            0,
        ).unwrap()
    };

    // check the receiver can verify the partial signature
    // received by the sender
    {
        let keychain = receiver_keychain.clone();
        let sig_verifies = keychain.aggsig_verify_partial_sig(
            &tx_id,
            &sender_sig_part,
            &sender_pub_nonce,
            &sender_pub_excess,
            0,
            0,
        );
        assert!(sig_verifies);
    }

    // Receiver now builds final signature from sender and receiver parts
    let (final_sig, final_pubkey) = {
        let keychain = receiver_keychain.clone();

        // Receiver recreates their partial sig (we do not maintain state from earlier)
        let our_sig_part = keychain.aggsig_calculate_partial_sig(
            &tx_id,
            &sender_pub_nonce,
            0,
            0,
        ).unwrap();

        // Receiver now generates final signature from the two parts
        let final_sig = keychain.aggsig_calculate_final_sig(
            &tx_id,
            &sender_sig_part,
            &our_sig_part,
            &sender_pub_nonce,
        ).unwrap();

        // Receiver calculates the final public key (to verify sig later)
        let final_pubkey = keychain.aggsig_calculate_final_pubkey(&tx_id, &sender_pub_excess).unwrap();

        (final_sig, final_pubkey)
    };

    // Receiver checks the final signature verifies
    {
        let keychain = receiver_keychain.clone();

        // Receiver checks the final signature verifies
        let sig_verifies = keychain.aggsig_verify_final_sig_build_msg(
            &final_sig,
            &final_pubkey,
            0,
            0,
        );
        assert!(sig_verifies);
    }

    // Check we can verify the sig using the kernel excess
    {
        let keychain = Keychain::from_random_seed().unwrap();

        let msg = secp::Message::from_slice(
            &kernel_sig_msg(
                0,
                0,
            ),
        ).unwrap();

        let sig_verifies = Keychain::aggsig_verify_single_from_commit(
            &keychain.secp,
            &final_sig,
            &msg,
            &kernel_excess,
        );

        assert!(sig_verifies);
    }
}

#[test]
fn aggsig_sender_receiver_interaction_offset() {
    let sender_keychain = Keychain::from_random_seed().unwrap();
    let receiver_keychain = Keychain::from_random_seed().unwrap();

    // tx identifier for wallet interaction
    let tx_id = Uuid::new_v4();

    // This is the kernel offset that we use to split the key
    // Summing these at the block level prevents the
    // kernels from being used to reconstruct (or identify) individual transactions
    let kernel_offset = SecretKey::new(&sender_keychain.secp(), &mut thread_rng());

    // Calculate the kernel excess here for convenience.
    // Normally this would happen during transaction building.
    let kernel_excess = {
        let skey1 = sender_keychain.derived_key(
            &sender_keychain.derive_key_id(1).unwrap(),
        ).unwrap();

        let skey2 = receiver_keychain.derived_key(
            &receiver_keychain.derive_key_id(1).unwrap(),
        ).unwrap();

        let keychain = Keychain::from_random_seed().unwrap();
        let blinding_factor = keychain.blind_sum(
            &BlindSum::new()
                .sub_blinding_factor(BlindingFactor::from_secret_key(skey1))
                .add_blinding_factor(BlindingFactor::from_secret_key(skey2))
                // subtract the kernel offset here like we would when
                // verifying a kernel signature
                .sub_blinding_factor(BlindingFactor::from_secret_key(kernel_offset))
        ).unwrap();

        keychain.secp.commit(
            0,
            blinding_factor.secret_key(&keychain.secp).unwrap(),
        ).unwrap()
    };

    // sender starts the tx interaction
    let (sender_pub_excess, sender_pub_nonce) = {
        let keychain = sender_keychain.clone();

        let skey = keychain.derived_key(
            &keychain.derive_key_id(1).unwrap(),
        ).unwrap();

        // dealing with an input here so we need to negate the blinding_factor
        // rather than use it as is
        let blinding_factor = keychain.blind_sum(
            &BlindSum::new()
                .sub_blinding_factor(BlindingFactor::from_secret_key(skey))
                // subtract the kernel offset to create an aggsig context
                // with our "split" key
                .sub_blinding_factor(BlindingFactor::from_secret_key(kernel_offset))
        ).unwrap();

        let blind = blinding_factor.secret_key(&keychain.secp()).unwrap();

        keychain.aggsig_create_context(&tx_id, blind);
        keychain.aggsig_get_public_keys(&tx_id)
    };

    // receiver receives partial tx
    let (receiver_pub_excess, receiver_pub_nonce, sig_part) = {
        let keychain = receiver_keychain.clone();
        let key_id = keychain.derive_key_id(1).unwrap();

        let blind = keychain.derived_key(&key_id).unwrap();

        keychain.aggsig_create_context(&tx_id, blind);
        let (pub_excess, pub_nonce) = keychain.aggsig_get_public_keys(&tx_id);
        keychain.aggsig_add_output(&tx_id, &key_id);

        let sig_part = keychain.aggsig_calculate_partial_sig(
            &tx_id,
            &sender_pub_nonce,
            0,
            0,
        ).unwrap();
        (pub_excess, pub_nonce, sig_part)
    };

    // check the sender can verify the partial signature
    // received in the response back from the receiver
    {
        let keychain = sender_keychain.clone();
        let sig_verifies = keychain.aggsig_verify_partial_sig(
            &tx_id,
            &sig_part,
            &receiver_pub_nonce,
            &receiver_pub_excess,
            0,
            0,
        );
        assert!(sig_verifies);
    }

    // now sender signs with their key
    let sender_sig_part = {
        let keychain = sender_keychain.clone();
        keychain.aggsig_calculate_partial_sig(
            &tx_id,
            &receiver_pub_nonce,
            0,
            0,
        ).unwrap()
    };

    // check the receiver can verify the partial signature
    // received by the sender
    {
        let keychain = receiver_keychain.clone();
        let sig_verifies = keychain.aggsig_verify_partial_sig(
            &tx_id,
            &sender_sig_part,
            &sender_pub_nonce,
            &sender_pub_excess,
            0,
            0,
        );
        assert!(sig_verifies);
    }

    // Receiver now builds final signature from sender and receiver parts
    let (final_sig, final_pubkey) = {
        let keychain = receiver_keychain.clone();

        // Receiver recreates their partial sig (we do not maintain state from earlier)
        let our_sig_part = keychain.aggsig_calculate_partial_sig(
            &tx_id,
            &sender_pub_nonce,
            0,
            0,
        ).unwrap();

        // Receiver now generates final signature from the two parts
        let final_sig = keychain.aggsig_calculate_final_sig(
            &tx_id,
            &sender_sig_part,
            &our_sig_part,
            &sender_pub_nonce,
        ).unwrap();

        // Receiver calculates the final public key (to verify sig later)
        let final_pubkey = keychain.aggsig_calculate_final_pubkey(&tx_id, &sender_pub_excess).unwrap();

        (final_sig, final_pubkey)
    };

    // Receiver checks the final signature verifies
    {
        let keychain = receiver_keychain.clone();

        // Receiver checks the final signature verifies
        let sig_verifies = keychain.aggsig_verify_final_sig_build_msg(
            &final_sig,
            &final_pubkey,
            0,
            0,
        );
        assert!(sig_verifies);
    }

    // Check we can verify the sig using the kernel excess
    {
        let keychain = Keychain::from_random_seed().unwrap();

        let msg = secp::Message::from_slice(
            &kernel_sig_msg(
                0,
                0,
            ),
        ).unwrap();

        let sig_verifies = Keychain::aggsig_verify_single_from_commit(
            &keychain.secp,
            &final_sig,
            &msg,
            &kernel_excess,
        );

        assert!(sig_verifies);
    }
}
}
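The two tests above exercise the same balance with and without the offset: without it the kernel excess is simply the receiver's blinding minus the sender's, with it the offset is subtracted on both the signing and the verifying side. A toy restatement with integers in place of curve points, illustrative only.

    // The relation exercised by the two aggsig tests above (toy integers):
    //   no offset:   excess = r_receiver - r_sender
    //   with offset: excess = r_receiver - r_sender - offset
    // A verifier that knows the offset can still check the kernel signature
    // against commit(0, excess) but cannot link kernels back to the r values.
    fn main() {
        let (r_sender, r_receiver, offset) = (11i64, 30i64, 4i64);

        let excess_no_offset = r_receiver - r_sender;
        assert_eq!(excess_no_offset, 19);

        // what the signers actually sign with once the key is "split"
        let excess_with_offset = r_receiver - r_sender - offset;
        assert_eq!(excess_with_offset + offset, excess_no_offset);
    }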
@@ -337,8 +337,16 @@ mod tests {
        .range_proof(100, &key_id1, output_commit, msg)
        .unwrap(),
    };
    let outputs = vec![output];
    let test_transaction = core::transaction::Transaction::new(inputs, outputs, 5, 0);
    let kernel = core::transaction::TxKernel::empty()
        .with_fee(5)
        .with_lock_height(0);

    let test_transaction = core::transaction::Transaction::new(
        inputs,
        vec![output],
        vec![kernel],
    );

    let test_pool_entry = PoolEntry::new(&test_transaction);
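The updated pool test builds an explicit kernel with a builder chain before constructing the transaction. A stand-in sketch of that with_fee / with_lock_height builder pattern using a plain struct; types and values here are illustrative, not grin's.

    // Stand-in for the TxKernel::empty().with_fee(..).with_lock_height(..)
    // builder chain used in the updated pool test above. Illustrative only.
    #[derive(Default, Debug, PartialEq)]
    struct Kernel { fee: u64, lock_height: u64 }

    impl Kernel {
        fn empty() -> Kernel { Kernel::default() }
        fn with_fee(self, fee: u64) -> Kernel { Kernel { fee, ..self } }
        fn with_lock_height(self, lock_height: u64) -> Kernel { Kernel { lock_height, ..self } }
    }

    fn main() {
        let kernel = Kernel::empty().with_fee(5).with_lock_height(0);
        assert_eq!(kernel, Kernel { fee: 5, lock_height: 0 });
    }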
@@ -160,9 +160,9 @@ where
    }

    let head_header = self.blockchain.head_header()?;
    if head_header.height < tx.lock_height {
    if head_header.height < tx.lock_height() {
        return Err(PoolError::ImmatureTransaction {
            lock_height: tx.lock_height,
            lock_height: tx.lock_height(),
        });
    }

@@ -583,7 +583,7 @@ where
        tx_weight = 1;
    }
    let threshold = (tx_weight as u64) * self.config.accept_fee_base;
    if tx.fee < threshold {
    if tx.fee() < threshold {
        return Err(PoolError::LowFeeTransaction(threshold));
    }
}

@@ -1242,8 +1242,7 @@ mod tests {
    }
    tx_elements.push(build::with_fee(fees as u64));

    let (tx, _) = build::transaction(tx_elements, &keychain).unwrap();
    build::transaction(tx_elements, &keychain).unwrap()
    tx
}

fn test_transaction_with_coinbase_input(

@@ -1272,8 +1271,7 @@ mod tests {
    }
    tx_elements.push(build::with_fee(fees as u64));

    let (tx, _) = build::transaction(tx_elements, &keychain).unwrap();
    build::transaction(tx_elements, &keychain).unwrap()
    tx
}

/// Very un-dry way of building a vanilla tx and adding a lock_height to it.

@@ -1303,8 +1301,7 @@ mod tests {
    tx_elements.push(build::with_fee(fees as u64));

    tx_elements.push(build::with_lock_height(lock_height));
    let (tx, _) = build::transaction(tx_elements, &keychain).unwrap();
    build::transaction(tx_elements, &keychain).unwrap()
    tx
}

/// Deterministically generate an output defined by our test scheme
@@ -25,9 +25,9 @@ use uuid::Uuid;

use api;
use core::consensus::reward;
use core::core::{build, Block, Output, Transaction, TxKernel, amount_to_hr_string};
use core::core::{build, Block, Committed, Output, Transaction, TxKernel, amount_to_hr_string};
use core::{global, ser};
use keychain::{Identifier, Keychain};
use keychain::{Identifier, Keychain, BlindingFactor};
use types::*;
use util::{LOGGER, to_hex, secp};

@@ -52,7 +52,7 @@ fn handle_sender_initiation(
    keychain: &Keychain,
    partial_tx: &PartialTx
) -> Result<PartialTx, Error> {
    let (amount, _sender_pub_blinding, sender_pub_nonce, _sig, tx) = read_partial_tx(keychain, partial_tx)?;
    let (amount, _sender_pub_blinding, sender_pub_nonce, kernel_offset, _sig, tx) = read_partial_tx(keychain, partial_tx)?;

    let root_key_id = keychain.root_key_id();

@@ -60,9 +60,9 @@ fn handle_sender_initiation(
    // we don't necessarily want to just trust the sender
    // we could just overwrite the fee here (but we won't) due to the ecdsa sig
    let fee = tx_fee(tx.inputs.len(), tx.outputs.len() + 1, None);
    if fee != tx.fee {
    if fee != tx.fee() {
        return Err(Error::FeeDispute {
            sender_fee: tx.fee,
            sender_fee: tx.fee(),
            recipient_fee: fee,
        });
    }

@@ -104,7 +104,7 @@ fn handle_sender_initiation(
    })?;

    // Still handy for getting the blinding sum
    let (_, blind_sum) = build::transaction(
    let (_, blind_sum) = build::partial_transaction(
        vec![
            build::output(out_amount, key_id.clone()),
        ],

@@ -114,16 +114,19 @@ fn handle_sender_initiation(
    warn!(LOGGER, "Creating new aggsig context");
    // Create a new aggsig context
    // this will create a new blinding sum and nonce, and store them
    let result = keychain.aggsig_create_context(&partial_tx.id, blind_sum.secret_key());
    let blind = blind_sum.secret_key(&keychain.secp())?;
    if let Err(_) = result {
    keychain.aggsig_create_context(&partial_tx.id, blind);
        return Err(Error::DuplicateTransactionId);
    }
    keychain.aggsig_add_output(&partial_tx.id, &key_id);

    let sig_part=keychain.aggsig_calculate_partial_sig(&partial_tx.id, &sender_pub_nonce, fee, tx.lock_height).unwrap();
    let sig_part = keychain.aggsig_calculate_partial_sig(
        &partial_tx.id,
        &sender_pub_nonce,
        fee,
        tx.lock_height(),
    ).unwrap();

    // Build the response, which should contain sR, blinding excess xR * G, public nonce kR * G
    let mut partial_tx = build_partial_tx(&partial_tx.id, keychain, amount, Some(sig_part), tx);
    let mut partial_tx = build_partial_tx(&partial_tx.id, keychain, amount, kernel_offset, Some(sig_part), tx);
    partial_tx.phase = PartialTxPhase::ReceiverInitiation;

    Ok(partial_tx)

@@ -146,9 +149,15 @@ fn handle_sender_confirmation(
    keychain: &Keychain,
    partial_tx: &PartialTx
) -> Result<PartialTx, Error> {
    let (amount, sender_pub_blinding, sender_pub_nonce, sender_sig_part, tx) = read_partial_tx(keychain, partial_tx)?;
    let (amount, sender_pub_blinding, sender_pub_nonce, kernel_offset, sender_sig_part, tx) = read_partial_tx(keychain, partial_tx)?;
    let sender_sig_part = sender_sig_part.unwrap();
    let res = keychain.aggsig_verify_partial_sig(&partial_tx.id, &sender_sig_part, &sender_pub_nonce, &sender_pub_blinding, tx.fee, tx.lock_height);
    let res = keychain.aggsig_verify_partial_sig(
        &partial_tx.id,
        &sender_sig_part,
        &sender_pub_nonce,
        &sender_pub_blinding,
        tx.fee(), tx.lock_height(),
    );

    if !res {
        error!(LOGGER, "Partial Sig from sender invalid.");

@@ -156,23 +165,50 @@ fn handle_sender_confirmation(
    }

    // Just calculate our sig part again instead of storing
    let our_sig_part=keychain.aggsig_calculate_partial_sig(&partial_tx.id, &sender_pub_nonce, tx.fee, tx.lock_height).unwrap();
    let our_sig_part = keychain.aggsig_calculate_partial_sig(
        &partial_tx.id,
        &sender_pub_nonce,
        tx.fee(),
        tx.lock_height(),
    ).unwrap();

    // And the final signature
    let final_sig=keychain.aggsig_calculate_final_sig(&partial_tx.id, &sender_sig_part, &our_sig_part, &sender_pub_nonce).unwrap();
    let final_sig = keychain.aggsig_calculate_final_sig(
        &partial_tx.id,
        &sender_sig_part,
        &our_sig_part,
        &sender_pub_nonce,
    ).unwrap();

    // Calculate the final public key (for our own sanity check)
    let final_pubkey=keychain.aggsig_calculate_final_pubkey(&partial_tx.id, &sender_pub_blinding).unwrap();
    let final_pubkey = keychain.aggsig_calculate_final_pubkey(
        &partial_tx.id,
        &sender_pub_blinding,
    ).unwrap();

    // Check our final sig verifies
    let res = keychain.aggsig_verify_final_sig_build_msg(&final_sig, &final_pubkey, tx.fee, tx.lock_height);
    let res = keychain.aggsig_verify_final_sig_build_msg(
        &final_sig,
        &final_pubkey,
        tx.fee(),
        tx.lock_height(),
    );

    if !res {
        error!(LOGGER, "Final aggregated signature invalid.");
        return Err(Error::Signature(String::from("Final aggregated signature invalid.")));
    }

    let final_tx = build_final_transaction(&partial_tx.id, config, keychain, amount, &final_sig, tx.clone())?;
    let final_tx = build_final_transaction(
        &partial_tx.id,
        config,
        keychain,
        amount,
        kernel_offset,
        &final_sig,
        tx.clone(),
    )?;

    let tx_hex = to_hex(ser::ser_vec(&final_tx).unwrap());

    let url = format!("{}/v1/pool/push", config.check_node_api_http_addr.as_str());

@@ -180,7 +216,8 @@ fn handle_sender_confirmation(
    .map_err(|e| Error::Node(e))?;

    // Return what we've actually posted
    let mut partial_tx = build_partial_tx(&partial_tx.id, keychain, amount, Some(final_sig), tx);
    // TODO - why build_partial_tx here? Just a naming issue?
    let mut partial_tx = build_partial_tx(&partial_tx.id, keychain, amount, kernel_offset, Some(final_sig), tx);
    partial_tx.phase = PartialTxPhase::ReceiverConfirmation;
    Ok(partial_tx)
}

@@ -317,19 +354,19 @@ fn build_final_transaction(
    config: &WalletConfig,
    keychain: &Keychain,
    amount: u64,
    kernel_offset: BlindingFactor,
    excess_sig: &secp::Signature,
    tx: Transaction,
) -> Result<Transaction, Error> {

    let root_key_id = keychain.root_key_id();

    // double check the fee amount included in the partial tx
    // we don't necessarily want to just trust the sender
    // we could just overwrite the fee here (but we won't) due to the ecdsa sig
    let fee = tx_fee(tx.inputs.len(), tx.outputs.len() + 1, None);
    if fee != tx.fee {
    if fee != tx.fee() {
        return Err(Error::FeeDispute {
            sender_fee: tx.fee,
            sender_fee: tx.fee(),
            recipient_fee: fee,
        });
    }

@@ -374,19 +411,35 @@ fn build_final_transaction(

    // Build final transaction, the sum of which should
    // be the same as the exchanged excess values
    let (mut final_tx, _) = build::transaction(
    let mut final_tx = build::transaction(
        vec![
            build::initial_tx(tx),
            build::output(out_amount, key_id.clone()),
            build::with_offset(kernel_offset),
        ],
        keychain,
    )?;

    final_tx.excess_sig = excess_sig.clone();
    // build the final excess based on final tx and offset
    let final_excess = {
        // sum the input/output commitments on the final tx
        let tx_excess = final_tx.sum_commitments()?;

    // make sure the resulting transaction is valid (could have been lied to on
        // subtract the kernel_excess (built from kernel_offset)
    // excess).
        let offset_excess = keychain.secp().commit(0, kernel_offset.secret_key(&keychain.secp()).unwrap()).unwrap();
    let _ = final_tx.validate()?;
        keychain.secp().commit_sum(vec![tx_excess], vec![offset_excess])?
    };

    // update the tx kernel to reflect the offset excess and sig
    assert_eq!(final_tx.kernels.len(), 1);
    final_tx.kernels[0].excess = final_excess.clone();
    final_tx.kernels[0].excess_sig = excess_sig.clone();

    // confirm the kernel verifies successfully before proceeding
    final_tx.kernels[0].verify()?;

    // confirm the overall transaction is valid (including the updated kernel)
    final_tx.validate()?;

    debug!(
        LOGGER,
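Both receiver handlers above recompute the expected fee for (inputs, outputs + 1) and refuse to proceed if it disagrees with the fee the sender placed in the kernel. A minimal sketch of that dispute check; the tx_fee_estimate helper below is a simplified stand-in, not grin's actual tx_fee() formula.

    // Sketch of the fee double-check in handle_sender_initiation() and
    // build_final_transaction() above. The weight formula is illustrative only.
    fn tx_fee_estimate(inputs: usize, outputs: usize, fee_base: u64) -> u64 {
        // weight grows with outputs, shrinks with inputs (floored at 1)
        let weight = std::cmp::max(1, 4 * outputs as i64 - inputs as i64) as u64;
        weight * fee_base
    }

    fn main() {
        let (inputs, outputs) = (2usize, 1usize);
        let sender_fee = tx_fee_estimate(inputs, outputs + 1, 10);
        let recipient_fee = tx_fee_estimate(inputs, outputs + 1, 10);
        assert_eq!(sender_fee, recipient_fee); // otherwise Error::FeeDispute
    }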
@@ -12,16 +12,19 @@
// See the License for the specific language governing permissions and
// limitations under the License.

use rand::thread_rng;
use uuid::Uuid;

use api;
use client;
use checker;
use core::core::{build, Transaction, amount_to_hr_string};
use core::ser;
use keychain::{BlindingFactor, Identifier, Keychain};
use keychain::{BlindingFactor, BlindSum, Identifier, Keychain};
use receiver::TxWrapper;
use types::*;
use util::LOGGER;
use util::secp::key::SecretKey;
use util;

/// Issue a new transaction to the provided sender by spending some of our

@@ -45,7 +48,7 @@ pub fn issue_send_tx(
    // proof of concept - set lock_height on the tx
    let lock_height = chain_tip.height;

    let (tx, blind_sum, coins, change_key, amount_with_fee) = build_send_tx(
    let (tx, blind, coins, change_key, amount_with_fee) = build_send_tx(
        config,
        keychain,
        amount,

@@ -55,16 +58,32 @@ pub fn issue_send_tx(
        max_outputs,
        selection_strategy_is_use_all,
    )?;
    /*
     * -Sender picks random blinding factors for all outputs it participates in, computes total blinding excess xS
     * -Sender picks random nonce kS
     * -Sender posts inputs, outputs, Message M=fee, xS * G and kS * G to Receiver
     */

    // TODO - wrap this up in build_send_tx or even the build() call?
    // Generate a random kernel offset here
    // and subtract it from the blind_sum so we create
    // the aggsig context with the "split" key
    let kernel_offset = BlindingFactor::from_secret_key(
        SecretKey::new(&keychain.secp(), &mut thread_rng())
    );

    let blind_offset = keychain.blind_sum(
        &BlindSum::new()
            .add_blinding_factor(blind)
            .sub_blinding_factor(kernel_offset)
    ).unwrap();

    //
    // -Sender picks random blinding factors for all outputs it participates in, computes total blinding excess xS
    // -Sender picks random nonce kS
    // -Sender posts inputs, outputs, Message M=fee, xS * G and kS * G to Receiver
    //
    // Create a new aggsig context
    let tx_id = Uuid::new_v4();
    let _ = keychain.aggsig_create_context(&tx_id, blind_sum.secret_key());
    let skey = blind_offset.secret_key(&keychain.secp())?;
    let partial_tx = build_partial_tx(&tx_id, keychain, amount_with_fee, None, tx);
    keychain.aggsig_create_context(&tx_id, skey);

    let partial_tx = build_partial_tx(&tx_id, keychain, amount_with_fee, kernel_offset, None, tx);

    // Closure to acquire wallet lock and lock the coins being spent
    // so we avoid accidental double spend attempt.

@@ -117,17 +136,25 @@ pub fn issue_send_tx(
     * -Sender computes their part of signature, sS = kS + e * xS
     * -Sender posts sS to receiver
     */
    let (_amount, recp_pub_blinding, recp_pub_nonce, sig, tx) = read_partial_tx(keychain, &res.unwrap())?;
    let (_amount, recp_pub_blinding, recp_pub_nonce, kernel_offset, sig, tx) = read_partial_tx(keychain, &res.unwrap())?;
    let res = keychain.aggsig_verify_partial_sig(&tx_id, &sig.unwrap(), &recp_pub_nonce, &recp_pub_blinding, tx.fee, lock_height);
    let res = keychain.aggsig_verify_partial_sig(
        &tx_id,
        &sig.unwrap(),
        &recp_pub_nonce,
        &recp_pub_blinding,
        tx.fee(),
        lock_height,
    );
    if !res {
        error!(LOGGER, "Partial Sig from recipient invalid.");
        return Err(Error::Signature(String::from("Partial Sig from recipient invalid.")));
    }

    let sig_part=keychain.aggsig_calculate_partial_sig(&tx_id, &recp_pub_nonce, tx.fee, tx.lock_height).unwrap();
    let sig_part = keychain.aggsig_calculate_partial_sig(&tx_id, &recp_pub_nonce, tx.fee(), tx.lock_height()).unwrap();

    // Build the next stage, containing sS (and our pubkeys again, for the recipient's convenience)
    let mut partial_tx = build_partial_tx(&tx_id, keychain, amount_with_fee, Some(sig_part), tx);
    // offset has not been modified during tx building, so pass it back in
    let mut partial_tx = build_partial_tx(&tx_id, keychain, amount_with_fee, kernel_offset, Some(sig_part), tx);
    partial_tx.phase = PartialTxPhase::SenderConfirmation;

    // And send again

@@ -146,6 +173,7 @@ pub fn issue_send_tx(
    rollback_wallet()?;
    return Err(e);
    }

    // All good so
    update_wallet()?;
    Ok(())

@@ -228,7 +256,7 @@ fn build_send_tx(
    // on tx being sent (based on current chain height via api).
    parts.push(build::with_lock_height(lock_height));

    let (tx, blind) = build::transaction(parts, &keychain)?;
    let (tx, blind) = build::partial_transaction(parts, &keychain)?;

    Ok((tx, blind, coins, change_key, amount_with_fee))
}

@@ -270,7 +298,7 @@ pub fn issue_burn_tx(
    parts.push(build::output(amount - fee, Identifier::zero()));

    // finalize the burn transaction and send
    let (tx_burn, _) = build::transaction(parts, &keychain)?;
    let tx_burn = build::transaction(parts, &keychain)?;
    tx_burn.validate()?;

    let tx_hex = util::to_hex(ser::ser_vec(&tx_burn).unwrap());

@@ -336,7 +364,7 @@ fn inputs_and_change(

#[cfg(test)]
mod test {
    use core::core::build::{input, output, transaction};
    use core::core::build;
    use core::core::hash::ZERO_HASH;
    use keychain::Keychain;

@@ -348,8 +376,8 @@ mod test {
    let keychain = Keychain::from_random_seed().unwrap();
    let key_id1 = keychain.derive_key_id(1).unwrap();

    let (tx1, _) = transaction(vec![output(105, key_id1.clone())], &keychain).unwrap();
    let tx1 = build::transaction(vec![build::output(105, key_id1.clone())], &keychain).unwrap();
    let (tx2, _) = transaction(vec![input(105, ZERO_HASH, key_id1.clone())], &keychain).unwrap();
    let tx2 = build::transaction(vec![build::input(105, ZERO_HASH, key_id1.clone())], &keychain).unwrap();

    assert_eq!(tx1.outputs[0].features, tx2.inputs[0].features);
    assert_eq!(tx1.outputs[0].commitment(), tx2.inputs[0].commitment());
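The sender's freshly generated kernel_offset above is carried through every round trip as a hex string on the PartialTx (see build_partial_tx / read_partial_tx below) and is never modified by the receiver. A minimal hex round trip over 32 bytes using std only; the helper names are illustrative, not grin's util functions.

    // Minimal to_hex / from_hex round trip for a 32-byte offset, std only.
    fn to_hex(bytes: &[u8]) -> String {
        bytes.iter().map(|b| format!("{:02x}", b)).collect()
    }

    fn from_hex(s: &str) -> Vec<u8> {
        (0..s.len())
            .step_by(2)
            .map(|i| u8::from_str_radix(&s[i..i + 2], 16).unwrap())
            .collect()
    }

    fn main() {
        let offset = [0xabu8; 32];
        let hex = to_hex(&offset);
        assert_eq!(hex.len(), 64);
        assert_eq!(from_hex(&hex), offset.to_vec());
    }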
@@ -38,6 +38,7 @@ use core::core::{transaction, Transaction};
use core::core::hash::Hash;
use core::ser;
use keychain;
use keychain::BlindingFactor;
use util;
use util::secp;
use util::secp::Signature;

@@ -717,6 +718,7 @@ pub struct PartialTx {
    pub amount: u64,
    pub public_blind_excess: String,
    pub public_nonce: String,
    pub kernel_offset: String,
    pub part_sig: String,
    pub tx: String,
}

@@ -728,6 +730,7 @@ pub fn build_partial_tx(
    transaction_id : &Uuid,
    keychain: &keychain::Keychain,
    receive_amount: u64,
    kernel_offset: BlindingFactor,
    part_sig: Option<secp::Signature>,
    tx: Transaction,
) -> PartialTx {

@@ -747,6 +750,7 @@ pub fn build_partial_tx(
    amount: receive_amount,
    public_blind_excess: util::to_hex(pub_excess),
    public_nonce: util::to_hex(pub_nonce),
    kernel_offset: kernel_offset.to_hex(),
    part_sig: match part_sig {
        None => String::from("00"),
        Some(p) => util::to_hex(p.serialize_der(&keychain.secp())),

@@ -760,11 +764,15 @@ pub fn build_partial_tx(
pub fn read_partial_tx(
    keychain: &keychain::Keychain,
    partial_tx: &PartialTx,
) -> Result<(u64, PublicKey, PublicKey, Option<Signature>, Transaction), Error> {
) -> Result<(u64, PublicKey, PublicKey, BlindingFactor, Option<Signature>, Transaction), Error> {
    let blind_bin = util::from_hex(partial_tx.public_blind_excess.clone())?;
    let blinding = PublicKey::from_slice(keychain.secp(), &blind_bin[..])?;

    let nonce_bin = util::from_hex(partial_tx.public_nonce.clone())?;
    let nonce = PublicKey::from_slice(keychain.secp(), &nonce_bin[..])?;

    let kernel_offset = BlindingFactor::from_hex(&partial_tx.kernel_offset.clone())?;

    let sig_bin = util::from_hex(partial_tx.part_sig.clone())?;
    let sig = match sig_bin.len() {
        1 => None,

@@ -774,7 +782,7 @@ pub fn read_partial_tx(
    let tx = ser::deserialize(&mut &tx_bin[..]).map_err(|_| {
        Error::Format("Could not deserialize transaction, invalid format.".to_string())
    })?;
    Ok((partial_tx.amount, blinding, nonce, sig, tx))
    Ok((partial_tx.amount, blinding, nonce, kernel_offset, sig, tx))
}

/// Amount in request to build a coinbase output.