Wallet+Keychain refactoring (#1035)

* beginning to refactor keychain into wallet lib

* rustfmt

* more refactor of aggsig lib, simplify aggsig context manager, hold instance statically for now

* clean some warnings

* clean some warnings

* fix wallet send test a bit

* fix core tests, move wallet dependent tests into integration tests

* repair chain tests

* refactor/fix pool tests

* fix wallet tests, moved from keychain

* add wallet tests
This commit is contained in:
Yeastplume 2018-05-09 10:15:58 +01:00 committed by GitHub
parent 982fdea636
commit 4121ea1240
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
44 changed files with 3599 additions and 3183 deletions

4
Cargo.lock generated
View file

@ -633,6 +633,7 @@ dependencies = [
"grin_keychain 0.2.0",
"grin_store 0.2.0",
"grin_util 0.2.0",
"grin_wallet 0.2.0",
"lru-cache 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)",
@ -663,6 +664,7 @@ dependencies = [
"byteorder 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"grin_keychain 0.2.0",
"grin_util 0.2.0",
"grin_wallet 0.2.0",
"lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
"num-bigint 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
@ -715,6 +717,7 @@ dependencies = [
"grin_core 0.2.0",
"grin_keychain 0.2.0",
"grin_util 0.2.0",
"grin_wallet 0.2.0",
"rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)",
@ -801,6 +804,7 @@ dependencies = [
"grin_util 0.2.0",
"hyper 0.11.24 (registry+https://github.com/rust-lang/crates.io-index)",
"iron 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
"prettytable-rs 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
"router 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",

View file

@ -20,5 +20,6 @@ grin_store = { path = "../store" }
grin_util = { path = "../util" }
[dev-dependencies]
grin_wallet = { path = "../wallet" }
env_logger = "0.3"
rand = "0.3"

View file

@ -17,6 +17,7 @@ extern crate grin_chain as chain;
extern crate grin_core as core;
extern crate grin_keychain as keychain;
extern crate grin_util as util;
extern crate grin_wallet as wallet;
extern crate rand;
extern crate time;
@ -32,6 +33,7 @@ use core::global;
use core::global::ChainTypes;
use keychain::Keychain;
use wallet::libwallet;
use core::pow;
@ -73,8 +75,8 @@ fn data_files() {
let prev = chain.head_header().unwrap();
let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
let pk = keychain.derive_key_id(n as u32).unwrap();
let mut b =
core::core::Block::new(&prev, vec![], &keychain, &pk, difficulty.clone()).unwrap();
let reward = libwallet::reward::output(&keychain, &pk, 0, prev.height).unwrap();
let mut b = core::core::Block::new(&prev, vec![], difficulty.clone(), reward).unwrap();
b.header.timestamp = prev.timestamp + time::Duration::seconds(60);
chain.set_txhashset_roots(&mut b, false).unwrap();
@ -86,7 +88,7 @@ fn data_files() {
global::sizeshift(),
).unwrap();
let bhash = b.hash();
let _bhash = b.hash();
chain
.process_block(b.clone(), chain::Options::MINE)
.unwrap();
@ -112,43 +114,43 @@ fn data_files() {
}
}
fn prepare_block(kc: &Keychain, prev: &BlockHeader, chain: &Chain, diff: u64) -> Block {
let mut b = prepare_block_nosum(kc, prev, diff, vec![]);
fn _prepare_block(kc: &Keychain, prev: &BlockHeader, chain: &Chain, diff: u64) -> Block {
let mut b = _prepare_block_nosum(kc, prev, diff, vec![]);
chain.set_txhashset_roots(&mut b, false).unwrap();
b
}
fn prepare_block_tx(
fn _prepare_block_tx(
kc: &Keychain,
prev: &BlockHeader,
chain: &Chain,
diff: u64,
txs: Vec<&Transaction>,
) -> Block {
let mut b = prepare_block_nosum(kc, prev, diff, txs);
let mut b = _prepare_block_nosum(kc, prev, diff, txs);
chain.set_txhashset_roots(&mut b, false).unwrap();
b
}
fn prepare_fork_block(kc: &Keychain, prev: &BlockHeader, chain: &Chain, diff: u64) -> Block {
let mut b = prepare_block_nosum(kc, prev, diff, vec![]);
fn _prepare_fork_block(kc: &Keychain, prev: &BlockHeader, chain: &Chain, diff: u64) -> Block {
let mut b = _prepare_block_nosum(kc, prev, diff, vec![]);
chain.set_txhashset_roots(&mut b, true).unwrap();
b
}
fn prepare_fork_block_tx(
fn _prepare_fork_block_tx(
kc: &Keychain,
prev: &BlockHeader,
chain: &Chain,
diff: u64,
txs: Vec<&Transaction>,
) -> Block {
let mut b = prepare_block_nosum(kc, prev, diff, txs);
let mut b = _prepare_block_nosum(kc, prev, diff, txs);
chain.set_txhashset_roots(&mut b, true).unwrap();
b
}
fn prepare_block_nosum(
fn _prepare_block_nosum(
kc: &Keychain,
prev: &BlockHeader,
diff: u64,
@ -156,7 +158,9 @@ fn prepare_block_nosum(
) -> Block {
let key_id = kc.derive_key_id(diff as u32).unwrap();
let mut b = match core::core::Block::new(prev, txs, kc, &key_id, Difficulty::from_num(diff)) {
let fees = txs.iter().map(|tx| tx.fee()).sum();
let reward = libwallet::reward::output(&kc, &key_id, fees, prev.height).unwrap();
let mut b = match core::core::Block::new(prev, txs, Difficulty::from_num(diff), reward) {
Err(e) => panic!("{:?}", e),
Ok(b) => b,
};

View file

@ -17,6 +17,7 @@ extern crate grin_chain as chain;
extern crate grin_core as core;
extern crate grin_keychain as keychain;
extern crate grin_util as util;
extern crate grin_wallet as wallet;
extern crate rand;
extern crate time;
@ -25,12 +26,13 @@ use std::sync::Arc;
use chain::Chain;
use chain::types::*;
use core::core::{build, Block, BlockHeader, OutputFeatures, OutputIdentifier, Transaction};
use core::core::{Block, BlockHeader, OutputFeatures, OutputIdentifier, Transaction};
use core::core::hash::Hashed;
use core::core::target::Difficulty;
use core::consensus;
use core::global;
use core::global::ChainTypes;
use wallet::libwallet::{self, build};
use keychain::Keychain;
@ -62,8 +64,8 @@ fn mine_empty_chain() {
let prev = chain.head_header().unwrap();
let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
let pk = keychain.derive_key_id(n as u32).unwrap();
let mut b =
core::core::Block::new(&prev, vec![], &keychain, &pk, difficulty.clone()).unwrap();
let reward = libwallet::reward::output(&keychain, &pk, 0, prev.height).unwrap();
let mut b = core::core::Block::new(&prev, vec![], difficulty.clone(), reward).unwrap();
b.header.timestamp = prev.timestamp + time::Duration::seconds(60);
chain.set_txhashset_roots(&mut b, false).unwrap();
@ -410,7 +412,9 @@ fn prepare_block_nosum(
let proof_size = global::proofsize();
let key_id = kc.derive_key_id(diff as u32).unwrap();
let mut b = match core::core::Block::new(prev, txs, kc, &key_id, Difficulty::from_num(diff)) {
let fees = txs.iter().map(|tx| tx.fee()).sum();
let reward = libwallet::reward::output(&kc, &key_id, fees, prev.height).unwrap();
let mut b = match core::core::Block::new(prev, txs, Difficulty::from_num(diff), reward) {
Err(e) => panic!("{:?}", e),
Ok(b) => b,
};

View file

@ -16,6 +16,7 @@ extern crate env_logger;
extern crate grin_chain as chain;
extern crate grin_core as core;
extern crate grin_keychain as keychain;
extern crate grin_wallet as wallet;
extern crate rand;
use std::fs;
@ -30,6 +31,8 @@ use core::global;
use core::global::ChainTypes;
use core::pow;
use wallet::libwallet;
fn clean_output_dir(dir_name: &str) {
let _ = fs::remove_dir_all(dir_name);
}
@ -53,13 +56,9 @@ fn test_various_store_indices() {
.setup_height(&genesis.header, &Tip::new(genesis.hash()))
.unwrap();
let block = Block::new(
&genesis.header,
vec![],
&keychain,
&key_id,
Difficulty::one(),
).unwrap();
let reward = libwallet::reward::output(&keychain, &key_id, 0, 1).unwrap();
let block = Block::new(&genesis.header, vec![], Difficulty::one(), reward).unwrap();
let block_hash = block.hash();
chain_store.save_block(&block).unwrap();

View file

@ -16,6 +16,7 @@ extern crate env_logger;
extern crate grin_chain as chain;
extern crate grin_core as core;
extern crate grin_keychain as keychain;
extern crate grin_wallet as wallet;
extern crate rand;
extern crate time;
@ -23,7 +24,7 @@ use std::fs;
use std::sync::Arc;
use chain::types::*;
use core::core::build;
use wallet::libwallet::build;
use core::core::target::Difficulty;
use core::core::transaction;
use core::core::OutputIdentifier;
@ -32,6 +33,7 @@ use core::global;
use core::global::ChainTypes;
use keychain::Keychain;
use wallet::libwallet;
use core::pow;
@ -62,8 +64,8 @@ fn test_coinbase_maturity() {
let key_id3 = keychain.derive_key_id(3).unwrap();
let key_id4 = keychain.derive_key_id(4).unwrap();
let mut block =
core::core::Block::new(&prev, vec![], &keychain, &key_id1, Difficulty::one()).unwrap();
let reward = libwallet::reward::output(&keychain, &key_id1, 0, prev.height).unwrap();
let mut block = core::core::Block::new(&prev, vec![], Difficulty::one(), reward).unwrap();
block.header.timestamp = prev.timestamp + time::Duration::seconds(60);
let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
@ -114,13 +116,10 @@ fn test_coinbase_maturity() {
&keychain,
).unwrap();
let mut block = core::core::Block::new(
&prev,
vec![&coinbase_txn],
&keychain,
&key_id3,
Difficulty::one(),
).unwrap();
let txs = vec![&coinbase_txn];
let fees = txs.iter().map(|tx| tx.fee()).sum();
let reward = libwallet::reward::output(&keychain, &key_id3, fees, prev.height).unwrap();
let mut block = core::core::Block::new(&prev, txs, Difficulty::one(), reward).unwrap();
block.header.timestamp = prev.timestamp + time::Duration::seconds(60);
let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
@ -145,8 +144,8 @@ fn test_coinbase_maturity() {
let keychain = Keychain::from_random_seed().unwrap();
let pk = keychain.derive_key_id(1).unwrap();
let mut block =
core::core::Block::new(&prev, vec![], &keychain, &pk, Difficulty::one()).unwrap();
let reward = libwallet::reward::output(&keychain, &pk, 0, prev.height).unwrap();
let mut block = core::core::Block::new(&prev, vec![], Difficulty::one(), reward).unwrap();
block.header.timestamp = prev.timestamp + time::Duration::seconds(60);
let difficulty = consensus::next_difficulty(chain.difficulty_iter()).unwrap();
@ -174,13 +173,11 @@ fn test_coinbase_maturity() {
&keychain,
).unwrap();
let mut block = core::core::Block::new(
&prev,
vec![&coinbase_txn],
&keychain,
&key_id4,
Difficulty::one(),
).unwrap();
let txs = vec![&coinbase_txn];
let fees = txs.iter().map(|tx| tx.fee()).sum();
let reward = libwallet::reward::output(&keychain, &key_id4, fees, prev.height).unwrap();
let mut block =
core::core::Block::new(&prev, vec![&coinbase_txn], Difficulty::one(), reward).unwrap();
block.header.timestamp = prev.timestamp + time::Duration::seconds(60);

View file

@ -20,3 +20,6 @@ time = "0.1"
grin_keychain = { path = "../keychain" }
grin_util = { path = "../util" }
[dev-dependencies]
grin_wallet = { path = "../wallet" }

View file

@ -1,75 +0,0 @@
// Copyright 2018 The Grin Developers
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![feature(test)]
extern crate grin_core as core;
extern crate rand;
extern crate test;
use rand::Rng;
use test::Bencher;
use core::core::txhashset::{self, Summable, TxHashSet};
use core::ser::{Error, Writeable, Writer};
#[derive(Copy, Clone, Debug)]
struct TestElem([u32; 4]);
impl Summable for TestElem {
type Sum = u64;
fn sum(&self) -> u64 {
// sums are not allowed to overflow, so we use this simple
// non-injective "sum" function that will still be homomorphic
self.0[0] as u64 * 0x1000 + self.0[1] as u64 * 0x100 + self.0[2] as u64 * 0x10
+ self.0[3] as u64
}
}
impl Writeable for TestElem {
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), Error> {
try!(writer.write_u32(self.0[0]));
try!(writer.write_u32(self.0[1]));
try!(writer.write_u32(self.0[2]));
writer.write_u32(self.0[3])
}
}
#[bench]
fn bench_small_tree(b: &mut Bencher) {
let mut rng = rand::thread_rng();
b.iter(|| {
let mut big_tree = TxHashSet::new();
for i in 0..1000 {
// To avoid RNG overflow we generate random elements that are small.
// Though to avoid repeat elements they have to be reasonably big.
let new_elem;
let word1 = rng.gen::<u16>() as u32;
let word2 = rng.gen::<u16>() as u32;
if rng.gen() {
if rng.gen() {
new_elem = TestElem([word1, word2, 0, 0]);
} else {
new_elem = TestElem([word1, 0, word2, 0]);
}
} else {
if rng.gen() {
new_elem = TestElem([0, word1, 0, word2]);
} else {
new_elem = TestElem([0, 0, word1, word2]);
}
}
big_tree.push(new_elem);
}
});
}

View file

@ -18,8 +18,8 @@ use time;
use rand::{thread_rng, Rng};
use std::collections::HashSet;
use core::{Commitment, Committed, Input, KernelFeatures, Output, OutputFeatures, Proof,
ProofMessageElements, ShortId, Transaction, TxKernel};
use core::{Commitment, Committed, Input, KernelFeatures, Output, OutputFeatures, Proof, ShortId,
Transaction, TxKernel};
use consensus;
use consensus::{exceeds_weight, reward, VerifySortOrder, REWARD};
use core::hash::{Hash, HashWriter, Hashed, ZERO_HASH};
@ -30,7 +30,6 @@ use ser::{self, read_and_verify_sorted, Readable, Reader, Writeable, WriteableSo
use global;
use keychain;
use keychain::BlindingFactor;
use util::kernel_sig_msg;
use util::LOGGER;
use util::{secp, static_secp_instance};
@ -402,14 +401,10 @@ impl Block {
pub fn new(
prev: &BlockHeader,
txs: Vec<&Transaction>,
keychain: &keychain::Keychain,
key_id: &keychain::Identifier,
difficulty: Difficulty,
reward_output: (Output, TxKernel),
) -> Result<Block, Error> {
let fees = txs.iter().map(|tx| tx.fee()).sum();
let (reward_out, reward_proof) =
Block::reward_output(keychain, key_id, fees, prev.height + 1)?;
let block = Block::with_reward(prev, txs, reward_out, reward_proof, difficulty)?;
let block = Block::with_reward(prev, txs, reward_output.0, reward_output.1, difficulty)?;
Ok(block)
}
@ -705,7 +700,8 @@ impl Block {
Ok(())
}
fn verify_sums(
/// Verify sums
pub fn verify_sums(
&self,
prev_output_sum: &Commitment,
prev_kernel_sum: &Commitment,
@ -744,12 +740,12 @@ impl Block {
Ok((io_sum, kernel_sum))
}
// Validate the coinbase outputs generated by miners. Entails 2 main checks:
//
// * That the sum of all coinbase-marked outputs equal the supply.
// * That the sum of blinding factors for all coinbase-marked outputs match
// the coinbase-marked kernels.
fn verify_coinbase(&self) -> Result<(), Error> {
/// Validate the coinbase outputs generated by miners. Entails 2 main checks:
///
/// * That the sum of all coinbase-marked outputs equal the supply.
/// * That the sum of blinding factors for all coinbase-marked outputs match
/// the coinbase-marked kernels.
pub fn verify_coinbase(&self) -> Result<(), Error> {
let cb_outs = self.outputs
.iter()
.filter(|out| out.features.contains(OutputFeatures::COINBASE_OUTPUT))
@ -781,430 +777,4 @@ impl Block {
}
Ok(())
}
/// Builds the blinded output and related signature proof for the block
/// reward.
pub fn reward_output(
keychain: &keychain::Keychain,
key_id: &keychain::Identifier,
fees: u64,
height: u64,
) -> Result<(Output, TxKernel), keychain::Error> {
let value = reward(fees);
let commit = keychain.commit(value, key_id)?;
let msg = ProofMessageElements::new(value, key_id);
trace!(LOGGER, "Block reward - Pedersen Commit is: {:?}", commit,);
let rproof = keychain.range_proof(value, key_id, commit, None, msg.to_proof_message())?;
let output = Output {
features: OutputFeatures::COINBASE_OUTPUT,
commit: commit,
proof: rproof,
};
let secp = static_secp_instance();
let secp = secp.lock().unwrap();
let over_commit = secp.commit_value(reward(fees))?;
let out_commit = output.commitment();
let excess = secp.commit_sum(vec![out_commit], vec![over_commit])?;
// NOTE: Remember we sign the fee *and* the lock_height.
// For a coinbase output the fee is 0 and the lock_height is
// the lock_height of the coinbase output itself,
// not the lock_height of the tx (there is no tx for a coinbase output).
// This output will not be spendable earlier than lock_height (and we sign this
// here).
let msg = secp::Message::from_slice(&kernel_sig_msg(0, height))?;
let sig = keychain.aggsig_sign_from_key_id(&msg, &key_id)?;
let proof = TxKernel {
features: KernelFeatures::COINBASE_KERNEL,
excess: excess,
excess_sig: sig,
fee: 0,
// lock_height here is the height of the block (tx should be valid immediately)
// *not* the lock_height of the coinbase output (only spendable 1,000 blocks later)
lock_height: height,
};
Ok((output, proof))
}
}
#[cfg(test)]
mod test {
use std::time::Instant;
use super::*;
use core::Transaction;
use core::build::{self, input, output, with_fee};
use core::test::{tx1i2o, tx2i1o};
use keychain::{Identifier, Keychain};
use consensus::{BLOCK_OUTPUT_WEIGHT, MAX_BLOCK_WEIGHT};
use util::{secp, secp_static};
// utility to create a block without worrying about the key or previous
// header
fn new_block(
txs: Vec<&Transaction>,
keychain: &Keychain,
previous_header: &BlockHeader,
) -> Block {
let key_id = keychain.derive_key_id(1).unwrap();
Block::new(&previous_header, txs, keychain, &key_id, Difficulty::one()).unwrap()
}
// utility producing a transaction that spends an output with the provided
// value and blinding key
fn txspend1i1o(
v: u64,
keychain: &Keychain,
key_id1: Identifier,
key_id2: Identifier,
) -> Transaction {
build::transaction(
vec![input(v, key_id1), output(3, key_id2), with_fee(2)],
&keychain,
).unwrap()
}
// Too slow for now #[test]
// TODO: make this fast enough or add similar but faster test?
#[allow(dead_code)]
fn too_large_block() {
let keychain = Keychain::from_random_seed().unwrap();
let max_out = MAX_BLOCK_WEIGHT / BLOCK_OUTPUT_WEIGHT;
let zero_commit = secp_static::commit_to_zero_value();
let mut pks = vec![];
for n in 0..(max_out + 1) {
pks.push(keychain.derive_key_id(n as u32).unwrap());
}
let mut parts = vec![];
for _ in 0..max_out {
parts.push(output(5, pks.pop().unwrap()));
}
let now = Instant::now();
parts.append(&mut vec![input(500000, pks.pop().unwrap()), with_fee(2)]);
let mut tx = build::transaction(parts, &keychain).unwrap();
println!("Build tx: {}", now.elapsed().as_secs());
let prev = BlockHeader::default();
let b = new_block(vec![&mut tx], &keychain, &prev);
assert!(b.validate(&zero_commit, &zero_commit).is_err());
}
#[test]
// block with no inputs/outputs/kernels
// no fees, no reward, no coinbase
fn very_empty_block() {
let b = Block {
header: BlockHeader::default(),
inputs: vec![],
outputs: vec![],
kernels: vec![],
};
assert_eq!(
b.verify_coinbase(),
Err(Error::Secp(secp::Error::IncorrectCommitSum))
);
}
#[test]
// builds a block with a tx spending another and check that cut_through occurred
fn block_with_cut_through() {
let keychain = Keychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id3 = keychain.derive_key_id(3).unwrap();
let zero_commit = secp_static::commit_to_zero_value();
let mut btx1 = tx2i1o();
let mut btx2 = build::transaction(
vec![input(7, key_id1), output(5, key_id2.clone()), with_fee(2)],
&keychain,
).unwrap();
// spending tx2 - reuse key_id2
let mut btx3 = txspend1i1o(5, &keychain, key_id2.clone(), key_id3);
let prev = BlockHeader::default();
let b = new_block(vec![&mut btx1, &mut btx2, &mut btx3], &keychain, &prev);
// block should have been automatically compacted (including reward
// output) and should still be valid
b.validate(&zero_commit, &zero_commit).unwrap();
assert_eq!(b.inputs.len(), 3);
assert_eq!(b.outputs.len(), 3);
}
#[test]
fn empty_block_with_coinbase_is_valid() {
let keychain = Keychain::from_random_seed().unwrap();
let zero_commit = secp_static::commit_to_zero_value();
let prev = BlockHeader::default();
let b = new_block(vec![], &keychain, &prev);
assert_eq!(b.inputs.len(), 0);
assert_eq!(b.outputs.len(), 1);
assert_eq!(b.kernels.len(), 1);
let coinbase_outputs = b.outputs
.iter()
.filter(|out| out.features.contains(OutputFeatures::COINBASE_OUTPUT))
.map(|o| o.clone())
.collect::<Vec<_>>();
assert_eq!(coinbase_outputs.len(), 1);
let coinbase_kernels = b.kernels
.iter()
.filter(|out| out.features.contains(KernelFeatures::COINBASE_KERNEL))
.map(|o| o.clone())
.collect::<Vec<_>>();
assert_eq!(coinbase_kernels.len(), 1);
// the block should be valid here (single coinbase output with corresponding
// txn kernel)
assert!(b.validate(&zero_commit, &zero_commit).is_ok());
}
#[test]
// test that flipping the COINBASE_OUTPUT flag on the output features
// invalidates the block and specifically it causes verify_coinbase to fail
// additionally verifying the merkle_inputs_outputs also fails
fn remove_coinbase_output_flag() {
let keychain = Keychain::from_random_seed().unwrap();
let zero_commit = secp_static::commit_to_zero_value();
let prev = BlockHeader::default();
let mut b = new_block(vec![], &keychain, &prev);
assert!(
b.outputs[0]
.features
.contains(OutputFeatures::COINBASE_OUTPUT)
);
b.outputs[0]
.features
.remove(OutputFeatures::COINBASE_OUTPUT);
assert_eq!(b.verify_coinbase(), Err(Error::CoinbaseSumMismatch));
assert!(b.verify_sums(&zero_commit, &zero_commit).is_ok());
assert_eq!(
b.validate(&zero_commit, &zero_commit),
Err(Error::CoinbaseSumMismatch)
);
}
#[test]
// test that flipping the COINBASE_KERNEL flag on the kernel features
// invalidates the block and specifically it causes verify_coinbase to fail
fn remove_coinbase_kernel_flag() {
let keychain = Keychain::from_random_seed().unwrap();
let zero_commit = secp_static::commit_to_zero_value();
let prev = BlockHeader::default();
let mut b = new_block(vec![], &keychain, &prev);
assert!(
b.kernels[0]
.features
.contains(KernelFeatures::COINBASE_KERNEL)
);
b.kernels[0]
.features
.remove(KernelFeatures::COINBASE_KERNEL);
assert_eq!(
b.verify_coinbase(),
Err(Error::Secp(secp::Error::IncorrectCommitSum))
);
assert_eq!(
b.validate(&zero_commit, &zero_commit),
Err(Error::Secp(secp::Error::IncorrectCommitSum))
);
}
#[test]
fn serialize_deserialize_block() {
let keychain = Keychain::from_random_seed().unwrap();
let prev = BlockHeader::default();
let b = new_block(vec![], &keychain, &prev);
let mut vec = Vec::new();
ser::serialize(&mut vec, &b).expect("serialization failed");
let b2: Block = ser::deserialize(&mut &vec[..]).unwrap();
assert_eq!(b.header, b2.header);
assert_eq!(b.inputs, b2.inputs);
assert_eq!(b.outputs, b2.outputs);
assert_eq!(b.kernels, b2.kernels);
}
#[test]
fn empty_block_serialized_size() {
let keychain = Keychain::from_random_seed().unwrap();
let prev = BlockHeader::default();
let b = new_block(vec![], &keychain, &prev);
let mut vec = Vec::new();
ser::serialize(&mut vec, &b).expect("serialization failed");
let target_len = 1_216;
assert_eq!(vec.len(), target_len,);
}
#[test]
fn block_single_tx_serialized_size() {
let keychain = Keychain::from_random_seed().unwrap();
let tx1 = tx1i2o();
let prev = BlockHeader::default();
let b = new_block(vec![&tx1], &keychain, &prev);
let mut vec = Vec::new();
ser::serialize(&mut vec, &b).expect("serialization failed");
let target_len = 2_796;
assert_eq!(vec.len(), target_len);
}
#[test]
fn empty_compact_block_serialized_size() {
let keychain = Keychain::from_random_seed().unwrap();
let prev = BlockHeader::default();
let b = new_block(vec![], &keychain, &prev);
let mut vec = Vec::new();
ser::serialize(&mut vec, &b.as_compact_block()).expect("serialization failed");
let target_len = 1_224;
assert_eq!(vec.len(), target_len,);
}
#[test]
fn compact_block_single_tx_serialized_size() {
let keychain = Keychain::from_random_seed().unwrap();
let tx1 = tx1i2o();
let prev = BlockHeader::default();
let b = new_block(vec![&tx1], &keychain, &prev);
let mut vec = Vec::new();
ser::serialize(&mut vec, &b.as_compact_block()).expect("serialization failed");
let target_len = 1_230;
assert_eq!(vec.len(), target_len,);
}
#[test]
fn block_10_tx_serialized_size() {
let keychain = Keychain::from_random_seed().unwrap();
global::set_mining_mode(global::ChainTypes::Mainnet);
let mut txs = vec![];
for _ in 0..10 {
let tx = tx1i2o();
txs.push(tx);
}
let prev = BlockHeader::default();
let b = new_block(txs.iter().collect(), &keychain, &prev);
let mut vec = Vec::new();
ser::serialize(&mut vec, &b).expect("serialization failed");
let target_len = 17_016;
assert_eq!(vec.len(), target_len,);
}
#[test]
fn compact_block_10_tx_serialized_size() {
let keychain = Keychain::from_random_seed().unwrap();
let mut txs = vec![];
for _ in 0..10 {
let tx = tx1i2o();
txs.push(tx);
}
let prev = BlockHeader::default();
let b = new_block(txs.iter().collect(), &keychain, &prev);
let mut vec = Vec::new();
ser::serialize(&mut vec, &b.as_compact_block()).expect("serialization failed");
let target_len = 1_284;
assert_eq!(vec.len(), target_len,);
}
#[test]
fn compact_block_hash_with_nonce() {
let keychain = Keychain::from_random_seed().unwrap();
let tx = tx1i2o();
let prev = BlockHeader::default();
let b = new_block(vec![&tx], &keychain, &prev);
let cb1 = b.as_compact_block();
let cb2 = b.as_compact_block();
// random nonce will not affect the hash of the compact block itself
// hash is based on header POW only
assert!(cb1.nonce != cb2.nonce);
assert_eq!(b.hash(), cb1.hash());
assert_eq!(cb1.hash(), cb2.hash());
assert!(cb1.kern_ids[0] != cb2.kern_ids[0]);
// check we can identify the specified kernel from the short_id
// correctly in both of the compact_blocks
assert_eq!(
cb1.kern_ids[0],
tx.kernels[0].short_id(&cb1.hash(), cb1.nonce)
);
assert_eq!(
cb2.kern_ids[0],
tx.kernels[0].short_id(&cb2.hash(), cb2.nonce)
);
}
#[test]
fn convert_block_to_compact_block() {
let keychain = Keychain::from_random_seed().unwrap();
let tx1 = tx1i2o();
let prev = BlockHeader::default();
let b = new_block(vec![&tx1], &keychain, &prev);
let cb = b.as_compact_block();
assert_eq!(cb.out_full.len(), 1);
assert_eq!(cb.kern_full.len(), 1);
assert_eq!(cb.kern_ids.len(), 1);
assert_eq!(
cb.kern_ids[0],
b.kernels
.iter()
.find(|x| !x.features.contains(KernelFeatures::COINBASE_KERNEL))
.unwrap()
.short_id(&cb.hash(), cb.nonce)
);
}
#[test]
fn hydrate_empty_compact_block() {
let keychain = Keychain::from_random_seed().unwrap();
let prev = BlockHeader::default();
let b = new_block(vec![], &keychain, &prev);
let cb = b.as_compact_block();
let hb = Block::hydrate_from(cb, vec![]);
assert_eq!(hb.header, b.header);
assert_eq!(hb.outputs, b.outputs);
assert_eq!(hb.kernels, b.kernels);
}
#[test]
fn serialize_deserialize_compact_block() {
let b = CompactBlock {
header: BlockHeader::default(),
nonce: 0,
out_full: vec![],
kern_full: vec![],
kern_ids: vec![ShortId::zero()],
};
let mut vec = Vec::new();
ser::serialize(&mut vec, &b).expect("serialization failed");
let b2: CompactBlock = ser::deserialize(&mut &vec[..]).unwrap();
assert_eq!(b.header, b2.header);
assert_eq!(b.kern_ids, b2.kern_ids);
}
}

View file

@ -15,13 +15,11 @@
//! Core types
pub mod block;
pub mod build;
pub mod hash;
pub mod id;
pub mod pmmr;
pub mod target;
pub mod transaction;
// pub mod txoset;
#[allow(dead_code)]
use rand::{thread_rng, Rng};
@ -274,14 +272,6 @@ pub fn amount_to_hr_string(amount: u64) -> String {
#[cfg(test)]
mod test {
use super::*;
use core::target::Difficulty;
use core::hash::ZERO_HASH;
use core::build::{initial_tx, input, output, with_excess, with_fee, with_lock_height};
use core::block::Error::KernelLockHeight;
use ser;
use keychain;
use keychain::Keychain;
use util::secp_static;
#[test]
pub fn test_amount_to_hr() {
@ -304,550 +294,4 @@ mod test {
assert!("5000000000.000000000" == amount_to_hr_string(5_000_000_000_000_000_000));
}
#[test]
#[should_panic(expected = "InvalidSecretKey")]
fn test_zero_commit_fails() {
let keychain = Keychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
// blinding should fail as signing with a zero r*G shouldn't work
build::transaction(
vec![
input(10, key_id1.clone()),
output(9, key_id1.clone()),
with_fee(1),
],
&keychain,
).unwrap();
}
#[test]
fn simple_tx_ser() {
let tx = tx2i1o();
let mut vec = Vec::new();
ser::serialize(&mut vec, &tx).expect("serialization failed");
let target_len = 954;
assert_eq!(vec.len(), target_len,);
}
#[test]
fn simple_tx_ser_deser() {
let tx = tx2i1o();
let mut vec = Vec::new();
ser::serialize(&mut vec, &tx).expect("serialization failed");
let dtx: Transaction = ser::deserialize(&mut &vec[..]).unwrap();
assert_eq!(dtx.fee(), 2);
assert_eq!(dtx.inputs.len(), 2);
assert_eq!(dtx.outputs.len(), 1);
assert_eq!(tx.hash(), dtx.hash());
}
#[test]
fn tx_double_ser_deser() {
// checks serializing doesn't mess up the tx and produces consistent results
let btx = tx2i1o();
let mut vec = Vec::new();
assert!(ser::serialize(&mut vec, &btx).is_ok());
let dtx: Transaction = ser::deserialize(&mut &vec[..]).unwrap();
let mut vec2 = Vec::new();
assert!(ser::serialize(&mut vec2, &btx).is_ok());
let dtx2: Transaction = ser::deserialize(&mut &vec2[..]).unwrap();
assert_eq!(btx.hash(), dtx.hash());
assert_eq!(dtx.hash(), dtx2.hash());
}
#[test]
fn build_tx_kernel() {
let keychain = Keychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id3 = keychain.derive_key_id(3).unwrap();
// first build a valid tx with corresponding blinding factor
let tx = build::transaction(
vec![
input(10, key_id1),
output(5, key_id2),
output(3, key_id3),
with_fee(2),
],
&keychain,
).unwrap();
// check the tx is valid
tx.validate().unwrap();
// check the kernel is also itself valid
assert_eq!(tx.kernels.len(), 1);
let kern = &tx.kernels[0];
kern.verify().unwrap();
assert_eq!(kern.features, KernelFeatures::DEFAULT_KERNEL);
assert_eq!(kern.fee, tx.fee());
}
// Combine two transactions into one big transaction (with multiple kernels)
// and check it still validates.
#[test]
fn transaction_cut_through() {
let tx1 = tx1i2o();
let tx2 = tx2i1o();
assert!(tx1.validate().is_ok());
assert!(tx2.validate().is_ok());
// now build a "cut_through" tx from tx1 and tx2
let tx3 = aggregate_with_cut_through(vec![tx1, tx2]).unwrap();
assert!(tx3.validate().is_ok());
}
// Attempt to deaggregate a multi-kernel transaction in a different way
#[test]
fn multi_kernel_transaction_deaggregation() {
let tx1 = tx1i1o();
let tx2 = tx1i1o();
let tx3 = tx1i1o();
let tx4 = tx1i1o();
assert!(tx1.validate().is_ok());
assert!(tx2.validate().is_ok());
assert!(tx3.validate().is_ok());
assert!(tx4.validate().is_ok());
let tx1234 = aggregate(vec![tx1.clone(), tx2.clone(), tx3.clone(), tx4.clone()]).unwrap();
let tx12 = aggregate(vec![tx1.clone(), tx2.clone()]).unwrap();
let tx34 = aggregate(vec![tx3.clone(), tx4.clone()]).unwrap();
assert!(tx1234.validate().is_ok());
assert!(tx12.validate().is_ok());
assert!(tx34.validate().is_ok());
let deaggregated_tx34 = deaggregate(tx1234.clone(), vec![tx12.clone()]).unwrap();
assert!(deaggregated_tx34.validate().is_ok());
assert_eq!(tx34, deaggregated_tx34);
let deaggregated_tx12 = deaggregate(tx1234.clone(), vec![tx34.clone()]).unwrap();
assert!(deaggregated_tx12.validate().is_ok());
assert_eq!(tx12, deaggregated_tx12);
}
#[test]
fn multi_kernel_transaction_deaggregation_2() {
// three independent single-kernel transactions
let tx1 = tx1i1o();
let tx2 = tx1i1o();
let tx3 = tx1i1o();
assert!(tx1.validate().is_ok());
assert!(tx2.validate().is_ok());
assert!(tx3.validate().is_ok());
let tx123 = aggregate(vec![tx1.clone(), tx2.clone(), tx3.clone()]).unwrap();
let tx12 = aggregate(vec![tx1.clone(), tx2.clone()]).unwrap();
assert!(tx123.validate().is_ok());
assert!(tx12.validate().is_ok());
// deaggregating the tx1+tx2 aggregate from all three should recover tx3 exactly
let deaggregated_tx3 = deaggregate(tx123.clone(), vec![tx12.clone()]).unwrap();
assert!(deaggregated_tx3.validate().is_ok());
assert_eq!(tx3, deaggregated_tx3);
}
#[test]
fn multi_kernel_transaction_deaggregation_3() {
    // Three single-kernel txs; aggregate all three, then subtract a
    // single-tx aggregation of tx2 and expect to recover tx1+tx3.
    let tx1 = tx1i1o();
    let tx2 = tx1i1o();
    let tx3 = tx1i1o();

    assert!(tx1.validate().is_ok());
    assert!(tx2.validate().is_ok());
    assert!(tx3.validate().is_ok());

    let tx123 = aggregate(vec![tx1.clone(), tx2.clone(), tx3.clone()]).unwrap();
    let tx13 = aggregate(vec![tx1.clone(), tx3.clone()]).unwrap();
    // aggregating a single tx still goes through the aggregation path
    let tx2 = aggregate(vec![tx2.clone()]).unwrap();

    assert!(tx123.validate().is_ok());
    // sanity-check both intermediate aggregations (tx13 was previously
    // never validated before the equality comparison below)
    assert!(tx13.validate().is_ok());
    assert!(tx2.validate().is_ok());

    let deaggregated_tx13 = deaggregate(tx123.clone(), vec![tx2.clone()]).unwrap();
    assert!(deaggregated_tx13.validate().is_ok());
    assert_eq!(tx13, deaggregated_tx13);
}
#[test]
fn multi_kernel_transaction_deaggregation_4() {
// five independent single-kernel transactions
let tx1 = tx1i1o();
let tx2 = tx1i1o();
let tx3 = tx1i1o();
let tx4 = tx1i1o();
let tx5 = tx1i1o();
assert!(tx1.validate().is_ok());
assert!(tx2.validate().is_ok());
assert!(tx3.validate().is_ok());
assert!(tx4.validate().is_ok());
assert!(tx5.validate().is_ok());
// aggregate all five into one multi-kernel transaction
let tx12345 = aggregate(vec![
tx1.clone(),
tx2.clone(),
tx3.clone(),
tx4.clone(),
tx5.clone(),
]).unwrap();
assert!(tx12345.validate().is_ok());
// removing the four individual txs should leave exactly tx5
let deaggregated_tx5 = deaggregate(
tx12345.clone(),
vec![tx1.clone(), tx2.clone(), tx3.clone(), tx4.clone()],
).unwrap();
assert!(deaggregated_tx5.validate().is_ok());
assert_eq!(tx5, deaggregated_tx5);
}
#[test]
fn multi_kernel_transaction_deaggregation_5() {
// five independent single-kernel transactions
let tx1 = tx1i1o();
let tx2 = tx1i1o();
let tx3 = tx1i1o();
let tx4 = tx1i1o();
let tx5 = tx1i1o();
assert!(tx1.validate().is_ok());
assert!(tx2.validate().is_ok());
assert!(tx3.validate().is_ok());
assert!(tx4.validate().is_ok());
assert!(tx5.validate().is_ok());
let tx12345 = aggregate(vec![
tx1.clone(),
tx2.clone(),
tx3.clone(),
tx4.clone(),
tx5.clone(),
]).unwrap();
// this time subtract pre-aggregated pairs rather than individual txs
let tx12 = aggregate(vec![tx1.clone(), tx2.clone()]).unwrap();
let tx34 = aggregate(vec![tx3.clone(), tx4.clone()]).unwrap();
assert!(tx12345.validate().is_ok());
// removing tx12 and tx34 (covering tx1..tx4) should leave exactly tx5
let deaggregated_tx5 =
deaggregate(tx12345.clone(), vec![tx12.clone(), tx34.clone()]).unwrap();
assert!(deaggregated_tx5.validate().is_ok());
assert_eq!(tx5, deaggregated_tx5);
}
// Attempt to deaggregate a multi-kernel transaction
#[test]
fn basic_transaction_deaggregation() {
    let tx1 = tx1i2o();
    let tx2 = tx2i1o();
    assert!(tx1.validate().is_ok());
    assert!(tx2.validate().is_ok());

    // aggregate the two txs into a single multi-kernel transaction
    let combined = aggregate(vec![tx1.clone(), tx2.clone()]).unwrap();
    assert!(combined.validate().is_ok());

    // removing either constituent should yield the other one back exactly
    for (removed, expected) in vec![(&tx2, &tx1), (&tx1, &tx2)] {
        let remainder = deaggregate(combined.clone(), vec![removed.clone()]).unwrap();
        assert!(remainder.validate().is_ok());
        assert_eq!(*expected, remainder);
    }
}
#[test]
fn hash_output() {
let keychain = Keychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id3 = keychain.derive_key_id(3).unwrap();
// 75 in -> 42 out + 32 out + 1 fee
let tx = build::transaction(
vec![
input(75, key_id1),
output(42, key_id2),
output(32, key_id3),
with_fee(1),
],
&keychain,
).unwrap();
// output hashes must be non-zero and distinct per output
let h = tx.outputs[0].hash();
assert!(h != ZERO_HASH);
let h2 = tx.outputs[1].hash();
assert!(h != h2);
}
// Checks that the range proof on a blinded output hides the value.
// NOTE(review): ignored because range_proof_info on a bullet proof
// currently errors out (see comment below) -- re-enable when fixed.
#[ignore]
#[test]
fn blind_tx() {
let btx = tx2i1o();
assert!(btx.validate().is_ok());
// Ignored for bullet proofs, because calling range_proof_info
// with a bullet proof causes painful errors
// checks that the range proof on our blind output is sufficiently hiding
let Output { proof, .. } = btx.outputs[0];
let secp = static_secp_instance();
let secp = secp.lock().unwrap();
let info = secp.range_proof_info(proof);
// a fully hiding proof covers the entire u64 value range
assert!(info.min == 0);
assert!(info.max == u64::max_value());
}
#[test]
fn tx_hash_diff() {
    // two structurally different transactions must not collide on hash
    let btx1 = tx2i1o();
    let btx2 = tx1i1o();
    assert!(btx1.hash() != btx2.hash(), "diff txs have same hash");
}
/// Simulate the standard exchange between 2 parties when creating a basic
/// 2 inputs, 2 outputs transaction.
#[test]
fn tx_build_exchange() {
let keychain = Keychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id3 = keychain.derive_key_id(3).unwrap();
let key_id4 = keychain.derive_key_id(4).unwrap();
let (tx_alice, blind_sum) = {
// Alice gets 2 of her pre-existing outputs to send 5 coins to Bob, they
// become inputs in the new transaction
let (in1, in2) = (input(4, key_id1), input(3, key_id2));
// Alice builds her transaction, with change, which also produces the sum
// of blinding factors before they're obscured.
let (tx, sum) = build::partial_transaction(
vec![in1, in2, output(1, key_id3), with_fee(2)],
&keychain,
).unwrap();
(tx, sum)
};
// From now on, Bob only has the obscured transaction and the sum of
// blinding factors. He adds his output, finalizes the transaction so it's
// ready for broadcast.
let tx_final = build::transaction(
vec![
initial_tx(tx_alice),
with_excess(blind_sum),
output(4, key_id4),
],
&keychain,
).unwrap();
// the finished tx must pass full validation
tx_final.validate().unwrap();
}
#[test]
fn reward_empty_block() {
let keychain = keychain::Keychain::from_random_seed().unwrap();
let key_id = keychain.derive_key_id(1).unwrap();
let zero_commit = secp_static::commit_to_zero_value();
let previous_header = BlockHeader::default();
// a block with no transactions still carries the coinbase reward
let b = Block::new(
&previous_header,
vec![],
&keychain,
&key_id,
Difficulty::one(),
).unwrap();
// compact the block, then validate against zero sum commitments
b.cut_through()
.validate(&zero_commit, &zero_commit)
.unwrap();
}
#[test]
fn reward_with_tx_block() {
let keychain = keychain::Keychain::from_random_seed().unwrap();
let key_id = keychain.derive_key_id(1).unwrap();
let zero_commit = secp_static::commit_to_zero_value();
// a valid 2-in/1-out tx to include alongside the coinbase
let mut tx1 = tx2i1o();
tx1.validate().unwrap();
let previous_header = BlockHeader::default();
let block = Block::new(
&previous_header,
vec![&mut tx1],
&keychain,
&key_id,
Difficulty::one(),
).unwrap();
// compact then validate the block containing the tx + reward
block
.cut_through()
.validate(&zero_commit, &zero_commit)
.unwrap();
}
#[test]
fn simple_block() {
let keychain = keychain::Keychain::from_random_seed().unwrap();
let key_id = keychain.derive_key_id(1).unwrap();
let zero_commit = secp_static::commit_to_zero_value();
// two ordinary txs plus the coinbase reward
let mut tx1 = tx2i1o();
let mut tx2 = tx1i1o();
let previous_header = BlockHeader::default();
let b = Block::new(
&previous_header,
vec![&mut tx1, &mut tx2],
&keychain,
&key_id,
Difficulty::one(),
).unwrap();
// the assembled block must validate against zero sum commitments
b.validate(&zero_commit, &zero_commit).unwrap();
}
#[test]
fn test_block_with_timelocked_tx() {
    let keychain = keychain::Keychain::from_random_seed().unwrap();
    let key_id1 = keychain.derive_key_id(1).unwrap();
    let key_id2 = keychain.derive_key_id(2).unwrap();
    let key_id3 = keychain.derive_key_id(3).unwrap();
    let zero_commit = secp_static::commit_to_zero_value();

    // first check we can add a timelocked tx where lock height matches current
    // block height and that the resulting block is valid
    let tx1 = build::transaction(
        vec![
            input(5, key_id1.clone()),
            output(3, key_id2.clone()),
            with_fee(2),
            with_lock_height(1),
        ],
        &keychain,
    ).unwrap();

    let previous_header = BlockHeader::default();
    let b = Block::new(
        &previous_header,
        vec![&tx1],
        &keychain,
        // key_id3 is only borrowed here -- no clone needed
        &key_id3,
        Difficulty::one(),
    ).unwrap();
    b.validate(&zero_commit, &zero_commit).unwrap();

    // now try adding a timelocked tx where lock height is greater than current
    // block height
    let tx1 = build::transaction(
        vec![
            input(5, key_id1.clone()),
            output(3, key_id2.clone()),
            with_fee(2),
            with_lock_height(2),
        ],
        &keychain,
    ).unwrap();

    let previous_header = BlockHeader::default();
    let b = Block::new(
        &previous_header,
        vec![&tx1],
        &keychain,
        &key_id3,
        Difficulty::one(),
    ).unwrap();
    // validation must surface the offending lock height
    match b.validate(&zero_commit, &zero_commit) {
        Err(KernelLockHeight(height)) => {
            assert_eq!(height, 2);
        }
        _ => panic!("expecting KernelLockHeight error here"),
    }
}
#[test]
pub fn test_verify_1i1o_sig() {
    // a 1-input/1-output tx must carry a valid kernel signature
    tx1i1o().validate().unwrap();
}
#[test]
pub fn test_verify_2i1o_sig() {
    // a 2-input/1-output tx must carry a valid kernel signature
    tx2i1o().validate().unwrap();
}
// utility producing a transaction with 2 inputs and a single outputs
// 10 + 11 in -> 19 out + 2 fee; built with a kernel offset
pub fn tx2i1o() -> Transaction {
let keychain = keychain::Keychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id3 = keychain.derive_key_id(3).unwrap();
build::transaction_with_offset(
vec![
input(10, key_id1),
input(11, key_id2),
output(19, key_id3),
with_fee(2),
],
&keychain,
).unwrap()
}
// utility producing a transaction with a single input and output
// 5 in -> 3 out + 2 fee; built with a kernel offset
pub fn tx1i1o() -> Transaction {
let keychain = keychain::Keychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
build::transaction_with_offset(
vec![input(5, key_id1), output(3, key_id2), with_fee(2)],
&keychain,
).unwrap()
}
// utility producing a transaction with a single input
// and two outputs (one change output)
// Note: this tx has an "offset" kernel
// 6 in -> 3 out + 1 change + 2 fee
pub fn tx1i2o() -> Transaction {
let keychain = keychain::Keychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id3 = keychain.derive_key_id(3).unwrap();
build::transaction_with_offset(
vec![
input(6, key_id1),
output(3, key_id2),
output(1, key_id3),
with_fee(2),
],
&keychain,
).unwrap()
}
}

View file

@ -30,7 +30,7 @@ use core::BlockHeader;
use core::hash::{Hash, Hashed, ZERO_HASH};
use core::pmmr::MerkleProof;
use keychain;
use keychain::{BlindingFactor, Keychain};
use keychain::BlindingFactor;
use ser::{self, read_and_verify_sorted, ser_vec, PMMRable, Readable, Reader, Writeable,
WriteableSorted, Writer};
use util;
@ -186,7 +186,15 @@ impl TxKernel {
let secp = static_secp_instance();
let secp = secp.lock().unwrap();
let sig = &self.excess_sig;
let valid = Keychain::aggsig_verify_single_from_commit(&secp, &sig, &msg, &self.excess);
// Verify aggsig directly in libsecp
let pubkeys = &self.excess.to_two_pubkeys(&secp);
let mut valid = false;
for i in 0..pubkeys.len() {
valid = secp::aggsig::verify_single(&secp, &sig, &msg, None, &pubkeys[i], false);
if valid {
break;
}
}
if !valid {
return Err(secp::Error::IncorrectSignature);
}
@ -947,7 +955,7 @@ impl Output {
pub fn verify_proof(&self) -> Result<(), secp::Error> {
let secp = static_secp_instance();
let secp = secp.lock().unwrap();
match Keychain::verify_range_proof(&secp, self.commit, self.proof, None) {
match secp.verify_bullet_proof(self.commit, self.proof, None) {
Ok(_) => Ok(()),
Err(e) => Err(e),
}
@ -1196,29 +1204,6 @@ mod test {
assert_eq!(kernel2.fee, 10);
}
#[test]
fn test_output_ser_deser() {
let keychain = Keychain::from_random_seed().unwrap();
let key_id = keychain.derive_key_id(1).unwrap();
let commit = keychain.commit(5, &key_id).unwrap();
let msg = secp::pedersen::ProofMessage::empty();
let proof = keychain.range_proof(5, &key_id, commit, None, msg).unwrap();
let out = Output {
features: OutputFeatures::DEFAULT_OUTPUT,
commit: commit,
proof: proof,
};
let mut vec = vec![];
ser::serialize(&mut vec, &out).expect("serialized failed");
let dout: Output = ser::deserialize(&mut &vec[..]).unwrap();
assert_eq!(dout.features, OutputFeatures::DEFAULT_OUTPUT);
assert_eq!(dout.commit, out.commit);
assert_eq!(dout.proof, out.proof);
}
#[test]
fn commit_consistency() {
let keychain = Keychain::from_seed(&[0; 32]).unwrap();

View file

@ -120,7 +120,6 @@ mod test {
use global;
use core::target::Difficulty;
use genesis;
use global::ChainTypes;
/// We'll be generating genesis blocks differently
#[ignore]

393
core/tests/block.rs Normal file
View file

@ -0,0 +1,393 @@
// Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate grin_core;
extern crate grin_keychain as keychain;
extern crate grin_util as util;
extern crate grin_wallet as wallet;
pub mod common;
use grin_core::core::{Block, BlockHeader, CompactBlock, KernelFeatures, OutputFeatures};
use grin_core::core::hash::Hashed;
use grin_core::core::block::Error;
use grin_core::core::id::{ShortId, ShortIdentifiable};
use wallet::libwallet::build::{self, input, output, with_fee};
use common::{new_block, tx1i2o, tx2i1o, txspend1i1o};
use keychain::Keychain;
use grin_core::consensus::{BLOCK_OUTPUT_WEIGHT, MAX_BLOCK_WEIGHT};
use grin_core::ser;
use grin_core::global;
use std::time::Instant;
use util::{secp, secp_static};
// Too slow for now #[test]
// TODO: make this fast enough or add similar but faster test?
#[allow(dead_code)]
fn too_large_block() {
let keychain = Keychain::from_random_seed().unwrap();
// one more output than fits within the max block weight
let max_out = MAX_BLOCK_WEIGHT / BLOCK_OUTPUT_WEIGHT;
let zero_commit = secp_static::commit_to_zero_value();
let mut pks = vec![];
for n in 0..(max_out + 1) {
pks.push(keychain.derive_key_id(n as u32).unwrap());
}
// build max_out outputs plus one funding input
let mut parts = vec![];
for _ in 0..max_out {
parts.push(output(5, pks.pop().unwrap()));
}
let now = Instant::now();
parts.append(&mut vec![input(500000, pks.pop().unwrap()), with_fee(2)]);
let mut tx = build::transaction(parts, &keychain).unwrap();
println!("Build tx: {}", now.elapsed().as_secs());
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let b = new_block(vec![&mut tx], &keychain, &prev, &key_id);
// the oversized block must fail validation
assert!(b.validate(&zero_commit, &zero_commit).is_err());
}
#[test]
// block with no inputs/outputs/kernels
// no fees, no reward, no coinbase
fn very_empty_block() {
    let empty = Block {
        header: BlockHeader::default(),
        inputs: Vec::new(),
        outputs: Vec::new(),
        kernels: Vec::new(),
    };
    // without any coinbase output the commitment sums cannot balance
    assert_eq!(
        empty.verify_coinbase(),
        Err(Error::Secp(secp::Error::IncorrectCommitSum))
    );
}
#[test]
// builds a block with a tx spending another and check that cut_through occurred
fn block_with_cut_through() {
let keychain = Keychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id3 = keychain.derive_key_id(3).unwrap();
let zero_commit = secp_static::commit_to_zero_value();
let mut btx1 = tx2i1o();
// btx2 creates the output (to key_id2) that btx3 then spends
let mut btx2 = build::transaction(
vec![input(7, key_id1), output(5, key_id2.clone()), with_fee(2)],
&keychain,
).unwrap();
// spending tx2 - reuse key_id2
let mut btx3 = txspend1i1o(5, &keychain, key_id2.clone(), key_id3);
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let b = new_block(
vec![&mut btx1, &mut btx2, &mut btx3],
&keychain,
&prev,
&key_id,
);
// block should have been automatically compacted (including reward
// output) and should still be valid
b.validate(&zero_commit, &zero_commit).unwrap();
// the btx2 output spent by btx3 was eliminated by cut-through
assert_eq!(b.inputs.len(), 3);
assert_eq!(b.outputs.len(), 3);
}
#[test]
fn empty_block_with_coinbase_is_valid() {
    let keychain = Keychain::from_random_seed().unwrap();
    let zero_commit = secp_static::commit_to_zero_value();
    let prev = BlockHeader::default();
    let key_id = keychain.derive_key_id(1).unwrap();
    let b = new_block(vec![], &keychain, &prev, &key_id);

    // an "empty" block still contains the coinbase reward output and kernel
    assert_eq!(b.inputs.len(), 0);
    assert_eq!(b.outputs.len(), 1);
    assert_eq!(b.kernels.len(), 1);

    // exactly one output carries the coinbase flag
    // (`.cloned()` replaces the former `.map(|o| o.clone())`)
    let coinbase_outputs = b.outputs
        .iter()
        .filter(|out| out.features.contains(OutputFeatures::COINBASE_OUTPUT))
        .cloned()
        .collect::<Vec<_>>();
    assert_eq!(coinbase_outputs.len(), 1);

    // exactly one kernel carries the coinbase flag
    let coinbase_kernels = b.kernels
        .iter()
        .filter(|kern| kern.features.contains(KernelFeatures::COINBASE_KERNEL))
        .cloned()
        .collect::<Vec<_>>();
    assert_eq!(coinbase_kernels.len(), 1);

    // the block should be valid here (single coinbase output with corresponding
    // txn kernel)
    assert!(b.validate(&zero_commit, &zero_commit).is_ok());
}
#[test]
// test that flipping the COINBASE_OUTPUT flag on the output features
// invalidates the block and specifically it causes verify_coinbase to fail
// additionally verifying the merkle_inputs_outputs also fails
fn remove_coinbase_output_flag() {
let keychain = Keychain::from_random_seed().unwrap();
let zero_commit = secp_static::commit_to_zero_value();
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let mut b = new_block(vec![], &keychain, &prev, &key_id);
// sanity: the reward output starts out flagged as coinbase
assert!(
b.outputs[0]
.features
.contains(OutputFeatures::COINBASE_OUTPUT)
);
b.outputs[0]
.features
.remove(OutputFeatures::COINBASE_OUTPUT);
// coinbase check fails, but the raw commitment sums still balance
assert_eq!(b.verify_coinbase(), Err(Error::CoinbaseSumMismatch));
assert!(b.verify_sums(&zero_commit, &zero_commit).is_ok());
assert_eq!(
b.validate(&zero_commit, &zero_commit),
Err(Error::CoinbaseSumMismatch)
);
}
#[test]
// test that flipping the COINBASE_KERNEL flag on the kernel features
// invalidates the block and specifically it causes verify_coinbase to fail
fn remove_coinbase_kernel_flag() {
let keychain = Keychain::from_random_seed().unwrap();
let zero_commit = secp_static::commit_to_zero_value();
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let mut b = new_block(vec![], &keychain, &prev, &key_id);
// sanity: the reward kernel starts out flagged as coinbase
assert!(
b.kernels[0]
.features
.contains(KernelFeatures::COINBASE_KERNEL)
);
b.kernels[0]
.features
.remove(KernelFeatures::COINBASE_KERNEL);
// without the kernel flag the coinbase sums no longer balance
assert_eq!(
b.verify_coinbase(),
Err(Error::Secp(secp::Error::IncorrectCommitSum))
);
assert_eq!(
b.validate(&zero_commit, &zero_commit),
Err(Error::Secp(secp::Error::IncorrectCommitSum))
);
}
#[test]
fn serialize_deserialize_block() {
    let keychain = Keychain::from_random_seed().unwrap();
    let prev = BlockHeader::default();
    let key_id = keychain.derive_key_id(1).unwrap();
    let block = new_block(vec![], &keychain, &prev, &key_id);

    // round-trip the block through the binary serializer
    let mut buf = Vec::new();
    ser::serialize(&mut buf, &block).expect("serialization failed");
    let decoded: Block = ser::deserialize(&mut &buf[..]).unwrap();

    // every component must survive the round trip untouched
    assert_eq!(block.header, decoded.header);
    assert_eq!(block.inputs, decoded.inputs);
    assert_eq!(block.outputs, decoded.outputs);
    assert_eq!(block.kernels, decoded.kernels);
}
#[test]
fn empty_block_serialized_size() {
let keychain = Keychain::from_random_seed().unwrap();
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let b = new_block(vec![], &keychain, &prev, &key_id);
let mut vec = Vec::new();
ser::serialize(&mut vec, &b).expect("serialization failed");
// pins the exact wire size of an empty block (header + coinbase);
// any change to the serialization format will move this number
let target_len = 1_216;
assert_eq!(vec.len(), target_len,);
}
#[test]
fn block_single_tx_serialized_size() {
let keychain = Keychain::from_random_seed().unwrap();
let tx1 = tx1i2o();
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let b = new_block(vec![&tx1], &keychain, &prev, &key_id);
let mut vec = Vec::new();
ser::serialize(&mut vec, &b).expect("serialization failed");
// pins the exact wire size of a block carrying one 1-in/2-out tx
let target_len = 2_796;
assert_eq!(vec.len(), target_len);
}
#[test]
fn empty_compact_block_serialized_size() {
let keychain = Keychain::from_random_seed().unwrap();
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let b = new_block(vec![], &keychain, &prev, &key_id);
let mut vec = Vec::new();
ser::serialize(&mut vec, &b.as_compact_block()).expect("serialization failed");
// compact form is only slightly larger than the empty block (extra nonce etc.)
let target_len = 1_224;
assert_eq!(vec.len(), target_len,);
}
#[test]
fn compact_block_single_tx_serialized_size() {
let keychain = Keychain::from_random_seed().unwrap();
let tx1 = tx1i2o();
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let b = new_block(vec![&tx1], &keychain, &prev, &key_id);
let mut vec = Vec::new();
ser::serialize(&mut vec, &b.as_compact_block()).expect("serialization failed");
// only a short id is added for the tx kernel, so the compact form stays small
let target_len = 1_230;
assert_eq!(vec.len(), target_len,);
}
#[test]
fn block_10_tx_serialized_size() {
let keychain = Keychain::from_random_seed().unwrap();
// NOTE(review): mutates global chain-type state; may affect other tests
// running in the same process -- confirm test isolation
global::set_mining_mode(global::ChainTypes::Mainnet);
let mut txs = vec![];
for _ in 0..10 {
let tx = tx1i2o();
txs.push(tx);
}
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let b = new_block(txs.iter().collect(), &keychain, &prev, &key_id);
let mut vec = Vec::new();
ser::serialize(&mut vec, &b).expect("serialization failed");
// pins the exact wire size of a full block with ten 1-in/2-out txs
let target_len = 17_016;
assert_eq!(vec.len(), target_len,);
}
#[test]
fn compact_block_10_tx_serialized_size() {
    let keychain = Keychain::from_random_seed().unwrap();

    // ten independent 1-input/2-output transactions
    let mut txs = Vec::new();
    for _ in 0..10 {
        txs.push(tx1i2o());
    }

    let prev = BlockHeader::default();
    let key_id = keychain.derive_key_id(1).unwrap();
    let b = new_block(txs.iter().collect(), &keychain, &prev, &key_id);

    // the compact form carries only short ids for the tx kernels, so its
    // size stays close to the empty-compact-block baseline
    let mut buf = Vec::new();
    ser::serialize(&mut buf, &b.as_compact_block()).expect("serialization failed");
    assert_eq!(buf.len(), 1_284);
}
#[test]
fn compact_block_hash_with_nonce() {
let keychain = Keychain::from_random_seed().unwrap();
let tx = tx1i2o();
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let b = new_block(vec![&tx], &keychain, &prev, &key_id);
// two compact forms of the same block get independent random nonces
let cb1 = b.as_compact_block();
let cb2 = b.as_compact_block();
// random nonce will not affect the hash of the compact block itself
// hash is based on header POW only
assert!(cb1.nonce != cb2.nonce);
assert_eq!(b.hash(), cb1.hash());
assert_eq!(cb1.hash(), cb2.hash());
// the short ids differ because they are keyed on (hash, nonce)
assert!(cb1.kern_ids[0] != cb2.kern_ids[0]);
// check we can identify the specified kernel from the short_id
// correctly in both of the compact_blocks
assert_eq!(
cb1.kern_ids[0],
tx.kernels[0].short_id(&cb1.hash(), cb1.nonce)
);
assert_eq!(
cb2.kern_ids[0],
tx.kernels[0].short_id(&cb2.hash(), cb2.nonce)
);
}
#[test]
fn convert_block_to_compact_block() {
let keychain = Keychain::from_random_seed().unwrap();
let tx1 = tx1i2o();
let prev = BlockHeader::default();
let key_id = keychain.derive_key_id(1).unwrap();
let b = new_block(vec![&tx1], &keychain, &prev, &key_id);
let cb = b.as_compact_block();
// coinbase output/kernel are carried in full; the tx kernel becomes a short id
assert_eq!(cb.out_full.len(), 1);
assert_eq!(cb.kern_full.len(), 1);
assert_eq!(cb.kern_ids.len(), 1);
// the short id must correspond to the block's sole non-coinbase kernel
assert_eq!(
cb.kern_ids[0],
b.kernels
.iter()
.find(|x| !x.features.contains(KernelFeatures::COINBASE_KERNEL))
.unwrap()
.short_id(&cb.hash(), cb.nonce)
);
}
#[test]
fn hydrate_empty_compact_block() {
    let keychain = Keychain::from_random_seed().unwrap();
    let prev = BlockHeader::default();
    let key_id = keychain.derive_key_id(1).unwrap();
    let block = new_block(vec![], &keychain, &prev, &key_id);

    // with no non-coinbase txs, hydrating the compact form with an empty
    // tx set must reproduce the original block exactly
    let hydrated = Block::hydrate_from(block.as_compact_block(), vec![]);
    assert_eq!(hydrated.header, block.header);
    assert_eq!(hydrated.outputs, block.outputs);
    assert_eq!(hydrated.kernels, block.kernels);
}
#[test]
fn serialize_deserialize_compact_block() {
    // a minimal compact block: default header, single zero short id
    let cb = CompactBlock {
        header: BlockHeader::default(),
        nonce: 0,
        out_full: Vec::new(),
        kern_full: Vec::new(),
        kern_ids: vec![ShortId::zero()],
    };

    // round-trip through the binary serializer
    let mut buf = Vec::new();
    ser::serialize(&mut buf, &cb).expect("serialization failed");
    let decoded: CompactBlock = ser::deserialize(&mut &buf[..]).unwrap();

    assert_eq!(cb.header, decoded.header);
    assert_eq!(cb.kern_ids, decoded.kern_ids);
}

104
core/tests/common/mod.rs Normal file
View file

@ -0,0 +1,104 @@
// Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Common test functions
extern crate grin_core;
extern crate grin_keychain as keychain;
extern crate grin_util as util;
extern crate grin_wallet as wallet;
use grin_core::core::block::{Block, BlockHeader};
use grin_core::core::target::Difficulty;
use grin_core::core::Transaction;
use keychain::{Identifier, Keychain};
use wallet::libwallet::build::{self, input, output, with_fee};
use wallet::libwallet::reward;
// utility producing a transaction with 2 inputs and a single outputs
// 10 + 11 in -> 19 out + 2 fee; built with a kernel offset
pub fn tx2i1o() -> Transaction {
let keychain = keychain::Keychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id3 = keychain.derive_key_id(3).unwrap();
build::transaction_with_offset(
vec![
input(10, key_id1),
input(11, key_id2),
output(19, key_id3),
with_fee(2),
],
&keychain,
).unwrap()
}
// utility producing a transaction with a single input and output
// 5 in -> 3 out + 2 fee; built with a kernel offset
pub fn tx1i1o() -> Transaction {
let keychain = keychain::Keychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
build::transaction_with_offset(
vec![input(5, key_id1), output(3, key_id2), with_fee(2)],
&keychain,
).unwrap()
}
// utility producing a transaction with a single input
// and two outputs (one change output)
// Note: this tx has an "offset" kernel
// 6 in -> 3 out + 1 change + 2 fee
pub fn tx1i2o() -> Transaction {
let keychain = keychain::Keychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id3 = keychain.derive_key_id(3).unwrap();
build::transaction_with_offset(
vec![
input(6, key_id1),
output(3, key_id2),
output(1, key_id3),
with_fee(2),
],
&keychain,
).unwrap()
}
// utility to create a block without worrying about the key or previous
// header
pub fn new_block(
    txs: Vec<&Transaction>,
    keychain: &Keychain,
    previous_header: &BlockHeader,
    key_id: &Identifier,
) -> Block {
    // total fees carried by the txs fund the reward output
    let fees = txs.iter().map(|tx| tx.fee()).sum();
    // `key_id` and `previous_header` are already references; no second
    // borrow needed (the former `&key_id` was an `&&Identifier`)
    let reward_output = reward::output(keychain, key_id, fees, previous_header.height).unwrap();
    Block::new(previous_header, txs, Difficulty::one(), reward_output).unwrap()
}
// utility producing a transaction that spends an output with the provided
// value and blinding key
pub fn txspend1i1o(
    v: u64,
    keychain: &Keychain,
    key_id1: Identifier,
    key_id2: Identifier,
) -> Transaction {
    // single input of value `v`, one output of 3, fee of 2
    build::transaction(
        vec![input(v, key_id1), output(3, key_id2), with_fee(2)],
        // `keychain` is already a reference -- no second borrow needed
        keychain,
    ).unwrap()
}

504
core/tests/core.rs Normal file
View file

@ -0,0 +1,504 @@
// Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Core tests
extern crate grin_core;
extern crate grin_keychain as keychain;
extern crate grin_util as util;
extern crate grin_wallet as wallet;
pub mod common;
use grin_core::core::hash::{Hashed, ZERO_HASH};
use grin_core::core::block::BlockHeader;
use grin_core::core::{aggregate, aggregate_with_cut_through, deaggregate, KernelFeatures, Output,
Transaction};
use wallet::libwallet::build::{self, initial_tx, input, output, with_excess, with_fee,
with_lock_height};
use grin_core::core::block::Error::KernelLockHeight;
use grin_core::ser;
use keychain::Keychain;
use util::{secp_static, static_secp_instance};
use common::{new_block, tx1i1o, tx1i2o, tx2i1o};
#[test]
fn simple_tx_ser() {
    // a 2-input/1-output tx must serialize to the expected byte length
    let tx = tx2i1o();
    let mut buf = Vec::new();
    ser::serialize(&mut buf, &tx).expect("serialization failed");
    assert_eq!(buf.len(), 954);
}
#[test]
fn simple_tx_ser_deser() {
    let tx = tx2i1o();

    // round-trip through the binary serializer
    let mut buf = Vec::new();
    ser::serialize(&mut buf, &tx).expect("serialization failed");
    let dtx: Transaction = ser::deserialize(&mut &buf[..]).unwrap();

    // decoded tx must match the original in structure and hash
    assert_eq!(dtx.fee(), 2);
    assert_eq!(dtx.inputs.len(), 2);
    assert_eq!(dtx.outputs.len(), 1);
    assert_eq!(tx.hash(), dtx.hash());
}
#[test]
fn tx_double_ser_deser() {
// checks serializing doesn't mess up the tx and produces consistent results
let btx = tx2i1o();
let mut vec = Vec::new();
assert!(ser::serialize(&mut vec, &btx).is_ok());
let dtx: Transaction = ser::deserialize(&mut &vec[..]).unwrap();
// serialize the same tx a second time and decode again
let mut vec2 = Vec::new();
assert!(ser::serialize(&mut vec2, &btx).is_ok());
let dtx2: Transaction = ser::deserialize(&mut &vec2[..]).unwrap();
// both decoded copies must hash identically to the original
assert_eq!(btx.hash(), dtx.hash());
assert_eq!(dtx.hash(), dtx2.hash());
}
#[test]
#[should_panic(expected = "InvalidSecretKey")]
fn test_zero_commit_fails() {
let keychain = Keychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
// blinding should fail as signing with a zero r*G shouldn't work
// (input and output share key_id1, so the blinding factors cancel)
build::transaction(
vec![
input(10, key_id1.clone()),
output(9, key_id1.clone()),
with_fee(1),
],
&keychain,
).unwrap();
}
#[test]
fn build_tx_kernel() {
let keychain = Keychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id3 = keychain.derive_key_id(3).unwrap();
// first build a valid tx with corresponding blinding factor
// 10 in -> 5 out + 3 out + 2 fee
let tx = build::transaction(
vec![
input(10, key_id1),
output(5, key_id2),
output(3, key_id3),
with_fee(2),
],
&keychain,
).unwrap();
// check the tx is valid
tx.validate().unwrap();
// check the kernel is also itself valid
assert_eq!(tx.kernels.len(), 1);
let kern = &tx.kernels[0];
kern.verify().unwrap();
// a plain tx kernel carries the default features and the tx fee
assert_eq!(kern.features, KernelFeatures::DEFAULT_KERNEL);
assert_eq!(kern.fee, tx.fee());
}
// Combine two transactions into one big transaction (with multiple kernels)
// and check it still validates.
#[test]
fn transaction_cut_through() {
    let (tx_a, tx_b) = (tx1i2o(), tx2i1o());
    assert!(tx_a.validate().is_ok());
    assert!(tx_b.validate().is_ok());

    // merge both txs, eliminating outputs spent within the combined tx
    let combined = aggregate_with_cut_through(vec![tx_a, tx_b]).unwrap();
    assert!(combined.validate().is_ok());
}
// Attempt to deaggregate a multi-kernel transaction in a different way
#[test]
fn multi_kernel_transaction_deaggregation() {
// four independent single-kernel transactions
let tx1 = tx1i1o();
let tx2 = tx1i1o();
let tx3 = tx1i1o();
let tx4 = tx1i1o();
// each must be individually valid before aggregation
assert!(tx1.validate().is_ok());
assert!(tx2.validate().is_ok());
assert!(tx3.validate().is_ok());
assert!(tx4.validate().is_ok());
// aggregate all four, plus the two pairwise aggregations we expect to recover
let tx1234 = aggregate(vec![tx1.clone(), tx2.clone(), tx3.clone(), tx4.clone()]).unwrap();
let tx12 = aggregate(vec![tx1.clone(), tx2.clone()]).unwrap();
let tx34 = aggregate(vec![tx3.clone(), tx4.clone()]).unwrap();
assert!(tx1234.validate().is_ok());
assert!(tx12.validate().is_ok());
assert!(tx34.validate().is_ok());
// removing tx12 from the full aggregate should leave exactly tx34 ...
let deaggregated_tx34 = deaggregate(tx1234.clone(), vec![tx12.clone()]).unwrap();
assert!(deaggregated_tx34.validate().is_ok());
assert_eq!(tx34, deaggregated_tx34);
// ... and removing tx34 should leave exactly tx12
let deaggregated_tx12 = deaggregate(tx1234.clone(), vec![tx34.clone()]).unwrap();
assert!(deaggregated_tx12.validate().is_ok());
assert_eq!(tx12, deaggregated_tx12);
}
#[test]
fn multi_kernel_transaction_deaggregation_2() {
// three independent single-kernel transactions
let tx1 = tx1i1o();
let tx2 = tx1i1o();
let tx3 = tx1i1o();
assert!(tx1.validate().is_ok());
assert!(tx2.validate().is_ok());
assert!(tx3.validate().is_ok());
let tx123 = aggregate(vec![tx1.clone(), tx2.clone(), tx3.clone()]).unwrap();
let tx12 = aggregate(vec![tx1.clone(), tx2.clone()]).unwrap();
assert!(tx123.validate().is_ok());
assert!(tx12.validate().is_ok());
// deaggregating the tx1+tx2 aggregate from all three should recover tx3 exactly
let deaggregated_tx3 = deaggregate(tx123.clone(), vec![tx12.clone()]).unwrap();
assert!(deaggregated_tx3.validate().is_ok());
assert_eq!(tx3, deaggregated_tx3);
}
#[test]
fn multi_kernel_transaction_deaggregation_3() {
    // Three single-kernel txs; aggregate all three, then subtract a
    // single-tx aggregation of tx2 and expect to recover tx1+tx3.
    let tx1 = tx1i1o();
    let tx2 = tx1i1o();
    let tx3 = tx1i1o();

    assert!(tx1.validate().is_ok());
    assert!(tx2.validate().is_ok());
    assert!(tx3.validate().is_ok());

    let tx123 = aggregate(vec![tx1.clone(), tx2.clone(), tx3.clone()]).unwrap();
    let tx13 = aggregate(vec![tx1.clone(), tx3.clone()]).unwrap();
    // aggregating a single tx still goes through the aggregation path
    let tx2 = aggregate(vec![tx2.clone()]).unwrap();

    assert!(tx123.validate().is_ok());
    // sanity-check both intermediate aggregations (tx13 was previously
    // never validated before the equality comparison below)
    assert!(tx13.validate().is_ok());
    assert!(tx2.validate().is_ok());

    let deaggregated_tx13 = deaggregate(tx123.clone(), vec![tx2.clone()]).unwrap();
    assert!(deaggregated_tx13.validate().is_ok());
    assert_eq!(tx13, deaggregated_tx13);
}
#[test]
fn multi_kernel_transaction_deaggregation_4() {
	// Five valid single-kernel transactions.
	let txs = vec![tx1i1o(), tx1i1o(), tx1i1o(), tx1i1o(), tx1i1o()];
	for tx in &txs {
		assert!(tx.validate().is_ok());
	}

	// Aggregate all five into one multi-kernel transaction.
	let tx12345 = aggregate(txs.clone()).unwrap();
	assert!(tx12345.validate().is_ok());

	// Subtract the first four individually; only the fifth should remain.
	let deaggregated_tx5 = deaggregate(tx12345.clone(), txs[..4].to_vec()).unwrap();
	assert!(deaggregated_tx5.validate().is_ok());
	assert_eq!(txs[4], deaggregated_tx5);
}
// Deaggregate using already-aggregated pairs as the txs to subtract.
#[test]
fn multi_kernel_transaction_deaggregation_5() {
	let tx1 = tx1i1o();
	let tx2 = tx1i1o();
	let tx3 = tx1i1o();
	let tx4 = tx1i1o();
	let tx5 = tx1i1o();
	assert!(tx1.validate().is_ok());
	assert!(tx2.validate().is_ok());
	assert!(tx3.validate().is_ok());
	assert!(tx4.validate().is_ok());
	assert!(tx5.validate().is_ok());

	let tx12345 = aggregate(vec![
		tx1.clone(),
		tx2.clone(),
		tx3.clone(),
		tx4.clone(),
		tx5.clone(),
	]).unwrap();
	let tx12 = aggregate(vec![tx1.clone(), tx2.clone()]).unwrap();
	let tx34 = aggregate(vec![tx3.clone(), tx4.clone()]).unwrap();
	assert!(tx12345.validate().is_ok());
	// Fix: also validate the intermediate aggregates before using them
	// (consistent with multi_kernel_transaction_deaggregation); they were
	// previously built but never checked.
	assert!(tx12.validate().is_ok());
	assert!(tx34.validate().is_ok());

	let deaggregated_tx5 = deaggregate(tx12345.clone(), vec![tx12.clone(), tx34.clone()]).unwrap();
	assert!(deaggregated_tx5.validate().is_ok());
	assert_eq!(tx5, deaggregated_tx5);
}
// Attempt to deaggregate a multi-kernel transaction
#[test]
fn basic_transaction_deaggregation() {
	let tx1 = tx1i2o();
	let tx2 = tx2i1o();
	assert!(tx1.validate().is_ok());
	assert!(tx2.validate().is_ok());

	// aggregate tx1 and tx2 into a single multi-kernel transaction
	// (note: plain aggregate here, no cut-through is performed)
	let tx3 = aggregate(vec![tx1.clone(), tx2.clone()]).unwrap();
	assert!(tx3.validate().is_ok());

	// subtracting either constituent tx must recover the other one exactly
	let deaggregated_tx1 = deaggregate(tx3.clone(), vec![tx2.clone()]).unwrap();
	assert!(deaggregated_tx1.validate().is_ok());
	assert_eq!(tx1, deaggregated_tx1);

	let deaggregated_tx2 = deaggregate(tx3.clone(), vec![tx1.clone()]).unwrap();
	assert!(deaggregated_tx2.validate().is_ok());
	assert_eq!(tx2, deaggregated_tx2);
}
#[test]
fn hash_output() {
	let keychain = Keychain::from_random_seed().unwrap();
	let key_id1 = keychain.derive_key_id(1).unwrap();
	let key_id2 = keychain.derive_key_id(2).unwrap();
	let key_id3 = keychain.derive_key_id(3).unwrap();

	// Build a simple 1-input / 2-output transaction.
	let tx = build::transaction(
		vec![
			input(75, key_id1),
			output(42, key_id2),
			output(32, key_id3),
			with_fee(1),
		],
		&keychain,
	).unwrap();

	// Output hashes must be non-zero and distinct per output.
	let first_hash = tx.outputs[0].hash();
	let second_hash = tx.outputs[1].hash();
	assert!(first_hash != ZERO_HASH);
	assert!(first_hash != second_hash);
}
#[ignore]
#[test]
fn blind_tx() {
	let btx = tx2i1o();
	assert!(btx.validate().is_ok());

	// Ignored for bullet proofs, because calling range_proof_info
	// with a bullet proof causes painful errors

	// checks that the range proof on our blind output is sufficiently hiding
	let Output { proof, .. } = btx.outputs[0];

	let secp = static_secp_instance();
	let secp = secp.lock().unwrap();
	let info = secp.range_proof_info(proof);

	// A hiding proof reveals nothing about the committed amount: the
	// reported range must be the full [0, u64::MAX] interval.
	assert!(info.min == 0);
	assert!(info.max == u64::max_value());
}
// Hashes of two structurally different transactions must differ.
#[test]
fn tx_hash_diff() {
	let btx1 = tx2i1o();
	let btx2 = tx1i1o();
	// Fix: use a single assert with a failure message rather than the
	// manual `if`/`panic!` construct.
	assert!(btx1.hash() != btx2.hash(), "diff txs have same hash");
}
/// Simulate the standard exchange between 2 parties when creating a basic
/// 2 inputs, 2 outputs transaction.
#[test]
fn tx_build_exchange() {
	let keychain = Keychain::from_random_seed().unwrap();
	let key_id1 = keychain.derive_key_id(1).unwrap();
	let key_id2 = keychain.derive_key_id(2).unwrap();
	let key_id3 = keychain.derive_key_id(3).unwrap();
	let key_id4 = keychain.derive_key_id(4).unwrap();

	let (tx_alice, blind_sum) = {
		// Alice gets 2 of her pre-existing outputs to send 5 coins to Bob, they
		// become inputs in the new transaction
		let (in1, in2) = (input(4, key_id1), input(3, key_id2));

		// Alice builds her transaction, with change, which also produces the sum
		// of blinding factors before they're obscured.
		let (tx, sum) =
			build::partial_transaction(vec![in1, in2, output(1, key_id3), with_fee(2)], &keychain)
				.unwrap();

		(tx, sum)
	};

	// From now on, Bob only has the obscured transaction and the sum of
	// blinding factors. He adds his output, finalizes the transaction so it's
	// ready for broadcast.
	let tx_final = build::transaction(
		vec![
			initial_tx(tx_alice),
			with_excess(blind_sum),
			output(4, key_id4),
		],
		&keychain,
	).unwrap();

	// The fully built transaction must validate end to end.
	tx_final.validate().unwrap();
}
#[test]
fn reward_empty_block() {
	let keychain = keychain::Keychain::from_random_seed().unwrap();
	let key_id = keychain.derive_key_id(1).unwrap();
	let zero_commit = secp_static::commit_to_zero_value();
	let previous_header = BlockHeader::default();

	// A block containing nothing but the coinbase reward must validate.
	let block = new_block(vec![], &keychain, &previous_header, &key_id);
	block
		.cut_through()
		.validate(&zero_commit, &zero_commit)
		.unwrap();
}
#[test]
fn reward_with_tx_block() {
	let keychain = keychain::Keychain::from_random_seed().unwrap();
	let key_id = keychain.derive_key_id(1).unwrap();
	let zero_commit = secp_static::commit_to_zero_value();

	// A valid non-coinbase tx to include in the block.
	let mut tx1 = tx2i1o();
	tx1.validate().unwrap();

	// A block with the reward plus one tx must still validate after cut-through.
	let previous_header = BlockHeader::default();
	let b = new_block(vec![&mut tx1], &keychain, &previous_header, &key_id);
	b.cut_through().validate(&zero_commit, &zero_commit).unwrap();
}
#[test]
fn simple_block() {
	let keychain = keychain::Keychain::from_random_seed().unwrap();
	let key_id = keychain.derive_key_id(1).unwrap();
	let zero_commit = secp_static::commit_to_zero_value();

	// Two transactions plus the coinbase reward in one block.
	let mut tx1 = tx2i1o();
	let mut tx2 = tx1i1o();
	let previous_header = BlockHeader::default();

	let block = new_block(
		vec![&mut tx1, &mut tx2],
		&keychain,
		&previous_header,
		&key_id,
	);
	block.validate(&zero_commit, &zero_commit).unwrap();
}
// A timelocked tx is accepted when its lock height matches the block height
// and rejected with KernelLockHeight when the lock height exceeds it.
#[test]
fn test_block_with_timelocked_tx() {
	let keychain = keychain::Keychain::from_random_seed().unwrap();
	let key_id1 = keychain.derive_key_id(1).unwrap();
	let key_id2 = keychain.derive_key_id(2).unwrap();
	let key_id3 = keychain.derive_key_id(3).unwrap();
	let zero_commit = secp_static::commit_to_zero_value();

	// first check we can add a timelocked tx where lock height matches current
	// block height and that the resulting block is valid
	let tx1 = build::transaction(
		vec![
			input(5, key_id1.clone()),
			output(3, key_id2.clone()),
			with_fee(2),
			with_lock_height(1),
		],
		&keychain,
	).unwrap();

	let previous_header = BlockHeader::default();
	// Fix: pass `&key_id3` directly; the previous `&key_id3.clone()` cloned
	// the identifier only to immediately borrow the temporary.
	let b = new_block(vec![&tx1], &keychain, &previous_header, &key_id3);
	b.validate(&zero_commit, &zero_commit).unwrap();

	// now try adding a timelocked tx where lock height is greater than current
	// block height
	let tx1 = build::transaction(
		vec![
			input(5, key_id1.clone()),
			output(3, key_id2.clone()),
			with_fee(2),
			with_lock_height(2),
		],
		&keychain,
	).unwrap();

	let previous_header = BlockHeader::default();
	let b = new_block(vec![&tx1], &keychain, &previous_header, &key_id3);
	match b.validate(&zero_commit, &zero_commit) {
		Err(KernelLockHeight(height)) => {
			assert_eq!(height, 2);
		}
		_ => panic!("expecting KernelLockHeight error here"),
	}
}
#[test]
pub fn test_verify_1i1o_sig() {
	// A 1-input / 1-output tx must carry a valid kernel signature.
	assert!(tx1i1o().validate().is_ok());
}
#[test]
pub fn test_verify_2i1o_sig() {
	// A 2-input / 1-output tx must carry a valid kernel signature.
	assert!(tx2i1o().validate().is_ok());
}

50
core/tests/transaction.rs Normal file
View file

@ -0,0 +1,50 @@
// Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Transaction integration tests
extern crate grin_core;
extern crate grin_keychain as keychain;
extern crate grin_util as util;
extern crate grin_wallet as wallet;
pub mod common;
use grin_core::core::{Output, OutputFeatures};
use grin_core::ser;
use keychain::Keychain;
use util::secp;
use wallet::libwallet::proof;
// Round-trip an Output (commitment + bullet proof) through the binary
// serializer and check every field survives intact.
#[test]
fn test_output_ser_deser() {
	let keychain = Keychain::from_random_seed().unwrap();
	let key_id = keychain.derive_key_id(1).unwrap();
	let commit = keychain.commit(5, &key_id).unwrap();
	let msg = secp::pedersen::ProofMessage::empty();
	let proof = proof::create(&keychain, 5, &key_id, commit, None, msg).unwrap();

	let out = Output {
		features: OutputFeatures::DEFAULT_OUTPUT,
		commit: commit,
		proof: proof,
	};

	let mut vec = vec![];
	// Fix: expect message previously read "serialized failed".
	ser::serialize(&mut vec, &out).expect("serialization failed");
	let dout: Output = ser::deserialize(&mut &vec[..]).unwrap();

	assert_eq!(dout.features, OutputFeatures::DEFAULT_OUTPUT);
	assert_eq!(dout.commit, out.commit);
	assert_eq!(dout.proof, out.proof);
}

View file

@ -19,13 +19,10 @@ use std::{error, fmt};
use util::secp;
use util::secp::{Message, Secp256k1, Signature};
use util::secp::key::{PublicKey, SecretKey};
use util::secp::pedersen::{Commitment, ProofInfo, ProofMessage, RangeProof};
use util::secp::aggsig;
use util::secp::key::SecretKey;
use util::secp::pedersen::Commitment;
use util::logger::LOGGER;
use util::kernel_sig_msg;
use blake2;
use uuid::Uuid;
use blind::{BlindSum, BlindingFactor};
use extkey::{self, Identifier};
@ -66,23 +63,10 @@ impl fmt::Display for Error {
}
}
/// Holds internal information about an aggsig operation
#[derive(Clone, Debug)]
pub struct AggSigTxContext {
// Secret key (of which public is shared)
pub sec_key: SecretKey,
// Secret nonce (of which public is shared)
// (basically a SecretKey)
pub sec_nonce: SecretKey,
// If I'm the recipient, store my outputs between invocations (that I need to sum)
pub output_ids: Vec<Identifier>,
}
#[derive(Clone, Debug)]
pub struct Keychain {
secp: Secp256k1,
extkey: extkey::ExtendedKey,
pub aggsig_contexts: Arc<RwLock<Option<HashMap<Uuid, AggSigTxContext>>>>,
key_overrides: HashMap<Identifier, SecretKey>,
key_derivation_cache: Arc<RwLock<HashMap<Identifier, u32>>>,
}
@ -111,7 +95,6 @@ impl Keychain {
let keychain = Keychain {
secp: secp,
extkey: extkey,
aggsig_contexts: Arc::new(RwLock::new(None)),
key_overrides: HashMap::new(),
key_derivation_cache: Arc::new(RwLock::new(HashMap::new())),
};
@ -130,7 +113,7 @@ impl Keychain {
Ok(child_key.key_id)
}
fn derived_key(&self, key_id: &Identifier) -> Result<SecretKey, Error> {
pub fn derived_key(&self, key_id: &Identifier) -> Result<SecretKey, Error> {
// first check our overrides and just return the key if we have one in there
if let Some(key) = self.key_overrides.get(key_id) {
trace!(
@ -214,93 +197,6 @@ impl Keychain {
Ok(commit)
}
/// Derives a deterministic rangeproof nonce for the given commitment:
/// blake2b(commit bytes, keyed with the root master key id).
pub fn rangeproof_create_nonce(&self, commit: &Commitment) -> SecretKey {
	// hash(commit|masterkey) as nonce
	let root_key = self.root_key_id().to_bytes();
	let res = blake2::blake2b::blake2b(32, &commit.0, &root_key);
	// Fix: copy the 32-byte digest with copy_from_slice instead of a
	// manual byte-by-byte loop (digest length is fixed at 32 above).
	let mut ret_val = [0; 32];
	ret_val.copy_from_slice(res.as_bytes());
	SecretKey::from_slice(&self.secp, &ret_val).unwrap()
}
/// Builds a bullet proof for `amount` committed under `key_id`.
/// An empty `msg` produces a proof with no embedded message; otherwise the
/// message must be exactly 64 bytes.
pub fn range_proof(
	&self,
	amount: u64,
	key_id: &Identifier,
	_commit: Commitment,
	extra_data: Option<Vec<u8>>,
	msg: ProofMessage,
) -> Result<RangeProof, Error> {
	let commit = self.commit(amount, key_id)?;
	let skey = self.derived_key(key_id)?;
	let nonce = self.rangeproof_create_nonce(&commit);

	// No message requested: plain bullet proof.
	if msg.len() == 0 {
		return Ok(self.secp
			.bullet_proof(amount, skey, nonce, extra_data, None));
	}

	// A non-empty message must be exactly 64 bytes.
	if msg.len() != 64 {
		error!(LOGGER, "Bullet proof message must be 64 bytes.");
		return Err(Error::RangeProof(
			"Bullet proof message must be 64 bytes".to_string(),
		));
	}

	Ok(self.secp
		.bullet_proof(amount, skey, nonce, extra_data, Some(msg)))
}
/// Verifies a bullet proof against the given commitment (and optional
/// extra committed data), discarding the proof details on success.
pub fn verify_range_proof(
	secp: &Secp256k1,
	commit: Commitment,
	proof: RangeProof,
	extra_data: Option<Vec<u8>>,
) -> Result<(), secp::Error> {
	// Fix: the previous identity `match` (Ok(_)=>Ok(()), Err(e)=>Err(e))
	// is just a map to unit.
	secp.verify_bullet_proof(commit, proof, extra_data).map(|_| ())
}
/// Attempts to rewind (unwind) a bullet proof using the nonce derived from
/// `key_id`, recovering the embedded proof message.
/// Returns a ProofInfo with `success: true` and the recovered message on
/// success; on failure (wrong key/commitment/extra data) returns
/// `success: false` with an empty message. Numeric fields (value, min,
/// max, ...) are always zero for bullet proofs.
pub fn rewind_range_proof(
	&self,
	key_id: &Identifier,
	commit: Commitment,
	extra_data: Option<Vec<u8>>,
	proof: RangeProof,
) -> Result<ProofInfo, Error> {
	let skey = self.derived_key(key_id)?;
	// Same deterministic nonce used when the proof was created.
	let nonce = self.rangeproof_create_nonce(&commit);
	let proof_message = self.secp
		.unwind_bullet_proof(commit, skey, nonce, extra_data, proof);
	let proof_info = match proof_message {
		Ok(p) => ProofInfo {
			success: true,
			value: 0,
			message: p,
			mlen: 0,
			min: 0,
			max: 0,
			exp: 0,
			mantissa: 0,
		},
		Err(_) => ProofInfo {
			success: false,
			value: 0,
			message: ProofMessage::empty(),
			mlen: 0,
			min: 0,
			max: 0,
			exp: 0,
			mantissa: 0,
		},
	};
	return Ok(proof_info);
}
pub fn blind_sum(&self, blind_sum: &BlindSum) -> Result<BlindingFactor, Error> {
let mut pos_keys: Vec<SecretKey> = blind_sum
.positive_key_ids
@ -330,236 +226,6 @@ impl Keychain {
Ok(BlindingFactor::from_secret_key(sum))
}
/// Creates a new aggsig context for `transaction_id`, storing the given
/// secret key and generating a fresh secret nonce for the exchange.
/// Errors if a context already exists for this transaction id.
pub fn aggsig_create_context(
	&self,
	transaction_id: &Uuid,
	sec_key: SecretKey,
) -> Result<(), Error> {
	let mut contexts = self.aggsig_contexts.write().unwrap();
	// Lazily initialize the context map on first use.
	let contexts = contexts.get_or_insert_with(HashMap::new);
	if contexts.contains_key(transaction_id) {
		// Fix: message previously read "Duplication transaction id".
		return Err(Error::Transaction(String::from(
			"Duplicate transaction id",
		)));
	}
	contexts.insert(
		transaction_id.clone(),
		AggSigTxContext {
			sec_key: sec_key,
			sec_nonce: aggsig::export_secnonce_single(&self.secp).unwrap(),
			output_ids: vec![],
		},
	);
	Ok(())
}
/// Tracks an output contributing to my excess value (if it needs to
/// be kept between invocations).
/// Panics if no aggsig context exists for `transaction_id`.
pub fn aggsig_add_output(&self, transaction_id: &Uuid, output_id: &Identifier) {
	let mut agg_contexts = self.aggsig_contexts.write().unwrap();
	// Fix: mutate the stored context in place rather than cloning the
	// entire context map (and the context) just to push one output id.
	agg_contexts
		.as_mut()
		.unwrap()
		.get_mut(transaction_id)
		.unwrap()
		.output_ids
		.push(output_id.clone());
}
/// Returns all stored outputs
/// (the output identifiers tracked so far for this transaction id).
/// Panics if no aggsig context exists for `transaction_id`.
pub fn aggsig_get_outputs(&self, transaction_id: &Uuid) -> Vec<Identifier> {
	let contexts = self.aggsig_contexts.clone();
	let contexts_read = contexts.read().unwrap();
	let agg_context = contexts_read.as_ref().unwrap();
	let agg_context_return = agg_context.get(transaction_id);
	agg_context_return.unwrap().output_ids.clone()
}
/// Returns private key, private nonce
/// from the aggsig context associated with `transaction_id`.
/// Panics if no such context exists.
pub fn aggsig_get_private_keys(&self, transaction_id: &Uuid) -> (SecretKey, SecretKey) {
	let contexts = self.aggsig_contexts.clone();
	let contexts_read = contexts.read().unwrap();
	let agg_context = contexts_read.as_ref().unwrap();
	let agg_context_return = agg_context.get(transaction_id);
	(
		agg_context_return.unwrap().sec_key.clone(),
		agg_context_return.unwrap().sec_nonce.clone(),
	)
}
/// Returns public key, public nonce
/// derived from the stored secret key / secret nonce for this context.
/// Panics if no context exists for `transaction_id`.
pub fn aggsig_get_public_keys(&self, transaction_id: &Uuid) -> (PublicKey, PublicKey) {
	let contexts = self.aggsig_contexts.clone();
	let contexts_read = contexts.read().unwrap();
	let agg_context = contexts_read.as_ref().unwrap();
	let agg_context_return = agg_context.get(transaction_id);
	(
		PublicKey::from_secret_key(&self.secp, &agg_context_return.unwrap().sec_key).unwrap(),
		PublicKey::from_secret_key(&self.secp, &agg_context_return.unwrap().sec_nonce).unwrap(),
	)
}
/// Note 'secnonce' here is used to perform the signature, while 'pubnonce' just allows you to
/// provide a custom public nonce to include while calculating e
/// nonce_sum is the sum used to decide whether secnonce should be inverted during sig time.
/// Signs `msg` with the secret key stored in the context for `transaction_id`.
/// Panics if no such context exists.
pub fn aggsig_sign_single(
	&self,
	transaction_id: &Uuid,
	msg: &Message,
	secnonce: Option<&SecretKey>,
	pubnonce: Option<&PublicKey>,
	nonce_sum: Option<&PublicKey>,
) -> Result<Signature, Error> {
	// Look up the signing key for this transaction's context.
	let contexts = self.aggsig_contexts.clone();
	let contexts_read = contexts.read().unwrap();
	let agg_context = contexts_read.as_ref().unwrap();
	let agg_context_return = agg_context.get(transaction_id);
	let sig = aggsig::sign_single(
		&self.secp,
		msg,
		&agg_context_return.unwrap().sec_key,
		secnonce,
		pubnonce,
		nonce_sum,
	)?;
	Ok(sig)
}
/// Verifies an aggsig signature against `pubkey` (and optional `pubnonce`);
/// `is_partial` indicates the sig is a partial one from a multi-party exchange.
pub fn aggsig_verify_single(
	&self,
	sig: &Signature,
	msg: &Message,
	pubnonce: Option<&PublicKey>,
	pubkey: &PublicKey,
	is_partial: bool,
) -> bool {
	aggsig::verify_single(&self.secp, sig, msg, pubnonce, pubkey, is_partial)
}
/// Verifies the other party's final sig corresponds with what we're expecting:
/// rebuilds the kernel message from (fee, lock_height) and verifies `sig`
/// against it under `pubkey`.
pub fn aggsig_verify_final_sig_build_msg(
	&self,
	sig: &Signature,
	pubkey: &PublicKey,
	fee: u64,
	lock_height: u64,
) -> bool {
	let msg = secp::Message::from_slice(&kernel_sig_msg(fee, lock_height)).unwrap();
	self.aggsig_verify_single(sig, &msg, None, pubkey, true)
}
/// Verifies the other party's partial sig corresponds with what we're expecting.
/// Reconstructs the total nonce sum (their public nonce + our secret nonce's
/// public counterpart) used in the challenge.
pub fn aggsig_verify_partial_sig(
	&self,
	transaction_id: &Uuid,
	sig: &Signature,
	other_pub_nonce: &PublicKey,
	pubkey: &PublicKey,
	fee: u64,
	lock_height: u64,
) -> bool {
	let (_, sec_nonce) = self.aggsig_get_private_keys(transaction_id);
	// nonce_sum = other party's public nonce + ours
	let mut nonce_sum = other_pub_nonce.clone();
	let _ = nonce_sum.add_exp_assign(&self.secp, &sec_nonce);
	let msg = secp::Message::from_slice(&kernel_sig_msg(fee, lock_height)).unwrap();
	self.aggsig_verify_single(sig, &msg, Some(&nonce_sum), pubkey, true)
}
/// Calculates our partial signature over the kernel message built from
/// (fee, lock_height), using the combined public nonce sum in the challenge.
pub fn aggsig_calculate_partial_sig(
	&self,
	transaction_id: &Uuid,
	other_pub_nonce: &PublicKey,
	fee: u64,
	lock_height: u64,
) -> Result<Signature, Error> {
	// Add public nonces kR*G + kS*G
	let (_, sec_nonce) = self.aggsig_get_private_keys(transaction_id);
	let mut nonce_sum = other_pub_nonce.clone();
	let _ = nonce_sum.add_exp_assign(&self.secp, &sec_nonce);
	let msg = secp::Message::from_slice(&kernel_sig_msg(fee, lock_height))?;

	//Now calculate signature using message M=fee, nonce in e=nonce_sum
	self.aggsig_sign_single(
		transaction_id,
		&msg,
		Some(&sec_nonce),
		Some(&nonce_sum),
		Some(&nonce_sum),
	)
}
/// Helper function to calculate final signature:
/// sums both partial signatures over the combined public nonce.
pub fn aggsig_calculate_final_sig(
	&self,
	transaction_id: &Uuid,
	their_sig: &Signature,
	our_sig: &Signature,
	their_pub_nonce: &PublicKey,
) -> Result<Signature, Error> {
	// Add public nonces kR*G + kS*G
	let (_, sec_nonce) = self.aggsig_get_private_keys(transaction_id);
	let mut nonce_sum = their_pub_nonce.clone();
	let _ = nonce_sum.add_exp_assign(&self.secp, &sec_nonce);
	let sig = aggsig::add_signatures_single(&self.secp, their_sig, our_sig, &nonce_sum)?;
	Ok(sig)
}
/// Helper function to calculate final public key:
/// their public excess plus ours, used later to verify the final signature.
pub fn aggsig_calculate_final_pubkey(
	&self,
	transaction_id: &Uuid,
	their_public_key: &PublicKey,
) -> Result<PublicKey, Error> {
	let (our_sec_key, _) = self.aggsig_get_private_keys(transaction_id);
	let mut pk_sum = their_public_key.clone();
	let _ = pk_sum.add_exp_assign(&self.secp, &our_sec_key);
	Ok(pk_sum)
}
/// Just a simple sig, creates its own nonce, etc:
/// signs `msg` with the key derived for `key_id` (no shared context).
pub fn aggsig_sign_from_key_id(
	&self,
	msg: &Message,
	key_id: &Identifier,
) -> Result<Signature, Error> {
	let skey = self.derived_key(key_id)?;
	let sig = aggsig::sign_single(&self.secp, &msg, &skey, None, None, None)?;
	Ok(sig)
}
/// Verifies a sig given a commitment.
/// Extracts both candidate pubkeys from the commitment and accepts the sig
/// if it verifies against either one. Unfortunately we need this hack for
/// now (we just hope one is valid). TODO: Create better secp256k1 API to do this.
pub fn aggsig_verify_single_from_commit(
	secp: &Secp256k1,
	sig: &Signature,
	msg: &Message,
	commit: &Commitment,
) -> bool {
	let pubkeys = commit.to_two_pubkeys(secp);
	// Fix: replace the manual mutable-flag loop with `any` — identical
	// short-circuit behavior, no flag variable.
	pubkeys
		.iter()
		.any(|pubkey| aggsig::verify_single(secp, &sig, &msg, None, pubkey, false))
}
/// Just a simple sig, creates its own nonce, etc:
/// signs `msg` using the blinding factor itself as the secret key.
pub fn aggsig_sign_with_blinding(
	secp: &Secp256k1,
	msg: &Message,
	blinding: &BlindingFactor,
) -> Result<Signature, Error> {
	let skey = &blinding.secret_key(&secp)?;
	let sig = aggsig::sign_single(secp, &msg, skey, None, None, None)?;
	Ok(sig)
}
pub fn sign(&self, msg: &Message, key_id: &Identifier) -> Result<Signature, Error> {
let skey = self.derived_key(key_id)?;
let sig = self.secp.sign(msg, &skey)?;
@ -583,14 +249,8 @@ impl Keychain {
#[cfg(test)]
mod test {
use rand::thread_rng;
use uuid::Uuid;
use keychain::{BlindSum, BlindingFactor, Keychain};
use util::kernel_sig_msg;
use util::secp;
use util::secp::pedersen::ProofMessage;
use util::secp::key::SecretKey;
#[test]
@ -667,411 +327,4 @@ mod test {
BlindingFactor::from_secret_key(skey3),
);
}
// Full simulated sender/receiver aggsig exchange: both parties create a
// context for the same tx id, exchange public excesses/nonces, verify each
// other's partial sigs, the receiver assembles the final sig, and the final
// sig is checked against the precomputed kernel excess.
#[test]
fn aggsig_sender_receiver_interaction() {
	let sender_keychain = Keychain::from_random_seed().unwrap();
	let receiver_keychain = Keychain::from_random_seed().unwrap();

	// tx identifier for wallet interaction
	let tx_id = Uuid::new_v4();

	// Calculate the kernel excess here for convenience.
	// Normally this would happen during transaction building.
	let kernel_excess = {
		let skey1 = sender_keychain
			.derived_key(&sender_keychain.derive_key_id(1).unwrap())
			.unwrap();
		let skey2 = receiver_keychain
			.derived_key(&receiver_keychain.derive_key_id(1).unwrap())
			.unwrap();

		let keychain = Keychain::from_random_seed().unwrap();
		let blinding_factor = keychain
			.blind_sum(&BlindSum::new()
				.sub_blinding_factor(BlindingFactor::from_secret_key(skey1))
				.add_blinding_factor(BlindingFactor::from_secret_key(skey2)))
			.unwrap();
		keychain
			.secp
			.commit(0, blinding_factor.secret_key(&keychain.secp).unwrap())
			.unwrap()
	};

	// sender starts the tx interaction
	let (sender_pub_excess, sender_pub_nonce) = {
		let keychain = sender_keychain.clone();
		let skey = keychain
			.derived_key(&keychain.derive_key_id(1).unwrap())
			.unwrap();

		// dealing with an input here so we need to negate the blinding_factor
		// rather than use it as is
		let blinding_factor = keychain
			.blind_sum(&BlindSum::new()
				.sub_blinding_factor(BlindingFactor::from_secret_key(skey)))
			.unwrap();

		let blind = blinding_factor.secret_key(&keychain.secp()).unwrap();

		keychain.aggsig_create_context(&tx_id, blind).unwrap();
		keychain.aggsig_get_public_keys(&tx_id)
	};

	// receiver receives partial tx
	let (receiver_pub_excess, receiver_pub_nonce, sig_part) = {
		let keychain = receiver_keychain.clone();
		let key_id = keychain.derive_key_id(1).unwrap();

		// let blind = blind_sum.secret_key(&keychain.secp())?;
		let blind = keychain.derived_key(&key_id).unwrap();

		keychain.aggsig_create_context(&tx_id, blind).unwrap();
		let (pub_excess, pub_nonce) = keychain.aggsig_get_public_keys(&tx_id);
		keychain.aggsig_add_output(&tx_id, &key_id);

		let sig_part = keychain
			.aggsig_calculate_partial_sig(&tx_id, &sender_pub_nonce, 0, 0)
			.unwrap();
		(pub_excess, pub_nonce, sig_part)
	};

	// check the sender can verify the partial signature
	// received in the response back from the receiver
	{
		let keychain = sender_keychain.clone();
		let sig_verifies = keychain.aggsig_verify_partial_sig(
			&tx_id,
			&sig_part,
			&receiver_pub_nonce,
			&receiver_pub_excess,
			0,
			0,
		);
		assert!(sig_verifies);
	}

	// now sender signs with their key
	let sender_sig_part = {
		let keychain = sender_keychain.clone();
		keychain
			.aggsig_calculate_partial_sig(&tx_id, &receiver_pub_nonce, 0, 0)
			.unwrap()
	};

	// check the receiver can verify the partial signature
	// received by the sender
	{
		let keychain = receiver_keychain.clone();
		let sig_verifies = keychain.aggsig_verify_partial_sig(
			&tx_id,
			&sender_sig_part,
			&sender_pub_nonce,
			&sender_pub_excess,
			0,
			0,
		);
		assert!(sig_verifies);
	}

	// Receiver now builds final signature from sender and receiver parts
	let (final_sig, final_pubkey) = {
		let keychain = receiver_keychain.clone();

		// Receiver recreates their partial sig (we do not maintain state from earlier)
		let our_sig_part = keychain
			.aggsig_calculate_partial_sig(&tx_id, &sender_pub_nonce, 0, 0)
			.unwrap();

		// Receiver now generates final signature from the two parts
		let final_sig = keychain
			.aggsig_calculate_final_sig(
				&tx_id,
				&sender_sig_part,
				&our_sig_part,
				&sender_pub_nonce,
			)
			.unwrap();

		// Receiver calculates the final public key (to verify sig later)
		let final_pubkey = keychain
			.aggsig_calculate_final_pubkey(&tx_id, &sender_pub_excess)
			.unwrap();

		(final_sig, final_pubkey)
	};

	// Receiver checks the final signature verifies
	{
		let keychain = receiver_keychain.clone();

		// Receiver check the final signature verifies
		let sig_verifies =
			keychain.aggsig_verify_final_sig_build_msg(&final_sig, &final_pubkey, 0, 0);
		assert!(sig_verifies);
	}

	// Check we can verify the sig using the kernel excess
	{
		let keychain = Keychain::from_random_seed().unwrap();
		let msg = secp::Message::from_slice(&kernel_sig_msg(0, 0)).unwrap();

		let sig_verifies = Keychain::aggsig_verify_single_from_commit(
			&keychain.secp,
			&final_sig,
			&msg,
			&kernel_excess,
		);

		assert!(sig_verifies);
	}
}
// Same sender/receiver exchange as above, but the sender's key is "split"
// with a random kernel offset, which must be subtracted both when computing
// the kernel excess and when the sender builds its aggsig context.
#[test]
fn aggsig_sender_receiver_interaction_offset() {
	let sender_keychain = Keychain::from_random_seed().unwrap();
	let receiver_keychain = Keychain::from_random_seed().unwrap();

	// tx identifier for wallet interaction
	let tx_id = Uuid::new_v4();

	// This is the kernel offset that we use to split the key
	// Summing these at the block level prevents the
	// kernels from being used to reconstruct (or identify) individual transactions
	let kernel_offset = SecretKey::new(&sender_keychain.secp(), &mut thread_rng());

	// Calculate the kernel excess here for convenience.
	// Normally this would happen during transaction building.
	let kernel_excess = {
		let skey1 = sender_keychain
			.derived_key(&sender_keychain.derive_key_id(1).unwrap())
			.unwrap();
		let skey2 = receiver_keychain
			.derived_key(&receiver_keychain.derive_key_id(1).unwrap())
			.unwrap();

		let keychain = Keychain::from_random_seed().unwrap();
		let blinding_factor = keychain
			.blind_sum(&BlindSum::new()
				.sub_blinding_factor(BlindingFactor::from_secret_key(skey1))
				.add_blinding_factor(BlindingFactor::from_secret_key(skey2))
				// subtract the kernel offset here like as would when
				// verifying a kernel signature
				.sub_blinding_factor(BlindingFactor::from_secret_key(kernel_offset)))
			.unwrap();
		keychain
			.secp
			.commit(0, blinding_factor.secret_key(&keychain.secp).unwrap())
			.unwrap()
	};

	// sender starts the tx interaction
	let (sender_pub_excess, sender_pub_nonce) = {
		let keychain = sender_keychain.clone();
		let skey = keychain
			.derived_key(&keychain.derive_key_id(1).unwrap())
			.unwrap();

		// dealing with an input here so we need to negate the blinding_factor
		// rather than use it as is
		let blinding_factor = keychain
			.blind_sum(&BlindSum::new()
				.sub_blinding_factor(BlindingFactor::from_secret_key(skey))
				// subtract the kernel offset to create an aggsig context
				// with our "split" key
				.sub_blinding_factor(BlindingFactor::from_secret_key(kernel_offset)))
			.unwrap();

		let blind = blinding_factor.secret_key(&keychain.secp()).unwrap();

		keychain.aggsig_create_context(&tx_id, blind).unwrap();
		keychain.aggsig_get_public_keys(&tx_id)
	};

	// receiver receives partial tx
	let (receiver_pub_excess, receiver_pub_nonce, sig_part) = {
		let keychain = receiver_keychain.clone();
		let key_id = keychain.derive_key_id(1).unwrap();

		let blind = keychain.derived_key(&key_id).unwrap();

		keychain.aggsig_create_context(&tx_id, blind).unwrap();
		let (pub_excess, pub_nonce) = keychain.aggsig_get_public_keys(&tx_id);
		keychain.aggsig_add_output(&tx_id, &key_id);

		let sig_part = keychain
			.aggsig_calculate_partial_sig(&tx_id, &sender_pub_nonce, 0, 0)
			.unwrap();
		(pub_excess, pub_nonce, sig_part)
	};

	// check the sender can verify the partial signature
	// received in the response back from the receiver
	{
		let keychain = sender_keychain.clone();
		let sig_verifies = keychain.aggsig_verify_partial_sig(
			&tx_id,
			&sig_part,
			&receiver_pub_nonce,
			&receiver_pub_excess,
			0,
			0,
		);
		assert!(sig_verifies);
	}

	// now sender signs with their key
	let sender_sig_part = {
		let keychain = sender_keychain.clone();
		keychain
			.aggsig_calculate_partial_sig(&tx_id, &receiver_pub_nonce, 0, 0)
			.unwrap()
	};

	// check the receiver can verify the partial signature
	// received by the sender
	{
		let keychain = receiver_keychain.clone();
		let sig_verifies = keychain.aggsig_verify_partial_sig(
			&tx_id,
			&sender_sig_part,
			&sender_pub_nonce,
			&sender_pub_excess,
			0,
			0,
		);
		assert!(sig_verifies);
	}

	// Receiver now builds final signature from sender and receiver parts
	let (final_sig, final_pubkey) = {
		let keychain = receiver_keychain.clone();

		// Receiver recreates their partial sig (we do not maintain state from earlier)
		let our_sig_part = keychain
			.aggsig_calculate_partial_sig(&tx_id, &sender_pub_nonce, 0, 0)
			.unwrap();

		// Receiver now generates final signature from the two parts
		let final_sig = keychain
			.aggsig_calculate_final_sig(
				&tx_id,
				&sender_sig_part,
				&our_sig_part,
				&sender_pub_nonce,
			)
			.unwrap();

		// Receiver calculates the final public key (to verify sig later)
		let final_pubkey = keychain
			.aggsig_calculate_final_pubkey(&tx_id, &sender_pub_excess)
			.unwrap();

		(final_sig, final_pubkey)
	};

	// Receiver checks the final signature verifies
	{
		let keychain = receiver_keychain.clone();

		// Receiver check the final signature verifies
		let sig_verifies =
			keychain.aggsig_verify_final_sig_build_msg(&final_sig, &final_pubkey, 0, 0);
		assert!(sig_verifies);
	}

	// Check we can verify the sig using the kernel excess
	{
		let keychain = Keychain::from_random_seed().unwrap();
		let msg = secp::Message::from_slice(&kernel_sig_msg(0, 0)).unwrap();

		let sig_verifies = Keychain::aggsig_verify_single_from_commit(
			&keychain.secp,
			&final_sig,
			&msg,
			&kernel_excess,
		);

		assert!(sig_verifies);
	}
}
// Exercises bullet-proof rewinding: a proof rewinds correctly with the
// right key/commit/extra-data, and fails (or yields gibberish) with a wrong
// nonce, a wrong commitment, or wrong extra committed data.
#[test]
fn test_rewind_range_proof() {
	let keychain = Keychain::from_random_seed().unwrap();
	let key_id = keychain.derive_key_id(1).unwrap();
	let commit = keychain.commit(5, &key_id).unwrap();
	let msg = ProofMessage::from_bytes(&[0u8; 64]);
	let extra_data = [99u8; 64];

	let proof = keychain
		.range_proof(5, &key_id, commit, Some(extra_data.to_vec().clone()), msg)
		.unwrap();
	let proof_info = keychain
		.rewind_range_proof(&key_id, commit, Some(extra_data.to_vec().clone()), proof)
		.unwrap();

	assert_eq!(proof_info.success, true);

	// now check the recovered message is "empty" (but not truncated) i.e. all
	// zeroes
	//Value is in the message in this case
	assert_eq!(
		proof_info.message,
		secp::pedersen::ProofMessage::from_bytes(&[0; secp::constants::BULLET_PROOF_MSG_SIZE])
	);

	let key_id2 = keychain.derive_key_id(2).unwrap();

	// cannot rewind with a different nonce
	let proof_info = keychain
		.rewind_range_proof(&key_id2, commit, Some(extra_data.to_vec().clone()), proof)
		.unwrap();
	// With bullet proofs, if you provide the wrong nonce you'll get gibberish back
	// as opposed to a failure to recover the message
	assert_ne!(
		proof_info.message,
		secp::pedersen::ProofMessage::from_bytes(&[0; secp::constants::BULLET_PROOF_MSG_SIZE])
	);
	assert_eq!(proof_info.value, 0);

	// cannot rewind with a commitment to the same value using a different key
	let commit2 = keychain.commit(5, &key_id2).unwrap();
	let proof_info = keychain
		.rewind_range_proof(&key_id, commit2, Some(extra_data.to_vec().clone()), proof)
		.unwrap();
	assert_eq!(proof_info.success, false);
	assert_eq!(proof_info.value, 0);

	// cannot rewind with a commitment to a different value
	let commit3 = keychain.commit(4, &key_id).unwrap();
	let proof_info = keychain
		.rewind_range_proof(&key_id, commit3, Some(extra_data.to_vec().clone()), proof)
		.unwrap();
	assert_eq!(proof_info.success, false);
	assert_eq!(proof_info.value, 0);

	// cannot rewind with wrong extra committed data
	let commit3 = keychain.commit(4, &key_id).unwrap();
	let wrong_extra_data = [98u8; 64];
	let _should_err = keychain
		.rewind_range_proof(
			&key_id,
			commit3,
			Some(wrong_extra_data.to_vec().clone()),
			proof,
		)
		.unwrap();

	// note: these re-check the previous (failed) proof_info, as the rewind
	// above binds its result to `_should_err`
	assert_eq!(proof_info.success, false);
	assert_eq!(proof_info.value, 0);
}
}

View file

@ -26,10 +26,10 @@ extern crate serde_json;
extern crate slog;
extern crate uuid;
mod blind;
mod extkey;
pub mod blind;
pub mod extkey;
pub use blind::{BlindSum, BlindingFactor};
pub use extkey::{ExtendedKey, Identifier, IDENTIFIER_SIZE};
pub mod keychain;
pub use keychain::{AggSigTxContext, Error, Keychain};
pub use keychain::{Error, Keychain};

View file

@ -16,3 +16,6 @@ time = "0.1"
grin_core = { path = "../core" }
grin_keychain = { path = "../keychain" }
grin_util = { path = "../util" }
[dev-dependencies]
grin_wallet = { path = "../wallet" }

View file

@ -1,11 +1,25 @@
// This file is (hopefully) temporary.
// Copyright 2018 The Grin Developers
//
// It contains a trait based on (but not exactly equal to) the trait defined
// for the blockchain Output set, discussed at
// https://github.com/ignopeverell/grin/issues/29, and a dummy implementation
// of said trait.
// Notably, OutputDiff has been left off, and the question of how to handle
// abstract return types has been deferred.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! This file is (hopefully) temporary.
//!
//! It contains a trait based on (but not exactly equal to) the trait defined
//! for the blockchain Output set, discussed at
//! https://github.com/ignopeverell/grin/issues/29, and a dummy implementation
//! of said trait.
//! Notably, OutputDiff has been left off, and the question of how to handle
//! abstract return types has been deferred.
use std::collections::HashMap;
use std::clone::Clone;
@ -25,16 +39,19 @@ pub struct DummyOutputSet {
#[allow(dead_code)]
impl DummyOutputSet {
/// Empty output set
pub fn empty() -> DummyOutputSet {
DummyOutputSet {
outputs: HashMap::new(),
}
}
/// roots
pub fn root(&self) -> hash::Hash {
hash::ZERO_HASH
}
/// apply a block
pub fn apply(&self, b: &block::Block) -> DummyOutputSet {
let mut new_outputs = self.outputs.clone();
@ -49,6 +66,7 @@ impl DummyOutputSet {
}
}
/// create with block
pub fn with_block(&mut self, b: &block::Block) {
for input in &b.inputs {
self.outputs.remove(&input.commitment());
@ -58,12 +76,14 @@ impl DummyOutputSet {
}
}
/// rewind
pub fn rewind(&self, _: &block::Block) -> DummyOutputSet {
DummyOutputSet {
outputs: HashMap::new(),
}
}
/// get an output
pub fn get_output(&self, output_ref: &Commitment) -> Option<&transaction::Output> {
self.outputs.get(output_ref)
}
@ -74,7 +94,7 @@ impl DummyOutputSet {
}
}
// only for testing: add an output to the map
/// only for testing: add an output to the map
pub fn with_output(&self, output: transaction::Output) -> DummyOutputSet {
let mut new_outputs = self.outputs.clone();
new_outputs.insert(output.commitment(), output);
@ -94,6 +114,7 @@ pub struct DummyChainImpl {
#[allow(dead_code)]
impl DummyChainImpl {
/// new dummy chain
pub fn new() -> DummyChainImpl {
DummyChainImpl {
output: RwLock::new(DummyOutputSet {
@ -152,8 +173,12 @@ impl DummyChain for DummyChainImpl {
}
}
/// Dummy chain trait
pub trait DummyChain: BlockChain {
/// update output set
fn update_output_set(&mut self, new_output: DummyOutputSet);
/// apply a block
fn apply_block(&self, b: &block::Block);
/// store header
fn store_head_header(&self, block_header: &block::BlockHeader);
}

View file

@ -139,13 +139,14 @@ impl fmt::Debug for Edge {
/// The generic graph container. Both graphs, the pool and orphans, embed this
/// structure and add additional capability on top of it.
pub struct DirectedGraph {
edges: HashMap<Commitment, Edge>,
vertices: Vec<PoolEntry>,
// A small optimization: keeping roots (vertices with in-degree 0) in a
// separate list makes topological sort a bit faster. (This is true for
// Kahn's, not sure about other implementations)
roots: Vec<PoolEntry>,
/// Edges
pub edges: HashMap<Commitment, Edge>,
/// Vertices
pub vertices: Vec<PoolEntry>,
/// A small optimization: keeping roots (vertices with in-degree 0) in a
/// separate list makes topological sort a bit faster. (This is true for
/// Kahn's, not sure about other implementations)
pub roots: Vec<PoolEntry>,
}
impl DirectedGraph {
@ -290,76 +291,3 @@ pub fn transaction_identifier(tx: &core::transaction::Transaction) -> core::hash
// core::transaction::merkle_inputs_outputs(&tx.inputs, &tx.outputs)
tx.hash()
}
#[cfg(test)]
mod tests {
use super::*;
use keychain::Keychain;
use rand;
use core::core::OutputFeatures;
use core::core::transaction::ProofMessageElements;
#[test]
fn test_add_entry() {
let keychain = Keychain::from_random_seed().unwrap();
let key_id1 = keychain.derive_key_id(1).unwrap();
let key_id2 = keychain.derive_key_id(2).unwrap();
let key_id3 = keychain.derive_key_id(3).unwrap();
let output_commit = keychain.commit(70, &key_id1).unwrap();
let inputs = vec![
core::transaction::Input::new(
OutputFeatures::DEFAULT_OUTPUT,
keychain.commit(50, &key_id2).unwrap(),
None,
None,
),
core::transaction::Input::new(
OutputFeatures::DEFAULT_OUTPUT,
keychain.commit(25, &key_id3).unwrap(),
None,
None,
),
];
let msg = ProofMessageElements::new(100, &key_id1);
let output = core::transaction::Output {
features: OutputFeatures::DEFAULT_OUTPUT,
commit: output_commit,
proof: keychain
.range_proof(100, &key_id1, output_commit, None, msg.to_proof_message())
.unwrap(),
};
let kernel = core::transaction::TxKernel::empty()
.with_fee(5)
.with_lock_height(0);
let test_transaction =
core::transaction::Transaction::new(inputs, vec![output], vec![kernel]);
let test_pool_entry = PoolEntry::new(&test_transaction);
let incoming_edge_1 = Edge::new(
Some(random_hash()),
Some(core::hash::ZERO_HASH),
OutputIdentifier::from_output(&output),
);
let mut test_graph = DirectedGraph::empty();
test_graph.add_entry(test_pool_entry, vec![incoming_edge_1]);
assert_eq!(test_graph.vertices.len(), 1);
assert_eq!(test_graph.roots.len(), 0);
assert_eq!(test_graph.edges.len(), 1);
}
/// For testing/debugging: a random tx hash
fn random_hash() -> core::hash::Hash {
let hash_bytes: [u8; 32] = rand::random();
core::hash::Hash(hash_bytes)
}
}

View file

@ -22,9 +22,9 @@
#![warn(missing_docs)]
pub mod graph;
mod types;
mod blockchain;
mod pool;
pub mod types;
pub mod blockchain;
pub mod pool;
extern crate blake2_rfc as blake2;
extern crate grin_core as core;

File diff suppressed because it is too large Load diff

View file

@ -92,11 +92,25 @@ pub struct TxSource {
/// This enum describes the parent for a given input of a transaction.
#[derive(Clone)]
pub enum Parent {
/// Unknown
Unknown,
/// Block Transaction
BlockTransaction,
PoolTransaction { tx_ref: hash::Hash },
StemPoolTransaction { tx_ref: hash::Hash },
AlreadySpent { other_tx: hash::Hash },
/// Pool Transaction
PoolTransaction {
/// Transaction reference
tx_ref: hash::Hash,
},
/// StemPool Transaction
StemPoolTransaction {
/// Transaction reference
tx_ref: hash::Hash,
},
/// AlreadySpent
AlreadySpent {
/// Other transaction reference
other_tx: hash::Hash,
},
}
impl fmt::Debug for Parent {
@ -244,6 +258,7 @@ pub struct Pool {
}
impl Pool {
/// Return an empty pool
pub fn empty() -> Pool {
Pool {
graph: graph::DirectedGraph::empty(),
@ -263,18 +278,22 @@ impl Pool {
.map(|x| x.destination_hash().unwrap())
}
/// Length of roots
pub fn len_roots(&self) -> usize {
self.graph.len_roots()
}
/// Length of vertices
pub fn len_vertices(&self) -> usize {
self.graph.len_vertices()
}
/// Consumed outputs
pub fn get_blockchain_spent(&self, c: &Commitment) -> Option<&graph::Edge> {
self.consumed_blockchain_outputs.get(c)
}
/// Add transaction
pub fn add_pool_transaction(
&mut self,
pool_entry: graph::PoolEntry,
@ -309,9 +328,9 @@ impl Pool {
}
}
// More relax way for stempool transaction in order to accept scenario such as:
// Parent is in mempool, child is allowed in stempool
//
/// More relaxed way for stempool transactions, in order to accept scenarios such as:
/// Parent is in mempool, child is allowed in stempool
///
pub fn add_stempool_transaction(
&mut self,
pool_entry: graph::PoolEntry,
@ -342,10 +361,12 @@ impl Pool {
}
}
/// Update roots
pub fn update_roots(&mut self) {
self.graph.update_roots()
}
/// Remove transaction
pub fn remove_pool_transaction(
&mut self,
tx: &transaction::Transaction,
@ -429,6 +450,7 @@ pub struct Orphans {
}
impl Orphans {
/// empty set
pub fn empty() -> Orphans {
Orphans {
graph: graph::DirectedGraph::empty(),
@ -450,6 +472,7 @@ impl Orphans {
.map(|x| x.destination_hash().unwrap())
}
/// unknown output
pub fn get_unknown_output(&self, output: &Commitment) -> Option<&graph::Edge> {
self.missing_outputs.get(output)
}
@ -571,14 +594,17 @@ pub trait TransactionGraphContainer {
self.get_internal_spent_output(c)
}
/// number of root transactions
fn num_root_transactions(&self) -> usize {
self.get_graph().len_roots()
}
/// number of transactions
fn num_transactions(&self) -> usize {
self.get_graph().len_vertices()
}
/// number of output edges
fn num_output_edges(&self) -> usize {
self.get_graph().len_edges()
}

96
pool/tests/graph.rs Normal file
View file

@ -0,0 +1,96 @@
// Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Top-level Graph tests
extern crate grin_core as core;
extern crate grin_keychain as keychain;
extern crate grin_pool as pool;
extern crate grin_wallet as wallet;
extern crate rand;
use keychain::Keychain;
use core::core::OutputFeatures;
use core::core::transaction::ProofMessageElements;
use wallet::libwallet::proof;
// Builds a minimal two-input / one-output / one-kernel transaction, wraps it
// in a PoolEntry, and checks that DirectedGraph::add_entry registers exactly
// one vertex, one edge, and no roots (the entry has an incoming edge, so its
// in-degree is non-zero).
#[test]
fn test_add_entry() {
	let keychain = Keychain::from_random_seed().unwrap();
	let key_id1 = keychain.derive_key_id(1).unwrap();
	let key_id2 = keychain.derive_key_id(2).unwrap();
	let key_id3 = keychain.derive_key_id(3).unwrap();

	// commitment for the single 70-unit output of the test transaction
	let output_commit = keychain.commit(70, &key_id1).unwrap();

	// two inputs (50 + 25) spending previous outputs; no merkle proofs needed here
	let inputs = vec![
		core::core::transaction::Input::new(
			OutputFeatures::DEFAULT_OUTPUT,
			keychain.commit(50, &key_id2).unwrap(),
			None,
			None,
		),
		core::core::transaction::Input::new(
			OutputFeatures::DEFAULT_OUTPUT,
			keychain.commit(25, &key_id3).unwrap(),
			None,
			None,
		),
	];

	let msg = ProofMessageElements::new(100, &key_id1);

	let output = core::core::transaction::Output {
		features: OutputFeatures::DEFAULT_OUTPUT,
		commit: output_commit,
		proof: proof::create(
			&keychain,
			100,
			&key_id1,
			output_commit,
			None,
			msg.to_proof_message(),
		).unwrap(),
	};

	let kernel = core::core::transaction::TxKernel::empty()
		.with_fee(5)
		.with_lock_height(0);

	let test_transaction =
		core::core::transaction::Transaction::new(inputs, vec![output], vec![kernel]);

	let test_pool_entry = pool::graph::PoolEntry::new(&test_transaction);

	// an incoming edge from an arbitrary source tx to this entry's output
	let incoming_edge_1 = pool::graph::Edge::new(
		Some(random_hash()),
		Some(core::core::hash::ZERO_HASH),
		core::core::OutputIdentifier::from_output(&output),
	);

	let mut test_graph = pool::graph::DirectedGraph::empty();

	test_graph.add_entry(test_pool_entry, vec![incoming_edge_1]);

	// one vertex added; it has an incoming edge so it is not a root
	assert_eq!(test_graph.vertices.len(), 1);
	assert_eq!(test_graph.roots.len(), 0);
	assert_eq!(test_graph.edges.len(), 1);
}
/// For testing/debugging: build a hash from 32 random bytes
fn random_hash() -> core::core::hash::Hash {
	core::core::hash::Hash(rand::random::<[u8; 32]>())
}

1150
pool/tests/pool.rs Normal file

File diff suppressed because it is too large Load diff

View file

@ -225,7 +225,8 @@ fn burn_reward(block_fees: BlockFees) -> Result<(core::Output, core::TxKernel, B
let keychain = Keychain::from_random_seed().unwrap();
let key_id = keychain.derive_key_id(1).unwrap();
let (out, kernel) =
core::Block::reward_output(&keychain, &key_id, block_fees.fees, block_fees.height).unwrap();
wallet::libwallet::reward::output(&keychain, &key_id, block_fees.fees, block_fees.height)
.unwrap();
Ok((out, kernel, block_fees))
}

View file

@ -222,7 +222,7 @@ impl LocalServerContainer {
"starting test Miner on port {}",
self.config.p2p_server_port
);
s.start_test_miner(wallet_url);
s.start_test_miner(Some(self.config.coinbase_wallet_address.clone()));
}
for p in &mut self.peer_list {
@ -262,7 +262,11 @@ impl LocalServerContainer {
self.wallet_config.data_file_dir = self.working_dir.clone();
let _ = fs::create_dir_all(self.wallet_config.clone().data_file_dir);
wallet::WalletSeed::init_file(&self.wallet_config).unwrap();
let r = wallet::WalletSeed::init_file(&self.wallet_config);
if let Err(e) = r {
//panic!("Error initting wallet seed: {}", e);
}
let wallet_seed = wallet::WalletSeed::from_file(&self.wallet_config)
.expect("Failed to read wallet seed file.");

View file

@ -35,7 +35,7 @@ use util::LOGGER;
/// Start 1 node mining and two wallets, then send a few
/// transactions from one to the other
// #[test]
//#[test]
fn basic_wallet_transactions() {
let test_name_dir = "test_servers";
core::global::set_mining_mode(core::global::ChainTypes::AutomatedTesting);
@ -50,6 +50,7 @@ fn basic_wallet_transactions() {
let mut coinbase_config = LocalServerContainerConfig::default();
coinbase_config.name = String::from("coinbase_wallet");
coinbase_config.wallet_validating_node_url = String::from("http://127.0.0.1:30001");
coinbase_config.coinbase_wallet_address = String::from("http://127.0.0.1:13415");
coinbase_config.wallet_port = 10002;
let coinbase_wallet = Arc::new(Mutex::new(
LocalServerContainer::new(coinbase_config).unwrap(),

View file

@ -30,8 +30,6 @@ extern crate memmap;
extern crate rocksdb;
extern crate serde;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate slog;
pub mod pmmr;

View file

@ -14,6 +14,7 @@ failure_derive = "0.1"
futures = "0.1"
hyper = "0.11"
iron = "0.5"
lazy_static = "0.2"
prettytable-rs = "0.6"
rand = "0.3"
router = "0.5"

View file

@ -41,6 +41,9 @@ extern crate router;
extern crate tokio_core;
extern crate tokio_retry;
#[macro_use]
extern crate lazy_static;
extern crate grin_api as api;
extern crate grin_core as core;
extern crate grin_keychain as keychain;
@ -56,6 +59,7 @@ mod types;
mod restore;
pub mod client;
pub mod server;
pub mod libwallet;
pub use outputs::show_outputs;
pub use info::{retrieve_info, show_info};

View file

@ -0,0 +1,269 @@
// Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Aggsig library definitions
use std::collections::HashMap;
use util::secp::key::{PublicKey, SecretKey};
use util::secp::{self, aggsig, Message, Secp256k1, Signature};
use util::secp::pedersen::Commitment;
use util::kernel_sig_msg;
use uuid::Uuid;
use keychain::Keychain;
use keychain::extkey::Identifier;
use keychain::blind::BlindingFactor;
use libwallet::error::Error;
#[derive(Clone, Debug)]
/// Holds the context for a single aggsig transaction
pub struct Context {
	/// Transaction ID
	pub transaction_id: Uuid,
	/// Secret key (of which public is shared)
	pub sec_key: SecretKey,
	/// Secret nonce (of which public is shared)
	/// (basically a SecretKey)
	pub sec_nonce: SecretKey,
	/// If I'm the recipient, store my outputs between invocations (that I need
	/// to sum)
	pub output_ids: Vec<Identifier>,
}
#[derive(Clone, Debug)]
/// Holds many contexts, to support multiple transactions hitting a wallet receiver
/// at once
pub struct ContextManager {
	// map from transaction id to its in-progress aggsig context
	contexts: HashMap<Uuid, Context>,
}
impl ContextManager {
	/// Create a new, empty context manager
	pub fn new() -> ContextManager {
		ContextManager {
			contexts: HashMap::new(),
		}
	}

	/// Creates a context for a transaction id if required
	/// otherwise does nothing; returns a clone of the (possibly pre-existing)
	/// context for that id
	pub fn create_context(
		&mut self,
		secp: &secp::Secp256k1,
		transaction_id: &Uuid,
		sec_key: SecretKey,
	) -> Context {
		// Only build a fresh context (and generate a new secret nonce) the
		// first time this transaction id is seen.
		self.contexts
			.entry(transaction_id.clone())
			.or_insert_with(|| Context {
				sec_key: sec_key,
				transaction_id: transaction_id.clone(),
				sec_nonce: aggsig::export_secnonce_single(secp).unwrap(),
				output_ids: vec![],
			})
			.clone()
	}

	/// Retrieve a context by transaction id
	pub fn get_context(&self, transaction_id: &Uuid) -> Context {
		self.contexts.get(transaction_id).unwrap().clone()
	}

	/// Save (or replace) a context under its transaction id
	pub fn save_context(&mut self, c: Context) {
		self.contexts.insert(c.transaction_id.clone(), c);
	}
}
impl Context {
	/// Tracks an output contributing to my excess value (if it needs to
	/// be kept between invocations
	pub fn add_output(&mut self, output_id: &Identifier) {
		self.output_ids.push(output_id.clone());
	}

	/// Returns all stored outputs
	pub fn get_outputs(&self) -> Vec<Identifier> {
		self.output_ids.clone()
	}

	/// Returns private key, private nonce
	pub fn get_private_keys(&self) -> (SecretKey, SecretKey) {
		(self.sec_key.clone(), self.sec_nonce.clone())
	}

	/// Returns public key, public nonce
	pub fn get_public_keys(&self, secp: &Secp256k1) -> (PublicKey, PublicKey) {
		(
			PublicKey::from_secret_key(secp, &self.sec_key).unwrap(),
			PublicKey::from_secret_key(secp, &self.sec_nonce).unwrap(),
		)
	}

	/// Note 'secnonce' here is used to perform the signature, while 'pubnonce' just allows you to
	/// provide a custom public nonce to include while calculating e
	/// nonce_sum is the sum used to decide whether secnonce should be inverted during sig time
	pub fn sign_single(
		&self,
		secp: &Secp256k1,
		msg: &Message,
		secnonce: Option<&SecretKey>,
		pubnonce: Option<&PublicKey>,
		nonce_sum: Option<&PublicKey>,
	) -> Result<Signature, Error> {
		let sig = aggsig::sign_single(secp, msg, &self.sec_key, secnonce, pubnonce, nonce_sum)?;
		Ok(sig)
	}

	/// Verifies the other party's final sig corresponds with what we're
	/// expecting: the sig must validate against the provided public key for
	/// the standard kernel message built from fee and lock_height.
	pub fn verify_final_sig_build_msg(
		&self,
		secp: &Secp256k1,
		sig: &Signature,
		pubkey: &PublicKey,
		fee: u64,
		lock_height: u64,
	) -> bool {
		let msg = secp::Message::from_slice(&kernel_sig_msg(fee, lock_height)).unwrap();
		verify_single(secp, sig, &msg, None, pubkey, true)
	}

	/// Verifies the other party's partial sig corresponds with what we're
	/// expecting, using the sum of their public nonce and our secret nonce's
	/// public counterpart as the total nonce.
	pub fn verify_partial_sig(
		&self,
		secp: &Secp256k1,
		sig: &Signature,
		other_pub_nonce: &PublicKey,
		pubkey: &PublicKey,
		fee: u64,
		lock_height: u64,
	) -> bool {
		let (_, sec_nonce) = self.get_private_keys();
		let mut nonce_sum = other_pub_nonce.clone();
		let _ = nonce_sum.add_exp_assign(secp, &sec_nonce);
		let msg = secp::Message::from_slice(&kernel_sig_msg(fee, lock_height)).unwrap();
		verify_single(secp, sig, &msg, Some(&nonce_sum), pubkey, true)
	}

	/// Calculates our partial signature over the kernel message (fee and
	/// lock_height), using the nonce sum (other party's public nonce plus
	/// ours) in e.
	pub fn calculate_partial_sig(
		&self,
		secp: &Secp256k1,
		other_pub_nonce: &PublicKey,
		fee: u64,
		lock_height: u64,
	) -> Result<Signature, Error> {
		// Add public nonces kR*G + kS*G
		let (_, sec_nonce) = self.get_private_keys();
		let mut nonce_sum = other_pub_nonce.clone();
		let _ = nonce_sum.add_exp_assign(secp, &sec_nonce);
		let msg = secp::Message::from_slice(&kernel_sig_msg(fee, lock_height))?;

		//Now calculate signature using message M=fee, nonce in e=nonce_sum
		self.sign_single(
			secp,
			&msg,
			Some(&sec_nonce),
			Some(&nonce_sum),
			Some(&nonce_sum),
		)
	}

	/// Helper function to calculate final signature
	pub fn calculate_final_sig(
		&self,
		secp: &Secp256k1,
		their_sig: &Signature,
		our_sig: &Signature,
		their_pub_nonce: &PublicKey,
	) -> Result<Signature, Error> {
		// Add public nonces kR*G + kS*G
		let (_, sec_nonce) = self.get_private_keys();
		let mut nonce_sum = their_pub_nonce.clone();
		let _ = nonce_sum.add_exp_assign(secp, &sec_nonce);
		let sig = aggsig::add_signatures_single(&secp, their_sig, our_sig, &nonce_sum)?;
		Ok(sig)
	}

	/// Helper function to calculate final public key
	pub fn calculate_final_pubkey(
		&self,
		secp: &Secp256k1,
		their_public_key: &PublicKey,
	) -> Result<PublicKey, Error> {
		let (our_sec_key, _) = self.get_private_keys();
		let mut pk_sum = their_public_key.clone();
		let _ = pk_sum.add_exp_assign(secp, &our_sec_key);
		Ok(pk_sum)
	}
}
// Contextless functions
/// Creates a simple single signature with the private key derived from the
/// given key id; the nonce is generated internally.
pub fn sign_from_key_id(
	secp: &Secp256k1,
	k: &Keychain,
	msg: &Message,
	key_id: &Identifier,
) -> Result<Signature, Error> {
	let derived_key = k.derived_key(key_id)?;
	Ok(aggsig::sign_single(secp, msg, &derived_key, None, None, None)?)
}
/// Verifies a sig given a commitment
pub fn verify_single_from_commit(
	secp: &Secp256k1,
	sig: &Signature,
	msg: &Message,
	commit: &Commitment,
) -> bool {
	// Extract the pubkey, unfortunately we need this hack for now, (we just hope
	// one is valid) TODO: Create better secp256k1 API to do this
	commit
		.to_two_pubkeys(secp)
		.iter()
		.any(|pubkey| aggsig::verify_single(secp, &sig, &msg, None, pubkey, false))
}
/// Verifies an aggsig signature
pub fn verify_single(
	secp: &Secp256k1,
	sig: &Signature,
	msg: &Message,
	pubnonce: Option<&PublicKey>,
	pubkey: &PublicKey,
	is_partial: bool,
) -> bool {
	aggsig::verify_single(secp, sig, msg, pubnonce, pubkey, is_partial)
}
/// Creates a simple single signature with the secret key extracted from the
/// given blinding factor; the nonce is generated internally.
pub fn sign_with_blinding(
	secp: &Secp256k1,
	msg: &Message,
	blinding: &BlindingFactor,
) -> Result<Signature, Error> {
	let key = blinding.secret_key(secp)?;
	Ok(aggsig::sign_single(secp, msg, &key, None, None, None)?)
}

View file

@ -0,0 +1,15 @@
// Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Stub while figuring out wallet redesign

View file

@ -27,9 +27,10 @@
use util::{kernel_sig_msg, secp};
use core::{Input, Output, OutputFeatures, ProofMessageElements, Transaction, TxKernel};
use core::hash::Hash;
use core::pmmr::MerkleProof;
use core::core::{Input, Output, OutputFeatures, ProofMessageElements, Transaction, TxKernel};
use core::core::hash::Hash;
use core::core::pmmr::MerkleProof;
use libwallet::{aggsig, proof};
use keychain;
use keychain::{BlindSum, BlindingFactor, Identifier, Keychain};
use util::LOGGER;
@ -105,10 +106,14 @@ pub fn output(value: u64, key_id: Identifier) -> Box<Append> {
let msg = ProofMessageElements::new(value, &key_id);
let rproof = build
.keychain
.range_proof(value, &key_id, commit, None, msg.to_proof_message())
.unwrap();
let rproof = proof::create(
build.keychain,
value,
&key_id,
commit,
None,
msg.to_proof_message(),
).unwrap();
(
tx.with_output(Output {
@ -214,7 +219,7 @@ pub fn transaction(
let skey = blind_sum.secret_key(&keychain.secp())?;
kern.excess = keychain.secp().commit(0, skey)?;
kern.excess_sig = Keychain::aggsig_sign_with_blinding(&keychain.secp(), &msg, &blind_sum)?;
kern.excess_sig = aggsig::sign_with_blinding(&keychain.secp(), &msg, &blind_sum).unwrap();
tx.kernels.push(kern);
@ -243,7 +248,7 @@ pub fn transaction_with_offset(
// generate kernel excess and excess_sig using the split key k1
let skey = k1.secret_key(&keychain.secp())?;
kern.excess = ctx.keychain.secp().commit(0, skey)?;
kern.excess_sig = Keychain::aggsig_sign_with_blinding(&keychain.secp(), &msg, &k1)?;
kern.excess_sig = aggsig::sign_with_blinding(&keychain.secp(), &msg, &k1).unwrap();
// store the kernel offset (k2) on the tx itself
// commitments will sum correctly when including the offset

View file

@ -0,0 +1,60 @@
// Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Wallet lib errors
use util::secp;
use keychain::{self, extkey};
#[derive(PartialEq, Eq, Clone, Debug)]
/// Errors that can occur in wallet lib operations
pub enum Error {
	/// Wraps an underlying secp256k1 error
	Secp(secp::Error),
	/// Wraps a keychain error
	Keychain(keychain::Error),
	/// Wraps an extended key error
	ExtendedKey(extkey::Error),
	/// Transaction building error, with a description
	Transaction(String),
	/// Range proof creation/rewind error, with a description
	RangeProof(String),
}
// Allow `?` conversion from underlying secp256k1 errors.
impl From<secp::Error> for Error {
	fn from(e: secp::Error) -> Error {
		Error::Secp(e)
	}
}

// Allow `?` conversion from extended key errors.
impl From<extkey::Error> for Error {
	fn from(e: extkey::Error) -> Error {
		Error::ExtendedKey(e)
	}
}

// Allow `?` conversion from keychain errors.
impl From<keychain::Error> for Error {
	fn from(e: keychain::Error) -> Error {
		Error::Keychain(e)
	}
}
/*impl error::Error for Error {
fn description(&self) -> &str {
match *self {
_ => "some kind of wallet lib error",
}
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
_ => write!(f, "some kind of wallet lib error"),
}
}
}*/

View file

@ -0,0 +1,29 @@
// Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Wallet lib... should be used by clients to build wallets and
//! encapsulate all functions needed to build transactions and operate a wallet
#![deny(non_upper_case_globals)]
#![deny(non_camel_case_types)]
#![deny(non_snake_case)]
#![deny(unused_mut)]
#![warn(missing_docs)]
pub mod error;
pub mod aggsig;
pub mod blind;
pub mod proof;
pub mod reward;
pub mod build;

View file

@ -0,0 +1,113 @@
// Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Rangeproof library functions
use keychain::Keychain;
use util::secp::pedersen::{Commitment, ProofInfo, ProofMessage, RangeProof};
use util::secp::key::SecretKey;
use util::secp::{self, Secp256k1};
use util::logger::LOGGER;
use keychain::extkey::Identifier;
use libwallet::error::Error;
use blake2;
/// Derive the deterministic nonce used for rewindable range proofs:
/// hash(commit|root_key), so the wallet owner can later regenerate the same
/// nonce to rewind proofs for its own outputs.
pub fn create_nonce(k: &Keychain, commit: &Commitment) -> SecretKey {
	// hash(commit|masterkey) as nonce
	let root_key = k.root_key_id().to_bytes();
	let res = blake2::blake2b::blake2b(32, &commit.0, &root_key);
	let res = res.as_bytes();
	let mut ret_val = [0; 32];
	// blake2b was asked for exactly 32 bytes, matching ret_val's length
	ret_val.copy_from_slice(res);
	// NOTE(review): from_slice is assumed never to fail for 32 bytes of hash
	// output (an out-of-range scalar is astronomically unlikely) — same
	// behavior as before, but worth confirming against the secp bindings
	SecretKey::from_slice(k.secp(), &ret_val).unwrap()
}
/// Create a bullet proof for `amount` under the key identified by `key_id`,
/// optionally committing to `extra_data` and embedding `msg`, which must be
/// either empty (no message) or exactly 64 bytes.
/// TODO: this should take an opaque structure that can be called back to get
/// the sensitive data rather than the keychain itself.
pub fn create(
	k: &Keychain,
	amount: u64,
	key_id: &Identifier,
	_commit: Commitment,
	extra_data: Option<Vec<u8>>,
	msg: ProofMessage,
) -> Result<RangeProof, Error> {
	let commit = k.commit(amount, key_id)?;
	let skey = k.derived_key(key_id)?;
	let nonce = create_nonce(k, &commit);
	// empty message: build the proof without one
	if msg.len() == 0 {
		return Ok(k.secp().bullet_proof(amount, skey, nonce, extra_data, None));
	}
	// bullet proofs can only carry a message of exactly 64 bytes
	if msg.len() != 64 {
		error!(LOGGER, "Bullet proof message must be 64 bytes.");
		return Err(Error::RangeProof(
			"Bullet proof message must be 64 bytes".to_string(),
		));
	}
	Ok(k.secp()
		.bullet_proof(amount, skey, nonce, extra_data, Some(msg)))
}
/// Verify a range proof against the given commitment (and any extra committed
/// data). Returns `Ok(())` when the proof is valid, the underlying secp error
/// otherwise.
pub fn verify(
	secp: &Secp256k1,
	commit: Commitment,
	proof: RangeProof,
	extra_data: Option<Vec<u8>>,
) -> Result<(), secp::Error> {
	// discard the verifier's success payload; callers only need pass/fail
	secp.verify_bullet_proof(commit, proof, extra_data)
		.map(|_| ())
}
/// Attempt to rewind a range proof using the key identified by `key_id` and
/// the deterministic nonce derived from the commitment. An unwind failure is
/// reported through `ProofInfo::success` rather than as an `Err`.
pub fn rewind(
	k: &Keychain,
	key_id: &Identifier,
	commit: Commitment,
	extra_data: Option<Vec<u8>>,
	proof: RangeProof,
) -> Result<ProofInfo, Error> {
	let skey = k.derived_key(key_id)?;
	let nonce = create_nonce(k, &commit);
	let unwound = k.secp()
		.unwind_bullet_proof(commit, skey, nonce, extra_data, proof);
	// a failed unwind just means the wrong key/nonce was used; flag it
	let (success, message) = match unwound {
		Ok(m) => (true, m),
		Err(_) => (false, ProofMessage::empty()),
	};
	Ok(ProofInfo {
		success: success,
		value: 0,
		message: message,
		mlen: 0,
		min: 0,
		max: 0,
		exp: 0,
		mantissa: 0,
	})
}

View file

@ -0,0 +1,80 @@
// Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Builds the blinded output and related signature proof for the block
//! reward.
use keychain;
use core::core::{Output, OutputFeatures, ProofMessageElements, TxKernel};
use core::consensus::reward;
use libwallet::{aggsig, proof};
use libwallet::error::Error;
use core::core::KernelFeatures;
use util::{kernel_sig_msg, secp, static_secp_instance, LOGGER};
/// Builds the single reward (coinbase) output and its kernel for a block at
/// the given height, collecting the given total of fees. Returns the output
/// together with its coinbase kernel.
pub fn output(
	keychain: &keychain::Keychain,
	key_id: &keychain::Identifier,
	fees: u64,
	height: u64,
) -> Result<(Output, TxKernel), Error> {
	// total reward = block subsidy plus fees
	let value = reward(fees);
	let commit = keychain.commit(value, key_id)?;
	// embed value and key id in the proof message so the wallet can recognize
	// its own output later
	let msg = ProofMessageElements::new(value, key_id);

	trace!(LOGGER, "Block reward - Pedersen Commit is: {:?}", commit,);

	let rproof = proof::create(
		keychain,
		value,
		key_id,
		commit,
		None,
		msg.to_proof_message(),
	)?;

	let output = Output {
		features: OutputFeatures::COINBASE_OUTPUT,
		commit: commit,
		proof: rproof,
	};

	let secp = static_secp_instance();
	let secp = secp.lock().unwrap();
	// kernel excess is the output commitment minus a commitment to the
	// (publicly known) reward value
	let over_commit = secp.commit_value(reward(fees))?;
	let out_commit = output.commitment();
	let excess = secp.commit_sum(vec![out_commit], vec![over_commit])?;

	// NOTE: Remember we sign the fee *and* the lock_height.
	// For a coinbase output the fee is 0 and the lock_height is
	// the lock_height of the coinbase output itself,
	// not the lock_height of the tx (there is no tx for a coinbase output).
	// This output will not be spendable earlier than lock_height (and we sign this
	// here).
	let msg = secp::Message::from_slice(&kernel_sig_msg(0, height))?;
	let sig = aggsig::sign_from_key_id(&secp, keychain, &msg, &key_id)?;

	let proof = TxKernel {
		features: KernelFeatures::COINBASE_KERNEL,
		excess: excess,
		excess_sig: sig,
		fee: 0,
		// lock_height here is the height of the block (tx should be valid immediately)
		// *not* the lock_height of the coinbase output (only spendable 1,000 blocks later)
		lock_height: height,
	};
	Ok((output, proof))
}

View file

@ -21,11 +21,12 @@ use iron::Handler;
use iron::prelude::*;
use iron::status;
use serde_json;
use uuid::Uuid;
use std::sync::{Arc, RwLock};
use api;
use core::consensus::reward;
use core::core::{amount_to_hr_string, build, Block, Committed, Output, Transaction, TxKernel};
use core::core::{amount_to_hr_string, Committed, Output, Transaction, TxKernel};
use libwallet::{aggsig, build, reward};
use core::{global, ser};
use failure::{Fail, ResultExt};
use keychain::{BlindingFactor, Identifier, Keychain};
@ -39,6 +40,12 @@ pub struct TxWrapper {
pub tx_hex: String,
}
lazy_static! {
/// Static reference to aggsig context (temporary while wallet is being refactored)
pub static ref AGGSIG_CONTEXT_MANAGER:Arc<RwLock<aggsig::ContextManager>>
= Arc::new(RwLock::new(aggsig::ContextManager::new()));
}
/// Receive Part 1 of interactive transactions from sender, Sender Initiation
/// Return result of part 2, Recipient Initiation, to sender
/// -Receiver receives inputs, outputs xS * G and kS * G
@ -52,6 +59,7 @@ pub struct TxWrapper {
fn handle_sender_initiation(
config: &WalletConfig,
context_manager: &mut aggsig::ContextManager,
keychain: &Keychain,
partial_tx: &PartialTx,
) -> Result<PartialTx, Error> {
@ -125,14 +133,13 @@ fn handle_sender_initiation(
let blind = blind_sum
.secret_key(&keychain.secp())
.context(ErrorKind::Keychain)?;
keychain
.aggsig_create_context(&partial_tx.id, blind)
.context(ErrorKind::Keychain)?;
keychain.aggsig_add_output(&partial_tx.id, &key_id);
let mut context = context_manager.create_context(keychain.secp(), &partial_tx.id, blind);
let sig_part = keychain
.aggsig_calculate_partial_sig(
&partial_tx.id,
context.add_output(&key_id);
let sig_part = context
.calculate_partial_sig(
keychain.secp(),
&sender_pub_nonce,
tx.fee(),
tx.lock_height(),
@ -142,7 +149,7 @@ fn handle_sender_initiation(
// Build the response, which should contain sR, blinding excess xR * G, public
// nonce kR * G
let mut partial_tx = build_partial_tx(
&partial_tx.id,
&context,
keychain,
amount,
kernel_offset,
@ -151,6 +158,8 @@ fn handle_sender_initiation(
);
partial_tx.phase = PartialTxPhase::ReceiverInitiation;
context_manager.save_context(context);
Ok(partial_tx)
}
@ -169,15 +178,17 @@ fn handle_sender_initiation(
fn handle_sender_confirmation(
config: &WalletConfig,
context_manager: &mut aggsig::ContextManager,
keychain: &Keychain,
partial_tx: &PartialTx,
fluff: bool,
) -> Result<PartialTx, Error> {
let (amount, sender_pub_blinding, sender_pub_nonce, kernel_offset, sender_sig_part, tx) =
read_partial_tx(keychain, partial_tx)?;
let mut context = context_manager.get_context(&partial_tx.id);
let sender_sig_part = sender_sig_part.unwrap();
let res = keychain.aggsig_verify_partial_sig(
&partial_tx.id,
let res = context.verify_partial_sig(
&keychain.secp(),
&sender_sig_part,
&sender_pub_nonce,
&sender_pub_blinding,
@ -191,9 +202,9 @@ fn handle_sender_confirmation(
}
// Just calculate our sig part again instead of storing
let our_sig_part = keychain
.aggsig_calculate_partial_sig(
&partial_tx.id,
let our_sig_part = context
.calculate_partial_sig(
&keychain.secp(),
&sender_pub_nonce,
tx.fee(),
tx.lock_height(),
@ -201,9 +212,9 @@ fn handle_sender_confirmation(
.unwrap();
// And the final signature
let final_sig = keychain
.aggsig_calculate_final_sig(
&partial_tx.id,
let final_sig = context
.calculate_final_sig(
&keychain.secp(),
&sender_sig_part,
&our_sig_part,
&sender_pub_nonce,
@ -211,12 +222,13 @@ fn handle_sender_confirmation(
.unwrap();
// Calculate the final public key (for our own sanity check)
let final_pubkey = keychain
.aggsig_calculate_final_pubkey(&partial_tx.id, &sender_pub_blinding)
let final_pubkey = context
.calculate_final_pubkey(&keychain.secp(), &sender_pub_blinding)
.unwrap();
// Check our final sig verifies
let res = keychain.aggsig_verify_final_sig_build_msg(
let res = context.verify_final_sig_build_msg(
&keychain.secp(),
&final_sig,
&final_pubkey,
tx.fee(),
@ -229,7 +241,7 @@ fn handle_sender_confirmation(
}
let final_tx = build_final_transaction(
&partial_tx.id,
&mut context,
config,
keychain,
amount,
@ -254,13 +266,15 @@ fn handle_sender_confirmation(
// Return what we've actually posted
// TODO - why build_partial_tx here? Just a naming issue?
let mut partial_tx = build_partial_tx(
&partial_tx.id,
&context,
keychain,
amount,
kernel_offset,
Some(final_sig),
tx,
);
context_manager.save_context(context);
partial_tx.phase = PartialTxPhase::ReceiverConfirmation;
Ok(partial_tx)
}
@ -285,10 +299,12 @@ impl Handler for WalletReceiver {
}
if let Ok(Some(partial_tx)) = struct_body {
let mut acm = AGGSIG_CONTEXT_MANAGER.write().unwrap();
match partial_tx.phase {
PartialTxPhase::SenderInitiation => {
let resp_tx = handle_sender_initiation(
&self.config,
&mut acm,
&self.keychain,
&partial_tx,
).map_err(|e| {
@ -304,6 +320,7 @@ impl Handler for WalletReceiver {
PartialTxPhase::SenderConfirmation => {
let resp_tx = handle_sender_confirmation(
&self.config,
&mut acm,
&self.keychain,
&partial_tx,
fluff,
@ -393,14 +410,15 @@ pub fn receive_coinbase(
debug!(LOGGER, "receive_coinbase: {:?}", block_fees);
let (out, kern) = Block::reward_output(&keychain, &key_id, block_fees.fees, block_fees.height)
.context(ErrorKind::Keychain)?;
let (out, kern) =
reward::output(&keychain, &key_id, block_fees.fees, block_fees.height).unwrap();
/* .context(ErrorKind::Keychain)?; */
Ok((out, kern, block_fees))
}
/// builds a final transaction after the aggregated sig exchange
fn build_final_transaction(
tx_id: &Uuid,
context: &mut aggsig::Context,
config: &WalletConfig,
keychain: &Keychain,
amount: u64,
@ -443,7 +461,7 @@ fn build_final_transaction(
// Get output we created in earlier step
// TODO: will just be one for now, support multiple later
let output_vec = keychain.aggsig_get_outputs(tx_id);
let output_vec = context.get_outputs();
// operate within a lock on wallet data
let (key_id, derivation) = WalletData::with_wallet(&config.data_file_dir, |wallet_data| {

View file

@ -22,6 +22,7 @@ use core::core::transaction::ProofMessageElements;
use types::{Error, ErrorKind, MerkleProofWrapper, OutputData, OutputStatus, WalletConfig,
WalletData};
use byteorder::{BigEndian, ByteOrder};
use libwallet::proof;
pub fn get_chain_height(config: &WalletConfig) -> Result<u64, Error> {
let url = format!("{}/v1/chain", config.check_node_api_http_addr);
@ -142,9 +143,13 @@ fn find_outputs_with_key(
// message 3 times, indicating a strong match. Also, sec_key provided
// to unwind in this case will be meaningless. With only the nonce known
// only the first 32 bytes of the recovered message will be accurate
let info = keychain
.rewind_range_proof(&skey, output.commit, None, output.range_proof().unwrap())
.unwrap();
let info = proof::rewind(
keychain,
&skey,
output.commit,
None,
output.range_proof().unwrap(),
).unwrap();
let message = ProofMessageElements::from_proof_message(info.message).unwrap();
let value = message.value();
if value.is_err() {
@ -176,9 +181,13 @@ fn find_outputs_with_key(
}
found = true;
// we have a partial match, let's just confirm
let info = keychain
.rewind_range_proof(key_id, output.commit, None, output.range_proof().unwrap())
.unwrap();
let info = proof::rewind(
keychain,
key_id,
output.commit,
None,
output.range_proof().unwrap(),
).unwrap();
let message = ProofMessageElements::from_proof_message(info.message).unwrap();
let value = message.value();
if value.is_err() || !message.zeroes_correct() {

View file

@ -18,7 +18,8 @@ use uuid::Uuid;
use api;
use client;
use checker;
use core::core::{amount_to_hr_string, build, Transaction};
use core::core::{amount_to_hr_string, Transaction};
use libwallet::{aggsig, build};
use core::ser;
use keychain::{BlindSum, BlindingFactor, Identifier, Keychain};
use receiver::TxWrapper;
@ -79,24 +80,16 @@ pub fn issue_send_tx(
// computes total blinding excess xS -Sender picks random nonce kS
// -Sender posts inputs, outputs, Message M=fee, xS * G and kS * G to Receiver
//
// Create a new aggsig context
let tx_id = Uuid::new_v4();
let skey = blind_offset
.secret_key(&keychain.secp())
.context(ErrorKind::Keychain)?;
keychain
.aggsig_create_context(&tx_id, skey)
.context(ErrorKind::Keychain)?;
// let kernel_key = kernel_blind
// .secret_key(keychain.secp())
// .context(ErrorKind::Keychain)?;
// let kernel_offset = keychain
// .secp()
// .commit(0, kernel_key)
// .context(ErrorKind::Keychain)?;
// Create a new aggsig context
let mut context_manager = aggsig::ContextManager::new();
let context = context_manager.create_context(keychain.secp(), &tx_id, skey);
let partial_tx = build_partial_tx(&tx_id, keychain, amount_with_fee, kernel_offset, None, tx);
let partial_tx = build_partial_tx(&context, keychain, amount_with_fee, kernel_offset, None, tx);
// Closure to acquire wallet lock and lock the coins being spent
// so we avoid accidental double spend attempt.
@ -178,8 +171,8 @@ pub fn issue_send_tx(
*/
let (_amount, recp_pub_blinding, recp_pub_nonce, kernel_offset, sig, tx) =
read_partial_tx(keychain, &res.unwrap())?;
let res = keychain.aggsig_verify_partial_sig(
&tx_id,
let res = context.verify_partial_sig(
&keychain.secp(),
&sig.unwrap(),
&recp_pub_nonce,
&recp_pub_blinding,
@ -191,15 +184,20 @@ pub fn issue_send_tx(
return Err(ErrorKind::Signature("Partial Sig from recipient invalid."))?;
}
let sig_part = keychain
.aggsig_calculate_partial_sig(&tx_id, &recp_pub_nonce, tx.fee(), tx.lock_height())
let sig_part = context
.calculate_partial_sig(
&keychain.secp(),
&recp_pub_nonce,
tx.fee(),
tx.lock_height(),
)
.unwrap();
// Build the next stage, containing sS (and our pubkeys again, for the
// recipient's convenience) offset has not been modified during tx building,
// so pass it back in
let mut partial_tx = build_partial_tx(
&tx_id,
&context,
keychain,
amount_with_fee,
kernel_offset,
@ -225,6 +223,9 @@ pub fn issue_send_tx(
return Err(e);
}
// Not really necessary here
context_manager.save_context(context);
// All good so
update_wallet()?;
Ok(())
@ -457,7 +458,7 @@ fn inputs_and_change(
#[cfg(test)]
mod test {
use core::core::build;
use libwallet::build;
use keychain::Keychain;
#[test]

View file

@ -24,6 +24,7 @@ use std::path::Path;
use std::path::MAIN_SEPARATOR;
use std::collections::HashMap;
use std::cmp::min;
use libwallet::aggsig;
use serde;
use serde_json;
@ -777,14 +778,14 @@ pub struct PartialTx {
/// aggsig_tx_context should contain the private key/nonce pair
/// the resulting partial tx will contain the corresponding public keys
pub fn build_partial_tx(
transaction_id: &Uuid,
context: &aggsig::Context,
keychain: &keychain::Keychain,
receive_amount: u64,
kernel_offset: BlindingFactor,
part_sig: Option<secp::Signature>,
tx: Transaction,
) -> PartialTx {
let (pub_excess, pub_nonce) = keychain.aggsig_get_public_keys(transaction_id);
let (pub_excess, pub_nonce) = context.get_public_keys(keychain.secp());
let mut pub_excess = pub_excess.serialize_vec(keychain.secp(), true).clone();
let len = pub_excess.clone().len();
let pub_excess: Vec<_> = pub_excess.drain(0..len).collect();
@ -795,7 +796,7 @@ pub fn build_partial_tx(
PartialTx {
phase: PartialTxPhase::SenderInitiation,
id: transaction_id.clone(),
id: context.transaction_id,
amount: receive_amount,
public_blind_excess: util::to_hex(pub_excess),
public_nonce: util::to_hex(pub_nonce),

453
wallet/tests/libwallet.rs Normal file
View file

@ -0,0 +1,453 @@
// Copyright 2018 The Grin Developers
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! libwallet specific tests
extern crate grin_core as core;
extern crate grin_keychain as keychain;
extern crate grin_util as util;
extern crate grin_wallet as wallet;
extern crate rand;
extern crate uuid;
use uuid::Uuid;
use util::{kernel_sig_msg, secp};
use util::secp::key::SecretKey;
use util::secp::pedersen::ProofMessage;
use keychain::{BlindSum, BlindingFactor, Keychain};
use wallet::libwallet::{aggsig, proof};
use rand::thread_rng;
/// Exercises the full interactive sender/receiver aggsig exchange used when
/// building a transaction cooperatively (no kernel offset in this variant):
/// partial sig exchange, mutual verification, final sig assembly, and a final
/// check of the signature against the independently computed kernel excess.
#[test]
fn aggsig_sender_receiver_interaction() {
	let sender_keychain = Keychain::from_random_seed().unwrap();
	let receiver_keychain = Keychain::from_random_seed().unwrap();

	// Each party keeps its own aggsig context manager; contexts are keyed by tx id.
	let mut sender_aggsig_cm = aggsig::ContextManager::new();
	let mut receiver_aggsig_cm = aggsig::ContextManager::new();

	// tx identifier for wallet interaction
	let tx_id = Uuid::new_v4();

	// Calculate the kernel excess here for convenience.
	// Normally this would happen during transaction building.
	let kernel_excess = {
		let skey1 = sender_keychain
			.derived_key(&sender_keychain.derive_key_id(1).unwrap())
			.unwrap();
		let skey2 = receiver_keychain
			.derived_key(&receiver_keychain.derive_key_id(1).unwrap())
			.unwrap();

		// A throwaway keychain, used only for its secp/blind_sum helpers here.
		let keychain = Keychain::from_random_seed().unwrap();
		let blinding_factor = keychain
			.blind_sum(&BlindSum::new()
				.sub_blinding_factor(BlindingFactor::from_secret_key(skey1))
				.add_blinding_factor(BlindingFactor::from_secret_key(skey2)))
			.unwrap();

		keychain
			.secp()
			.commit(0, blinding_factor.secret_key(&keychain.secp()).unwrap())
			.unwrap()
	};

	// sender starts the tx interaction
	let (sender_pub_excess, sender_pub_nonce) = {
		let keychain = sender_keychain.clone();
		let skey = keychain
			.derived_key(&keychain.derive_key_id(1).unwrap())
			.unwrap();

		// dealing with an input here so we need to negate the blinding_factor
		// rather than use it as is
		let bs = BlindSum::new();
		let blinding_factor = keychain
			.blind_sum(&bs.sub_blinding_factor(BlindingFactor::from_secret_key(skey)))
			.unwrap();

		let blind = blinding_factor.secret_key(&keychain.secp()).unwrap();

		let cx = sender_aggsig_cm.create_context(&keychain.secp(), &tx_id, blind);
		cx.get_public_keys(&keychain.secp())
	};

	// receiver receives partial tx
	let (receiver_pub_excess, receiver_pub_nonce, sig_part) = {
		let keychain = receiver_keychain.clone();
		let key_id = keychain.derive_key_id(1).unwrap();

		// let blind = blind_sum.secret_key(&keychain.secp())?;
		let blind = keychain.derived_key(&key_id).unwrap();

		let mut cx = receiver_aggsig_cm.create_context(&keychain.secp(), &tx_id, blind);
		let (pub_excess, pub_nonce) = cx.get_public_keys(&keychain.secp());
		cx.add_output(&key_id);

		// fee = 0, lock_height = 0 throughout this test
		let sig_part = cx.calculate_partial_sig(&keychain.secp(), &sender_pub_nonce, 0, 0)
			.unwrap();
		// persist the context so later stages can retrieve it by tx id
		receiver_aggsig_cm.save_context(cx);
		(pub_excess, pub_nonce, sig_part)
	};

	// check the sender can verify the partial signature
	// received in the response back from the receiver
	{
		let keychain = sender_keychain.clone();
		let cx = sender_aggsig_cm.get_context(&tx_id);

		let sig_verifies = cx.verify_partial_sig(
			&keychain.secp(),
			&sig_part,
			&receiver_pub_nonce,
			&receiver_pub_excess,
			0,
			0,
		);
		assert!(sig_verifies);
	}

	// now sender signs with their key
	let sender_sig_part = {
		let keychain = sender_keychain.clone();
		let cx = sender_aggsig_cm.get_context(&tx_id);

		cx.calculate_partial_sig(&keychain.secp(), &receiver_pub_nonce, 0, 0)
			.unwrap()
	};

	// check the receiver can verify the partial signature
	// received by the sender
	{
		let keychain = receiver_keychain.clone();
		let cx = receiver_aggsig_cm.get_context(&tx_id);

		let sig_verifies = cx.verify_partial_sig(
			&keychain.secp(),
			&sender_sig_part,
			&sender_pub_nonce,
			&sender_pub_excess,
			0,
			0,
		);
		assert!(sig_verifies);
	}

	// Receiver now builds final signature from sender and receiver parts
	let (final_sig, final_pubkey) = {
		let keychain = receiver_keychain.clone();
		let cx = receiver_aggsig_cm.get_context(&tx_id);

		// Receiver recreates their partial sig (we do not maintain state from earlier)
		let our_sig_part = cx.calculate_partial_sig(&keychain.secp(), &sender_pub_nonce, 0, 0)
			.unwrap();

		// Receiver now generates final signature from the two parts
		let final_sig = cx.calculate_final_sig(
			&keychain.secp(),
			&sender_sig_part,
			&our_sig_part,
			&sender_pub_nonce,
		).unwrap();

		// Receiver calculates the final public key (to verify sig later)
		let final_pubkey = cx.calculate_final_pubkey(&keychain.secp(), &sender_pub_excess)
			.unwrap();

		(final_sig, final_pubkey)
	};

	// Receiver checks the final signature verifies
	{
		let keychain = receiver_keychain.clone();
		let cx = receiver_aggsig_cm.get_context(&tx_id);

		// Receiver check the final signature verifies
		let sig_verifies =
			cx.verify_final_sig_build_msg(&keychain.secp(), &final_sig, &final_pubkey, 0, 0);
		assert!(sig_verifies);
	}

	// Check we can verify the sig using the kernel excess
	{
		let keychain = Keychain::from_random_seed().unwrap();
		let msg = secp::Message::from_slice(&kernel_sig_msg(0, 0)).unwrap();
		let sig_verifies =
			aggsig::verify_single_from_commit(&keychain.secp(), &final_sig, &msg, &kernel_excess);

		assert!(sig_verifies);
	}
}
/// Same sender/receiver aggsig exchange as `aggsig_sender_receiver_interaction`,
/// but with a random kernel offset split out of the sender's blinding sum —
/// mirroring how offsets are summed at the block level to prevent kernels from
/// being linked back to individual transactions.
#[test]
fn aggsig_sender_receiver_interaction_offset() {
	let sender_keychain = Keychain::from_random_seed().unwrap();
	let receiver_keychain = Keychain::from_random_seed().unwrap();

	// Each party keeps its own aggsig context manager; contexts are keyed by tx id.
	let mut sender_aggsig_cm = aggsig::ContextManager::new();
	let mut receiver_aggsig_cm = aggsig::ContextManager::new();

	// tx identifier for wallet interaction
	let tx_id = Uuid::new_v4();

	// This is the kernel offset that we use to split the key
	// Summing these at the block level prevents the
	// kernels from being used to reconstruct (or identify) individual transactions
	let kernel_offset = SecretKey::new(&sender_keychain.secp(), &mut thread_rng());

	// Calculate the kernel excess here for convenience.
	// Normally this would happen during transaction building.
	let kernel_excess = {
		let skey1 = sender_keychain
			.derived_key(&sender_keychain.derive_key_id(1).unwrap())
			.unwrap();
		let skey2 = receiver_keychain
			.derived_key(&receiver_keychain.derive_key_id(1).unwrap())
			.unwrap();

		// A throwaway keychain, used only for its secp/blind_sum helpers here.
		let keychain = Keychain::from_random_seed().unwrap();

		let blinding_factor = keychain
			.blind_sum(&BlindSum::new()
				.sub_blinding_factor(BlindingFactor::from_secret_key(skey1))
				.add_blinding_factor(BlindingFactor::from_secret_key(skey2))
				// subtract the kernel offset here as we would when
				// verifying a kernel signature
				.sub_blinding_factor(BlindingFactor::from_secret_key(kernel_offset)))
			.unwrap();

		keychain
			.secp()
			.commit(0, blinding_factor.secret_key(&keychain.secp()).unwrap())
			.unwrap()
	};

	// sender starts the tx interaction
	let (sender_pub_excess, sender_pub_nonce) = {
		let keychain = sender_keychain.clone();
		let skey = keychain
			.derived_key(&keychain.derive_key_id(1).unwrap())
			.unwrap();

		// dealing with an input here so we need to negate the blinding_factor
		// rather than use it as is
		let blinding_factor = keychain
			.blind_sum(&BlindSum::new()
				.sub_blinding_factor(BlindingFactor::from_secret_key(skey))
				// subtract the kernel offset to create an aggsig context
				// with our "split" key
				.sub_blinding_factor(BlindingFactor::from_secret_key(kernel_offset)))
			.unwrap();

		let blind = blinding_factor.secret_key(&keychain.secp()).unwrap();

		let cx = sender_aggsig_cm.create_context(&keychain.secp(), &tx_id, blind);
		cx.get_public_keys(&keychain.secp())
	};

	// receiver receives partial tx
	let (receiver_pub_excess, receiver_pub_nonce, sig_part) = {
		let keychain = receiver_keychain.clone();
		let key_id = keychain.derive_key_id(1).unwrap();

		let blind = keychain.derived_key(&key_id).unwrap();

		let mut cx = receiver_aggsig_cm.create_context(&keychain.secp(), &tx_id, blind);
		let (pub_excess, pub_nonce) = cx.get_public_keys(&keychain.secp());
		cx.add_output(&key_id);

		// fee = 0, lock_height = 0 throughout this test
		let sig_part = cx.calculate_partial_sig(&keychain.secp(), &sender_pub_nonce, 0, 0)
			.unwrap();
		// persist the context so later stages can retrieve it by tx id
		receiver_aggsig_cm.save_context(cx);
		(pub_excess, pub_nonce, sig_part)
	};

	// check the sender can verify the partial signature
	// received in the response back from the receiver
	{
		let keychain = sender_keychain.clone();
		let cx = sender_aggsig_cm.get_context(&tx_id);

		let sig_verifies = cx.verify_partial_sig(
			&keychain.secp(),
			&sig_part,
			&receiver_pub_nonce,
			&receiver_pub_excess,
			0,
			0,
		);
		assert!(sig_verifies);
	}

	// now sender signs with their key
	let sender_sig_part = {
		let keychain = sender_keychain.clone();
		let cx = sender_aggsig_cm.get_context(&tx_id);

		cx.calculate_partial_sig(&keychain.secp(), &receiver_pub_nonce, 0, 0)
			.unwrap()
	};

	// check the receiver can verify the partial signature
	// received by the sender
	{
		let keychain = receiver_keychain.clone();
		let cx = receiver_aggsig_cm.get_context(&tx_id);

		let sig_verifies = cx.verify_partial_sig(
			&keychain.secp(),
			&sender_sig_part,
			&sender_pub_nonce,
			&sender_pub_excess,
			0,
			0,
		);
		assert!(sig_verifies);
	}

	// Receiver now builds final signature from sender and receiver parts
	let (final_sig, final_pubkey) = {
		let keychain = receiver_keychain.clone();
		let cx = receiver_aggsig_cm.get_context(&tx_id);

		// Receiver recreates their partial sig (we do not maintain state from earlier)
		let our_sig_part = cx.calculate_partial_sig(&keychain.secp(), &sender_pub_nonce, 0, 0)
			.unwrap();

		// Receiver now generates final signature from the two parts
		let final_sig = cx.calculate_final_sig(
			&keychain.secp(),
			&sender_sig_part,
			&our_sig_part,
			&sender_pub_nonce,
		).unwrap();

		// Receiver calculates the final public key (to verify sig later)
		let final_pubkey = cx.calculate_final_pubkey(&keychain.secp(), &sender_pub_excess)
			.unwrap();

		(final_sig, final_pubkey)
	};

	// Receiver checks the final signature verifies
	{
		let keychain = receiver_keychain.clone();
		let cx = receiver_aggsig_cm.get_context(&tx_id);

		// Receiver check the final signature verifies
		let sig_verifies =
			cx.verify_final_sig_build_msg(&keychain.secp(), &final_sig, &final_pubkey, 0, 0);
		assert!(sig_verifies);
	}

	// Check we can verify the sig using the kernel excess
	{
		let keychain = Keychain::from_random_seed().unwrap();
		let msg = secp::Message::from_slice(&kernel_sig_msg(0, 0)).unwrap();
		let sig_verifies =
			aggsig::verify_single_from_commit(&keychain.secp(), &final_sig, &msg, &kernel_excess);

		assert!(sig_verifies);
	}
}
/// Creates a bulletproof for a commitment to 5 (with 64 bytes of extra
/// committed data), then checks that rewinding succeeds with the matching
/// nonce/commitment/extra data and fails (or yields gibberish) when any of
/// the three is wrong.
///
/// Fixes two defects in the earlier version of this test: the final
/// "wrong extra data" case discarded its rewind result (`_should_err`) and
/// re-asserted on the stale `proof_info` from the previous case, so it never
/// actually tested anything; and it used a commitment to the wrong value,
/// conflating two independent failure causes.
#[test]
fn test_rewind_range_proof() {
	let keychain = Keychain::from_random_seed().unwrap();
	let key_id = keychain.derive_key_id(1).unwrap();
	let commit = keychain.commit(5, &key_id).unwrap();
	let msg = ProofMessage::from_bytes(&[0u8; 64]);
	let extra_data = [99u8; 64];

	let proof = proof::create(
		&keychain,
		5,
		&key_id,
		commit,
		Some(extra_data.to_vec().clone()),
		msg,
	).unwrap();

	// rewinding with the matching nonce, commitment and extra data succeeds
	let proof_info = proof::rewind(
		&keychain,
		&key_id,
		commit,
		Some(extra_data.to_vec().clone()),
		proof,
	).unwrap();

	assert_eq!(proof_info.success, true);

	// now check the recovered message is "empty" (but not truncated) i.e. all
	// zeroes
	// Value is in the message in this case
	assert_eq!(
		proof_info.message,
		secp::pedersen::ProofMessage::from_bytes(&[0; secp::constants::BULLET_PROOF_MSG_SIZE])
	);

	let key_id2 = keychain.derive_key_id(2).unwrap();

	// cannot rewind with a different nonce
	let proof_info = proof::rewind(
		&keychain,
		&key_id2,
		commit,
		Some(extra_data.to_vec().clone()),
		proof,
	).unwrap();
	// With bullet proofs, if you provide the wrong nonce you'll get gibberish back
	// as opposed to a failure to recover the message
	assert_ne!(
		proof_info.message,
		secp::pedersen::ProofMessage::from_bytes(&[0; secp::constants::BULLET_PROOF_MSG_SIZE])
	);
	assert_eq!(proof_info.value, 0);

	// cannot rewind with a commitment to the same value using a different key
	let commit2 = keychain.commit(5, &key_id2).unwrap();
	let proof_info = proof::rewind(
		&keychain,
		&key_id,
		commit2,
		Some(extra_data.to_vec().clone()),
		proof,
	).unwrap();
	assert_eq!(proof_info.success, false);
	assert_eq!(proof_info.value, 0);

	// cannot rewind with a commitment to a different value
	let commit3 = keychain.commit(4, &key_id).unwrap();
	let proof_info = proof::rewind(
		&keychain,
		&key_id,
		commit3,
		Some(extra_data.to_vec().clone()),
		proof,
	).unwrap();
	assert_eq!(proof_info.success, false);
	assert_eq!(proof_info.value, 0);

	// cannot rewind with wrong extra committed data, even when the nonce and
	// commitment are otherwise correct
	let wrong_extra_data = [98u8; 64];
	let proof_info = proof::rewind(
		&keychain,
		&key_id,
		commit,
		Some(wrong_extra_data.to_vec().clone()),
		proof,
	).unwrap();
	assert_eq!(proof_info.success, false);
	assert_eq!(proof_info.value, 0);
}