2018-03-05 22:33:44 +03:00
|
|
|
// Copyright 2018 The Grin Developers
|
2017-10-03 03:02:31 +03:00
|
|
|
//
|
|
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
// you may not use this file except in compliance with the License.
|
|
|
|
// You may obtain a copy of the License at
|
|
|
|
//
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
//
|
|
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
// See the License for the specific language governing permissions and
|
|
|
|
// limitations under the License.
|
|
|
|
|
|
|
|
use rand::{thread_rng, Rng};
|
2017-10-16 20:11:01 +03:00
|
|
|
use std::collections::HashMap;
|
2017-11-10 18:12:15 +03:00
|
|
|
use std::sync::{Arc, RwLock};
|
2018-02-28 20:56:09 +03:00
|
|
|
use std::{error, fmt};
|
2017-10-03 03:02:31 +03:00
|
|
|
|
2017-11-01 02:20:55 +03:00
|
|
|
use util::secp;
|
|
|
|
use util::secp::{Message, Secp256k1, Signature};
|
2018-03-04 03:19:54 +03:00
|
|
|
use util::secp::key::{PublicKey, SecretKey};
|
|
|
|
use util::secp::pedersen::{Commitment, ProofInfo, ProofMessage, RangeProof};
|
2018-01-10 22:36:27 +03:00
|
|
|
use util::secp::aggsig;
|
2017-11-07 19:48:37 +03:00
|
|
|
use util::logger::LOGGER;
|
2018-01-10 22:36:27 +03:00
|
|
|
use util::kernel_sig_msg;
|
2017-10-03 03:02:31 +03:00
|
|
|
use blake2;
|
2018-02-06 14:42:26 +03:00
|
|
|
use uuid::Uuid;
|
2017-11-01 02:32:33 +03:00
|
|
|
use blind::{BlindSum, BlindingFactor};
|
2017-10-13 07:45:07 +03:00
|
|
|
use extkey::{self, Identifier};
|
2017-10-03 03:02:31 +03:00
|
|
|
|
|
|
|
#[derive(PartialEq, Eq, Clone, Debug)]
|
|
|
|
pub enum Error {
|
|
|
|
ExtendedKey(extkey::Error),
|
|
|
|
Secp(secp::Error),
|
|
|
|
KeyDerivation(String),
|
2018-02-06 14:42:26 +03:00
|
|
|
Transaction(String),
|
2018-02-16 23:34:54 +03:00
|
|
|
RangeProof(String),
|
2017-10-03 03:02:31 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
impl From<secp::Error> for Error {
|
|
|
|
fn from(e: secp::Error) -> Error {
|
|
|
|
Error::Secp(e)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl From<extkey::Error> for Error {
|
|
|
|
fn from(e: extkey::Error) -> Error {
|
|
|
|
Error::ExtendedKey(e)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-02-28 20:56:09 +03:00
|
|
|
impl error::Error for Error {
|
|
|
|
fn description(&self) -> &str {
|
|
|
|
match *self {
|
|
|
|
_ => "some kind of keychain error",
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl fmt::Display for Error {
|
|
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
|
|
match *self {
|
|
|
|
_ => write!(f, "some kind of keychain error"),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-01-10 22:36:27 +03:00
|
|
|
/// Holds internal information about an aggsig operation
|
|
|
|
#[derive(Clone, Debug)]
|
|
|
|
pub struct AggSigTxContext {
|
|
|
|
// Secret key (of which public is shared)
|
|
|
|
pub sec_key: SecretKey,
|
|
|
|
// Secret nonce (of which public is shared)
|
|
|
|
// (basically a SecretKey)
|
|
|
|
pub sec_nonce: SecretKey,
|
|
|
|
// If I'm the recipient, store my outputs between invocations (that I need to sum)
|
|
|
|
pub output_ids: Vec<Identifier>,
|
|
|
|
}
|
|
|
|
|
2017-10-03 03:02:31 +03:00
|
|
|
#[derive(Clone, Debug)]
|
|
|
|
pub struct Keychain {
|
|
|
|
secp: Secp256k1,
|
|
|
|
extkey: extkey::ExtendedKey,
|
2018-03-04 03:19:54 +03:00
|
|
|
pub aggsig_contexts: Arc<RwLock<Option<HashMap<Uuid, AggSigTxContext>>>>,
|
2017-10-16 20:11:01 +03:00
|
|
|
key_overrides: HashMap<Identifier, SecretKey>,
|
2017-11-10 18:12:15 +03:00
|
|
|
key_derivation_cache: Arc<RwLock<HashMap<Identifier, u32>>>,
|
2017-10-03 03:02:31 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
impl Keychain {
|
2017-10-13 07:45:07 +03:00
|
|
|
/// Identifier of the root key underlying this keychain.
pub fn root_key_id(&self) -> Identifier {
	self.extkey.root_key_id.clone()
}
|
|
|
|
|
2017-10-16 20:11:01 +03:00
|
|
|
// For tests and burn only, associate a key identifier with a known secret key.
|
|
|
|
pub fn burn_enabled(keychain: &Keychain, burn_key_id: &Identifier) -> Keychain {
|
|
|
|
let mut key_overrides = HashMap::new();
|
|
|
|
key_overrides.insert(
|
|
|
|
burn_key_id.clone(),
|
|
|
|
SecretKey::from_slice(&keychain.secp, &[1; 32]).unwrap(),
|
|
|
|
);
|
|
|
|
Keychain {
|
|
|
|
key_overrides: key_overrides,
|
|
|
|
..keychain.clone()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-10-03 03:02:31 +03:00
|
|
|
pub fn from_seed(seed: &[u8]) -> Result<Keychain, Error> {
|
|
|
|
let secp = secp::Secp256k1::with_caps(secp::ContextFlag::Commit);
|
|
|
|
let extkey = extkey::ExtendedKey::from_seed(&secp, seed)?;
|
|
|
|
let keychain = Keychain {
|
|
|
|
secp: secp,
|
|
|
|
extkey: extkey,
|
2018-02-06 14:42:26 +03:00
|
|
|
aggsig_contexts: Arc::new(RwLock::new(None)),
|
2017-10-16 20:11:01 +03:00
|
|
|
key_overrides: HashMap::new(),
|
2017-11-10 18:12:15 +03:00
|
|
|
key_derivation_cache: Arc::new(RwLock::new(HashMap::new())),
|
2017-10-03 03:02:31 +03:00
|
|
|
};
|
|
|
|
Ok(keychain)
|
|
|
|
}
|
|
|
|
|
|
|
|
/// For testing - probably not a good idea to use outside of tests.
|
|
|
|
pub fn from_random_seed() -> Result<Keychain, Error> {
|
|
|
|
let seed: String = thread_rng().gen_ascii_chars().take(16).collect();
|
|
|
|
let seed = blake2::blake2b::blake2b(32, &[], seed.as_bytes());
|
|
|
|
Keychain::from_seed(seed.as_bytes())
|
|
|
|
}
|
|
|
|
|
2017-10-13 07:45:07 +03:00
|
|
|
/// Derives the identifier of the child key at the given derivation index.
pub fn derive_key_id(&self, derivation: u32) -> Result<Identifier, Error> {
	Ok(self.extkey.derive(&self.secp, derivation)?.key_id)
}
|
|
|
|
|
2017-11-10 18:12:15 +03:00
|
|
|
fn derived_key(&self, key_id: &Identifier) -> Result<SecretKey, Error> {
|
|
|
|
// first check our overrides and just return the key if we have one in there
|
2017-10-16 20:11:01 +03:00
|
|
|
if let Some(key) = self.key_overrides.get(key_id) {
|
2018-03-04 03:19:54 +03:00
|
|
|
trace!(
|
|
|
|
LOGGER,
|
|
|
|
"... Derived Key (using override) key_id: {}",
|
|
|
|
key_id
|
|
|
|
);
|
2017-10-16 20:11:01 +03:00
|
|
|
return Ok(*key);
|
2017-10-12 06:35:40 +03:00
|
|
|
}
|
2017-10-07 20:38:41 +03:00
|
|
|
|
2018-01-25 23:19:32 +03:00
|
|
|
let child_key = self.derived_child_key(key_id)?;
|
|
|
|
Ok(child_key.key)
|
2018-01-23 15:14:06 +03:00
|
|
|
}
|
|
|
|
|
2018-01-25 23:19:32 +03:00
|
|
|
fn derived_child_key(&self, key_id: &Identifier) -> Result<extkey::ChildKey, Error> {
|
2018-01-23 15:14:06 +03:00
|
|
|
trace!(LOGGER, "Derived Key by key_id: {}", key_id);
|
|
|
|
|
2017-11-10 18:12:15 +03:00
|
|
|
// then check the derivation cache to see if we have previously derived this key
|
|
|
|
// if so use the derivation from the cache to derive the key
|
|
|
|
{
|
|
|
|
let cache = self.key_derivation_cache.read().unwrap();
|
|
|
|
if let Some(derivation) = cache.get(key_id) {
|
2018-03-04 03:19:54 +03:00
|
|
|
trace!(
|
|
|
|
LOGGER,
|
|
|
|
"... Derived Key (cache hit) key_id: {}, derivation: {}",
|
|
|
|
key_id,
|
|
|
|
derivation
|
|
|
|
);
|
|
|
|
return Ok(self.derived_key_from_index(*derivation)?);
|
2017-11-10 18:12:15 +03:00
|
|
|
}
|
|
|
|
}
|
2017-11-07 19:48:37 +03:00
|
|
|
|
2017-11-10 18:12:15 +03:00
|
|
|
// otherwise iterate over a large number of derivations looking for our key
|
|
|
|
// cache the resulting derivations by key_id for faster lookup later
|
2017-12-19 04:33:44 +03:00
|
|
|
// TODO - remove hard limit (within reason)
|
|
|
|
// TODO - do we benefit here if we track our max known n_child?
|
2017-11-10 18:12:15 +03:00
|
|
|
{
|
|
|
|
let mut cache = self.key_derivation_cache.write().unwrap();
|
2017-12-19 04:33:44 +03:00
|
|
|
for i in 1..100_000 {
|
2018-01-25 23:19:32 +03:00
|
|
|
let child_key = self.extkey.derive(&self.secp, i)?;
|
|
|
|
// let child_key_id = extkey.identifier(&self.secp)?;
|
|
|
|
|
|
|
|
if !cache.contains_key(&child_key.key_id) {
|
|
|
|
trace!(
|
|
|
|
LOGGER,
|
|
|
|
"... Derived Key (cache miss) key_id: {}, derivation: {}",
|
|
|
|
child_key.key_id,
|
|
|
|
child_key.n_child,
|
|
|
|
);
|
|
|
|
cache.insert(child_key.key_id.clone(), child_key.n_child);
|
2017-11-10 18:12:15 +03:00
|
|
|
}
|
|
|
|
|
2018-01-25 23:19:32 +03:00
|
|
|
if child_key.key_id == *key_id {
|
|
|
|
return Ok(child_key);
|
2017-11-10 18:12:15 +03:00
|
|
|
}
|
2017-10-07 20:38:41 +03:00
|
|
|
}
|
|
|
|
}
|
2017-11-10 18:12:15 +03:00
|
|
|
|
2018-03-04 03:19:54 +03:00
|
|
|
Err(Error::KeyDerivation(format!(
|
|
|
|
"failed to derive child_key for {:?}",
|
|
|
|
key_id
|
|
|
|
)))
|
2017-10-03 03:02:31 +03:00
|
|
|
}
|
|
|
|
|
2017-11-10 18:12:15 +03:00
|
|
|
// if we know the derivation index we can just straight to deriving the key
|
2018-03-04 03:19:54 +03:00
|
|
|
fn derived_key_from_index(&self, derivation: u32) -> Result<extkey::ChildKey, Error> {
|
2017-11-10 18:12:15 +03:00
|
|
|
trace!(LOGGER, "Derived Key (fast) by derivation: {}", derivation);
|
2018-01-25 23:19:32 +03:00
|
|
|
let child_key = self.extkey.derive(&self.secp, derivation)?;
|
2018-03-04 03:19:54 +03:00
|
|
|
return Ok(child_key);
|
2017-11-07 19:48:37 +03:00
|
|
|
}
|
|
|
|
|
2017-10-13 07:45:07 +03:00
|
|
|
/// Pedersen commitment to `amount` using the key behind `key_id` as the
/// blinding factor.
pub fn commit(&self, amount: u64, key_id: &Identifier) -> Result<Commitment, Error> {
	let blinding = self.derived_key(key_id)?;
	Ok(self.secp.commit(amount, blinding)?)
}
|
|
|
|
|
2018-03-04 03:19:54 +03:00
|
|
|
/// Pedersen commitment to `amount` using the key at the given derivation
/// index directly (skips the identifier lookup).
pub fn commit_with_key_index(&self, amount: u64, derivation: u32) -> Result<Commitment, Error> {
	let child_key = self.derived_key_from_index(derivation)?;
	Ok(self.secp.commit(amount, child_key.key)?)
}
|
|
|
|
|
2017-10-03 03:02:31 +03:00
|
|
|
pub fn range_proof(
|
|
|
|
&self,
|
|
|
|
amount: u64,
|
2017-10-13 07:45:07 +03:00
|
|
|
key_id: &Identifier,
|
2018-03-13 21:22:34 +03:00
|
|
|
_commit: Commitment,
|
2018-02-28 00:11:55 +03:00
|
|
|
extra_data: Option<Vec<u8>>,
|
2017-10-03 03:02:31 +03:00
|
|
|
) -> Result<RangeProof, Error> {
|
2017-10-13 07:45:07 +03:00
|
|
|
let skey = self.derived_key(key_id)?;
|
2018-03-22 03:10:11 +03:00
|
|
|
Ok(self.secp.bullet_proof(amount, skey, extra_data, None))
|
2017-10-03 03:02:31 +03:00
|
|
|
}
|
|
|
|
|
2018-02-16 23:34:54 +03:00
|
|
|
pub fn verify_range_proof(
|
|
|
|
secp: &Secp256k1,
|
2018-02-28 00:11:55 +03:00
|
|
|
commit: Commitment,
|
|
|
|
proof: RangeProof,
|
2018-03-04 03:19:54 +03:00
|
|
|
extra_data: Option<Vec<u8>>,
|
|
|
|
) -> Result<(), secp::Error> {
|
|
|
|
let result = secp.verify_bullet_proof(commit, proof, extra_data);
|
|
|
|
match result {
|
|
|
|
Ok(_) => Ok(()),
|
|
|
|
Err(e) => Err(e),
|
2018-02-16 23:34:54 +03:00
|
|
|
}
|
2018-03-04 03:19:54 +03:00
|
|
|
}
|
2018-02-16 23:34:54 +03:00
|
|
|
|
2017-10-06 00:40:46 +03:00
|
|
|
pub fn rewind_range_proof(
|
|
|
|
&self,
|
2017-10-13 07:45:07 +03:00
|
|
|
key_id: &Identifier,
|
2017-10-06 00:40:46 +03:00
|
|
|
commit: Commitment,
|
2018-02-28 00:11:55 +03:00
|
|
|
extra_data: Option<Vec<u8>>,
|
2017-10-06 00:40:46 +03:00
|
|
|
proof: RangeProof,
|
|
|
|
) -> Result<ProofInfo, Error> {
|
2017-10-13 07:45:07 +03:00
|
|
|
let nonce = self.derived_key(key_id)?;
|
2018-03-04 03:19:54 +03:00
|
|
|
let proof_message = self.secp
|
2018-03-14 14:03:09 +03:00
|
|
|
.unwind_bullet_proof(commit, nonce, nonce, extra_data, proof);
|
2018-02-28 00:11:55 +03:00
|
|
|
let proof_info = match proof_message {
|
2018-03-04 03:19:54 +03:00
|
|
|
Ok(p) => ProofInfo {
|
2018-02-28 00:11:55 +03:00
|
|
|
success: true,
|
|
|
|
value: 0,
|
|
|
|
message: p,
|
|
|
|
mlen: 0,
|
|
|
|
min: 0,
|
|
|
|
max: 0,
|
|
|
|
exp: 0,
|
|
|
|
mantissa: 0,
|
|
|
|
},
|
|
|
|
Err(_) => ProofInfo {
|
|
|
|
success: false,
|
|
|
|
value: 0,
|
|
|
|
message: ProofMessage::empty(),
|
|
|
|
mlen: 0,
|
|
|
|
min: 0,
|
|
|
|
max: 0,
|
|
|
|
exp: 0,
|
|
|
|
mantissa: 0,
|
2018-03-04 03:19:54 +03:00
|
|
|
},
|
2018-02-28 00:11:55 +03:00
|
|
|
};
|
|
|
|
return Ok(proof_info);
|
2017-10-06 00:40:46 +03:00
|
|
|
}
|
|
|
|
|
2017-10-03 03:02:31 +03:00
|
|
|
pub fn blind_sum(&self, blind_sum: &BlindSum) -> Result<BlindingFactor, Error> {
|
|
|
|
let mut pos_keys: Vec<SecretKey> = blind_sum
|
2017-10-13 07:45:07 +03:00
|
|
|
.positive_key_ids
|
2017-10-03 03:02:31 +03:00
|
|
|
.iter()
|
|
|
|
.filter_map(|k| self.derived_key(&k).ok())
|
|
|
|
.collect();
|
|
|
|
|
|
|
|
let mut neg_keys: Vec<SecretKey> = blind_sum
|
2017-10-13 07:45:07 +03:00
|
|
|
.negative_key_ids
|
2017-10-03 03:02:31 +03:00
|
|
|
.iter()
|
|
|
|
.filter_map(|k| self.derived_key(&k).ok())
|
|
|
|
.collect();
|
|
|
|
|
|
|
|
pos_keys.extend(&blind_sum
|
|
|
|
.positive_blinding_factors
|
|
|
|
.iter()
|
2018-02-13 18:35:30 +03:00
|
|
|
.filter_map(|b| b.secret_key(&self.secp).ok())
|
2017-10-03 03:02:31 +03:00
|
|
|
.collect::<Vec<SecretKey>>());
|
|
|
|
|
|
|
|
neg_keys.extend(&blind_sum
|
|
|
|
.negative_blinding_factors
|
|
|
|
.iter()
|
2018-02-13 18:35:30 +03:00
|
|
|
.filter_map(|b| b.secret_key(&self.secp).ok())
|
2017-10-03 03:02:31 +03:00
|
|
|
.collect::<Vec<SecretKey>>());
|
|
|
|
|
2018-02-13 18:35:30 +03:00
|
|
|
let sum = self.secp.blind_sum(pos_keys, neg_keys)?;
|
|
|
|
Ok(BlindingFactor::from_secret_key(sum))
|
2017-10-03 03:02:31 +03:00
|
|
|
}
|
|
|
|
|
2018-03-04 03:19:54 +03:00
|
|
|
pub fn aggsig_create_context(
|
|
|
|
&self,
|
|
|
|
transaction_id: &Uuid,
|
|
|
|
sec_key: SecretKey,
|
|
|
|
) -> Result<(), Error> {
|
2018-02-06 14:42:26 +03:00
|
|
|
let mut contexts = self.aggsig_contexts.write().unwrap();
|
|
|
|
if contexts.is_none() {
|
|
|
|
*contexts = Some(HashMap::new())
|
|
|
|
}
|
|
|
|
if contexts.as_mut().unwrap().contains_key(transaction_id) {
|
2018-03-04 03:19:54 +03:00
|
|
|
return Err(Error::Transaction(String::from(
|
|
|
|
"Duplication transaction id",
|
|
|
|
)));
|
2018-02-06 14:42:26 +03:00
|
|
|
}
|
2018-03-04 03:19:54 +03:00
|
|
|
contexts.as_mut().unwrap().insert(
|
|
|
|
transaction_id.clone(),
|
|
|
|
AggSigTxContext {
|
|
|
|
sec_key: sec_key,
|
|
|
|
sec_nonce: aggsig::export_secnonce_single(&self.secp).unwrap(),
|
|
|
|
output_ids: vec![],
|
|
|
|
},
|
|
|
|
);
|
2018-02-06 14:42:26 +03:00
|
|
|
Ok(())
|
2018-01-10 22:36:27 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Tracks an output contributing to my excess value (if it needs to
|
|
|
|
/// be kept between invocations
|
2018-03-04 03:19:54 +03:00
|
|
|
pub fn aggsig_add_output(&self, transaction_id: &Uuid, output_id: &Identifier) {
|
2018-02-06 14:42:26 +03:00
|
|
|
let mut agg_contexts = self.aggsig_contexts.write().unwrap();
|
|
|
|
let mut agg_contexts_local = agg_contexts.as_mut().unwrap().clone();
|
|
|
|
let mut agg_context = agg_contexts_local.get(transaction_id).unwrap().clone();
|
|
|
|
agg_context.output_ids.push(output_id.clone());
|
|
|
|
agg_contexts_local.insert(transaction_id.clone(), agg_context);
|
|
|
|
*agg_contexts = Some(agg_contexts_local);
|
2018-01-10 22:36:27 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Returns all stored outputs
|
2018-02-06 14:42:26 +03:00
|
|
|
pub fn aggsig_get_outputs(&self, transaction_id: &Uuid) -> Vec<Identifier> {
|
|
|
|
let contexts = self.aggsig_contexts.clone();
|
|
|
|
let contexts_read = contexts.read().unwrap();
|
|
|
|
let agg_context = contexts_read.as_ref().unwrap();
|
|
|
|
let agg_context_return = agg_context.get(transaction_id);
|
|
|
|
agg_context_return.unwrap().output_ids.clone()
|
2018-01-10 22:36:27 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Returns private key, private nonce
|
2018-02-06 14:42:26 +03:00
|
|
|
pub fn aggsig_get_private_keys(&self, transaction_id: &Uuid) -> (SecretKey, SecretKey) {
|
|
|
|
let contexts = self.aggsig_contexts.clone();
|
2018-03-04 03:19:54 +03:00
|
|
|
let contexts_read = contexts.read().unwrap();
|
2018-02-06 14:42:26 +03:00
|
|
|
let agg_context = contexts_read.as_ref().unwrap();
|
|
|
|
let agg_context_return = agg_context.get(transaction_id);
|
2018-03-04 03:19:54 +03:00
|
|
|
(
|
|
|
|
agg_context_return.unwrap().sec_key.clone(),
|
|
|
|
agg_context_return.unwrap().sec_nonce.clone(),
|
|
|
|
)
|
2018-01-10 22:36:27 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Returns public key, public nonce
|
2018-02-06 14:42:26 +03:00
|
|
|
pub fn aggsig_get_public_keys(&self, transaction_id: &Uuid) -> (PublicKey, PublicKey) {
|
|
|
|
let contexts = self.aggsig_contexts.clone();
|
2018-03-04 03:19:54 +03:00
|
|
|
let contexts_read = contexts.read().unwrap();
|
2018-02-06 14:42:26 +03:00
|
|
|
let agg_context = contexts_read.as_ref().unwrap();
|
|
|
|
let agg_context_return = agg_context.get(transaction_id);
|
2018-03-04 03:19:54 +03:00
|
|
|
(
|
|
|
|
PublicKey::from_secret_key(&self.secp, &agg_context_return.unwrap().sec_key).unwrap(),
|
|
|
|
PublicKey::from_secret_key(&self.secp, &agg_context_return.unwrap().sec_nonce).unwrap(),
|
|
|
|
)
|
2018-01-10 22:36:27 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Note 'secnonce' here is used to perform the signature, while 'pubnonce' just allows you to
|
|
|
|
/// provide a custom public nonce to include while calculating e
|
|
|
|
/// nonce_sum is the sum used to decide whether secnonce should be inverted during sig time
|
2018-03-04 03:19:54 +03:00
|
|
|
pub fn aggsig_sign_single(
|
|
|
|
&self,
|
2018-02-06 14:42:26 +03:00
|
|
|
transaction_id: &Uuid,
|
|
|
|
msg: &Message,
|
2018-03-04 03:19:54 +03:00
|
|
|
secnonce: Option<&SecretKey>,
|
2018-02-06 14:42:26 +03:00
|
|
|
pubnonce: Option<&PublicKey>,
|
2018-03-04 03:19:54 +03:00
|
|
|
nonce_sum: Option<&PublicKey>,
|
|
|
|
) -> Result<Signature, Error> {
|
2018-02-06 14:42:26 +03:00
|
|
|
let contexts = self.aggsig_contexts.clone();
|
2018-03-04 03:19:54 +03:00
|
|
|
let contexts_read = contexts.read().unwrap();
|
2018-02-06 14:42:26 +03:00
|
|
|
let agg_context = contexts_read.as_ref().unwrap();
|
|
|
|
let agg_context_return = agg_context.get(transaction_id);
|
2018-03-04 03:19:54 +03:00
|
|
|
let sig = aggsig::sign_single(
|
|
|
|
&self.secp,
|
|
|
|
msg,
|
|
|
|
&agg_context_return.unwrap().sec_key,
|
|
|
|
secnonce,
|
|
|
|
pubnonce,
|
|
|
|
nonce_sum,
|
|
|
|
)?;
|
2018-01-10 22:36:27 +03:00
|
|
|
Ok(sig)
|
|
|
|
}
|
|
|
|
|
|
|
|
//Verifies an aggsig signature
|
2018-03-04 03:19:54 +03:00
|
|
|
pub fn aggsig_verify_single(
|
|
|
|
&self,
|
2018-02-06 14:42:26 +03:00
|
|
|
sig: &Signature,
|
|
|
|
msg: &Message,
|
2018-03-04 03:19:54 +03:00
|
|
|
pubnonce: Option<&PublicKey>,
|
|
|
|
pubkey: &PublicKey,
|
|
|
|
is_partial: bool,
|
|
|
|
) -> bool {
|
2018-01-10 22:36:27 +03:00
|
|
|
aggsig::verify_single(&self.secp, sig, msg, pubnonce, pubkey, is_partial)
|
|
|
|
}
|
|
|
|
|
|
|
|
//Verifies other final sig corresponds with what we're expecting
|
2018-03-04 03:19:54 +03:00
|
|
|
pub fn aggsig_verify_final_sig_build_msg(
|
|
|
|
&self,
|
2018-02-06 14:42:26 +03:00
|
|
|
sig: &Signature,
|
|
|
|
pubkey: &PublicKey,
|
|
|
|
fee: u64,
|
2018-03-04 03:19:54 +03:00
|
|
|
lock_height: u64,
|
|
|
|
) -> bool {
|
2018-01-10 22:36:27 +03:00
|
|
|
let msg = secp::Message::from_slice(&kernel_sig_msg(fee, lock_height)).unwrap();
|
|
|
|
self.aggsig_verify_single(sig, &msg, None, pubkey, true)
|
|
|
|
}
|
|
|
|
|
|
|
|
//Verifies other party's sig corresponds with what we're expecting
|
2018-03-04 03:19:54 +03:00
|
|
|
pub fn aggsig_verify_partial_sig(
|
|
|
|
&self,
|
2018-02-06 14:42:26 +03:00
|
|
|
transaction_id: &Uuid,
|
|
|
|
sig: &Signature,
|
2018-03-04 03:19:54 +03:00
|
|
|
other_pub_nonce: &PublicKey,
|
|
|
|
pubkey: &PublicKey,
|
2018-02-06 14:42:26 +03:00
|
|
|
fee: u64,
|
2018-03-04 03:19:54 +03:00
|
|
|
lock_height: u64,
|
|
|
|
) -> bool {
|
2018-02-06 14:42:26 +03:00
|
|
|
let (_, sec_nonce) = self.aggsig_get_private_keys(transaction_id);
|
2018-01-10 22:36:27 +03:00
|
|
|
let mut nonce_sum = other_pub_nonce.clone();
|
|
|
|
let _ = nonce_sum.add_exp_assign(&self.secp, &sec_nonce);
|
|
|
|
let msg = secp::Message::from_slice(&kernel_sig_msg(fee, lock_height)).unwrap();
|
|
|
|
|
|
|
|
self.aggsig_verify_single(sig, &msg, Some(&nonce_sum), pubkey, true)
|
|
|
|
}
|
|
|
|
|
2018-02-13 18:35:30 +03:00
|
|
|
pub fn aggsig_calculate_partial_sig(
|
|
|
|
&self,
|
2018-02-06 14:42:26 +03:00
|
|
|
transaction_id: &Uuid,
|
2018-02-13 18:35:30 +03:00
|
|
|
other_pub_nonce: &PublicKey,
|
|
|
|
fee: u64,
|
2018-03-04 03:19:54 +03:00
|
|
|
lock_height: u64,
|
|
|
|
) -> Result<Signature, Error> {
|
2018-01-10 22:36:27 +03:00
|
|
|
// Add public nonces kR*G + kS*G
|
2018-02-06 14:42:26 +03:00
|
|
|
let (_, sec_nonce) = self.aggsig_get_private_keys(transaction_id);
|
2018-01-10 22:36:27 +03:00
|
|
|
let mut nonce_sum = other_pub_nonce.clone();
|
|
|
|
let _ = nonce_sum.add_exp_assign(&self.secp, &sec_nonce);
|
|
|
|
let msg = secp::Message::from_slice(&kernel_sig_msg(fee, lock_height))?;
|
|
|
|
|
|
|
|
//Now calculate signature using message M=fee, nonce in e=nonce_sum
|
2018-03-04 03:19:54 +03:00
|
|
|
self.aggsig_sign_single(
|
|
|
|
transaction_id,
|
|
|
|
&msg,
|
|
|
|
Some(&sec_nonce),
|
|
|
|
Some(&nonce_sum),
|
|
|
|
Some(&nonce_sum),
|
|
|
|
)
|
2018-01-10 22:36:27 +03:00
|
|
|
}
|
|
|
|
|
2018-02-13 18:35:30 +03:00
|
|
|
/// Helper function to calculate final signature
|
|
|
|
pub fn aggsig_calculate_final_sig(
|
|
|
|
&self,
|
2018-02-06 14:42:26 +03:00
|
|
|
transaction_id: &Uuid,
|
|
|
|
their_sig: &Signature,
|
|
|
|
our_sig: &Signature,
|
2018-03-04 03:19:54 +03:00
|
|
|
their_pub_nonce: &PublicKey,
|
|
|
|
) -> Result<Signature, Error> {
|
2018-01-10 22:36:27 +03:00
|
|
|
// Add public nonces kR*G + kS*G
|
2018-02-06 14:42:26 +03:00
|
|
|
let (_, sec_nonce) = self.aggsig_get_private_keys(transaction_id);
|
2018-01-10 22:36:27 +03:00
|
|
|
let mut nonce_sum = their_pub_nonce.clone();
|
|
|
|
let _ = nonce_sum.add_exp_assign(&self.secp, &sec_nonce);
|
|
|
|
let sig = aggsig::add_signatures_single(&self.secp, their_sig, our_sig, &nonce_sum)?;
|
|
|
|
Ok(sig)
|
|
|
|
}
|
|
|
|
|
2018-01-17 06:03:40 +03:00
|
|
|
/// Helper function to calculate final public key
|
|
|
|
pub fn aggsig_calculate_final_pubkey(
|
|
|
|
&self,
|
2018-02-06 14:42:26 +03:00
|
|
|
transaction_id: &Uuid,
|
2018-01-17 06:03:40 +03:00
|
|
|
their_public_key: &PublicKey,
|
|
|
|
) -> Result<PublicKey, Error> {
|
2018-02-06 14:42:26 +03:00
|
|
|
let (our_sec_key, _) = self.aggsig_get_private_keys(transaction_id);
|
2018-01-10 22:36:27 +03:00
|
|
|
let mut pk_sum = their_public_key.clone();
|
|
|
|
let _ = pk_sum.add_exp_assign(&self.secp, &our_sec_key);
|
|
|
|
Ok(pk_sum)
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Just a simple sig, creates its own nonce, etc
|
2018-01-17 06:03:40 +03:00
|
|
|
pub fn aggsig_sign_from_key_id(
|
|
|
|
&self,
|
|
|
|
msg: &Message,
|
|
|
|
key_id: &Identifier,
|
|
|
|
) -> Result<Signature, Error> {
|
2018-01-10 22:36:27 +03:00
|
|
|
let skey = self.derived_key(key_id)?;
|
|
|
|
let sig = aggsig::sign_single(&self.secp, &msg, &skey, None, None, None)?;
|
|
|
|
Ok(sig)
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Verifies a sig given a commitment
|
2018-01-17 06:03:40 +03:00
|
|
|
pub fn aggsig_verify_single_from_commit(
|
2018-03-04 03:19:54 +03:00
|
|
|
secp: &Secp256k1,
|
2018-01-17 06:03:40 +03:00
|
|
|
sig: &Signature,
|
|
|
|
msg: &Message,
|
|
|
|
commit: &Commitment,
|
|
|
|
) -> bool {
|
2018-03-04 03:19:54 +03:00
|
|
|
// Extract the pubkey, unfortunately we need this hack for now, (we just hope
|
|
|
|
// one is valid) TODO: Create better secp256k1 API to do this
|
2018-01-10 22:36:27 +03:00
|
|
|
let pubkeys = commit.to_two_pubkeys(secp);
|
2018-01-17 06:03:40 +03:00
|
|
|
let mut valid = false;
|
2018-01-10 22:36:27 +03:00
|
|
|
for i in 0..pubkeys.len() {
|
2018-01-17 06:03:40 +03:00
|
|
|
valid = aggsig::verify_single(secp, &sig, &msg, None, &pubkeys[i], false);
|
2018-01-10 22:36:27 +03:00
|
|
|
if valid {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
valid
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Just a simple sig, creates its own nonce, etc
|
2018-01-17 06:03:40 +03:00
|
|
|
pub fn aggsig_sign_with_blinding(
|
|
|
|
secp: &Secp256k1,
|
|
|
|
msg: &Message,
|
|
|
|
blinding: &BlindingFactor,
|
|
|
|
) -> Result<Signature, Error> {
|
2018-02-13 18:35:30 +03:00
|
|
|
let skey = &blinding.secret_key(&secp)?;
|
|
|
|
let sig = aggsig::sign_single(secp, &msg, skey, None, None, None)?;
|
2018-01-10 22:36:27 +03:00
|
|
|
Ok(sig)
|
|
|
|
}
|
|
|
|
|
2017-10-13 07:45:07 +03:00
|
|
|
/// Plain (non-aggsig) signature over `msg` with the key behind `key_id`.
pub fn sign(&self, msg: &Message, key_id: &Identifier) -> Result<Signature, Error> {
	let skey = self.derived_key(key_id)?;
	Ok(self.secp.sign(msg, &skey)?)
}
|
|
|
|
|
|
|
|
pub fn sign_with_blinding(
|
|
|
|
&self,
|
|
|
|
msg: &Message,
|
|
|
|
blinding: &BlindingFactor,
|
|
|
|
) -> Result<Signature, Error> {
|
2018-02-13 18:35:30 +03:00
|
|
|
let skey = &blinding.secret_key(&self.secp)?;
|
|
|
|
let sig = self.secp.sign(msg, &skey)?;
|
2017-10-03 03:02:31 +03:00
|
|
|
Ok(sig)
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Borrow of the underlying secp context.
pub fn secp(&self) -> &Secp256k1 {
	&self.secp
}
|
|
|
|
}
|
|
|
|
|
|
|
|
#[cfg(test)]
|
|
|
|
mod test {
|
2018-02-13 18:35:30 +03:00
|
|
|
use rand::thread_rng;
|
|
|
|
|
|
|
|
use uuid::Uuid;
|
|
|
|
|
2018-02-02 17:51:55 +03:00
|
|
|
use keychain::{BlindSum, BlindingFactor, Keychain};
|
2018-02-13 18:35:30 +03:00
|
|
|
use util::kernel_sig_msg;
|
2017-11-01 02:20:55 +03:00
|
|
|
use util::secp;
|
|
|
|
use util::secp::pedersen::ProofMessage;
|
2018-02-02 17:51:55 +03:00
|
|
|
use util::secp::key::SecretKey;
|
|
|
|
|
2017-10-03 03:02:31 +03:00
|
|
|
#[test]
fn test_key_derivation() {
	let keychain = Keychain::from_random_seed().unwrap();
	let secp = keychain.secp();

	// use the keychain to derive a "key_id" based on the underlying seed
	let key_id = keychain.derive_key_id(1).unwrap();

	let zero_bytes = [0; 32];
	let msg = secp::Message::from_slice(&zero_bytes[..]).unwrap();

	// now create a zero commitment using the key on the keychain associated
	// with the key_id
	let commit = keychain.commit(0, &key_id).unwrap();

	// now check we can use our key to verify a signature from this zero commitment
	let sig = keychain.sign(&msg, &key_id).unwrap();
	secp.verify_from_commit(&msg, &sig, &commit).unwrap();
}
|
2017-10-06 00:40:46 +03:00
|
|
|
|
2018-02-02 17:51:55 +03:00
|
|
|
// We plan to "offset" the key used in the kernel commitment
// so we are going to be doing some key addition/subtraction.
// This test is mainly to demonstrate that idea that summing commitments
// and summing the keys used to commit to 0 have the same result.
#[test]
fn secret_key_addition() {
	let keychain = Keychain::from_random_seed().unwrap();

	// two fixed secret keys differing only in the last byte
	let skey1 = SecretKey::from_slice(
		&keychain.secp,
		&[
			0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
			0, 0, 0, 1,
		],
	).unwrap();

	let skey2 = SecretKey::from_slice(
		&keychain.secp,
		&[
			0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
			0, 0, 0, 2,
		],
	).unwrap();

	// adding secret keys 1 and 2 to give secret key 3
	let mut skey3 = skey1.clone();
	let _ = skey3.add_assign(&keychain.secp, &skey2).unwrap();

	// create commitments for secret keys 1, 2 and 3
	// all committing to the value 0 (which is what we do for tx_kernels)
	let commit_1 = keychain.secp.commit(0, skey1).unwrap();
	let commit_2 = keychain.secp.commit(0, skey2).unwrap();
	let commit_3 = keychain.secp.commit(0, skey3).unwrap();

	// now sum commitments for keys 1 and 2
	let sum = keychain
		.secp
		.commit_sum(vec![commit_1.clone(), commit_2.clone()], vec![])
		.unwrap();

	// confirm the commitment for key 3 matches the sum of the commitments 1 and 2
	assert_eq!(sum, commit_3);

	// now check we can sum keys up using keychain.blind_sum()
	// in the same way (convenience function)
	assert_eq!(
		keychain
			.blind_sum(&BlindSum::new()
				.add_blinding_factor(BlindingFactor::from_secret_key(skey1))
				.add_blinding_factor(BlindingFactor::from_secret_key(skey2)))
			.unwrap(),
		BlindingFactor::from_secret_key(skey3),
	);
}
|
2018-02-13 18:35:30 +03:00
|
|
|
|
|
|
|
#[test]
|
|
|
|
fn aggsig_sender_receiver_interaction() {
|
|
|
|
let sender_keychain = Keychain::from_random_seed().unwrap();
|
|
|
|
let receiver_keychain = Keychain::from_random_seed().unwrap();
|
|
|
|
|
|
|
|
// tx identifier for wallet interaction
|
|
|
|
let tx_id = Uuid::new_v4();
|
|
|
|
|
|
|
|
// Calculate the kernel excess here for convenience.
|
|
|
|
// Normally this would happen during transaction building.
|
|
|
|
let kernel_excess = {
|
2018-03-04 03:19:54 +03:00
|
|
|
let skey1 = sender_keychain
|
|
|
|
.derived_key(&sender_keychain.derive_key_id(1).unwrap())
|
|
|
|
.unwrap();
|
2018-02-13 18:35:30 +03:00
|
|
|
|
2018-03-04 03:19:54 +03:00
|
|
|
let skey2 = receiver_keychain
|
|
|
|
.derived_key(&receiver_keychain.derive_key_id(1).unwrap())
|
|
|
|
.unwrap();
|
2018-02-13 18:35:30 +03:00
|
|
|
|
|
|
|
let keychain = Keychain::from_random_seed().unwrap();
|
2018-03-04 03:19:54 +03:00
|
|
|
let blinding_factor = keychain
|
|
|
|
.blind_sum(&BlindSum::new()
|
2018-02-13 18:35:30 +03:00
|
|
|
.sub_blinding_factor(BlindingFactor::from_secret_key(skey1))
|
2018-03-04 03:19:54 +03:00
|
|
|
.add_blinding_factor(BlindingFactor::from_secret_key(skey2)))
|
|
|
|
.unwrap();
|
2018-02-13 18:35:30 +03:00
|
|
|
|
2018-03-04 03:19:54 +03:00
|
|
|
keychain
|
|
|
|
.secp
|
|
|
|
.commit(0, blinding_factor.secret_key(&keychain.secp).unwrap())
|
|
|
|
.unwrap()
|
2018-02-13 18:35:30 +03:00
|
|
|
};
|
|
|
|
|
|
|
|
// sender starts the tx interaction
|
|
|
|
let (sender_pub_excess, sender_pub_nonce) = {
|
|
|
|
let keychain = sender_keychain.clone();
|
|
|
|
|
2018-03-04 03:19:54 +03:00
|
|
|
let skey = keychain
|
|
|
|
.derived_key(&keychain.derive_key_id(1).unwrap())
|
|
|
|
.unwrap();
|
2018-02-13 18:35:30 +03:00
|
|
|
|
|
|
|
// dealing with an input here so we need to negate the blinding_factor
|
|
|
|
// rather than use it as is
|
2018-03-04 03:19:54 +03:00
|
|
|
let blinding_factor = keychain
|
|
|
|
.blind_sum(&BlindSum::new()
|
|
|
|
.sub_blinding_factor(BlindingFactor::from_secret_key(skey)))
|
|
|
|
.unwrap();
|
2018-02-13 18:35:30 +03:00
|
|
|
|
|
|
|
let blind = blinding_factor.secret_key(&keychain.secp()).unwrap();
|
|
|
|
|
|
|
|
keychain.aggsig_create_context(&tx_id, blind);
|
|
|
|
keychain.aggsig_get_public_keys(&tx_id)
|
|
|
|
};
|
|
|
|
|
|
|
|
// receiver receives partial tx
|
|
|
|
let (receiver_pub_excess, receiver_pub_nonce, sig_part) = {
|
|
|
|
let keychain = receiver_keychain.clone();
|
|
|
|
let key_id = keychain.derive_key_id(1).unwrap();
|
|
|
|
|
|
|
|
// let blind = blind_sum.secret_key(&keychain.secp())?;
|
|
|
|
let blind = keychain.derived_key(&key_id).unwrap();
|
|
|
|
|
|
|
|
keychain.aggsig_create_context(&tx_id, blind);
|
|
|
|
let (pub_excess, pub_nonce) = keychain.aggsig_get_public_keys(&tx_id);
|
|
|
|
keychain.aggsig_add_output(&tx_id, &key_id);
|
|
|
|
|
2018-03-04 03:19:54 +03:00
|
|
|
let sig_part = keychain
|
|
|
|
.aggsig_calculate_partial_sig(&tx_id, &sender_pub_nonce, 0, 0)
|
|
|
|
.unwrap();
|
2018-02-13 18:35:30 +03:00
|
|
|
(pub_excess, pub_nonce, sig_part)
|
|
|
|
};
|
|
|
|
|
|
|
|
// check the sender can verify the partial signature
|
|
|
|
// received in the response back from the receiver
|
|
|
|
{
|
|
|
|
let keychain = sender_keychain.clone();
|
|
|
|
let sig_verifies = keychain.aggsig_verify_partial_sig(
|
|
|
|
&tx_id,
|
|
|
|
&sig_part,
|
|
|
|
&receiver_pub_nonce,
|
|
|
|
&receiver_pub_excess,
|
|
|
|
0,
|
|
|
|
0,
|
|
|
|
);
|
|
|
|
assert!(sig_verifies);
|
|
|
|
}
|
|
|
|
|
|
|
|
// now sender signs with their key
|
|
|
|
let sender_sig_part = {
|
|
|
|
let keychain = sender_keychain.clone();
|
2018-03-04 03:19:54 +03:00
|
|
|
keychain
|
|
|
|
.aggsig_calculate_partial_sig(&tx_id, &receiver_pub_nonce, 0, 0)
|
|
|
|
.unwrap()
|
2018-02-13 18:35:30 +03:00
|
|
|
};
|
|
|
|
|
|
|
|
// check the receiver can verify the partial signature
|
|
|
|
// received by the sender
|
|
|
|
{
|
|
|
|
let keychain = receiver_keychain.clone();
|
|
|
|
let sig_verifies = keychain.aggsig_verify_partial_sig(
|
|
|
|
&tx_id,
|
|
|
|
&sender_sig_part,
|
|
|
|
&sender_pub_nonce,
|
|
|
|
&sender_pub_excess,
|
|
|
|
0,
|
|
|
|
0,
|
|
|
|
);
|
|
|
|
assert!(sig_verifies);
|
|
|
|
}
|
|
|
|
|
|
|
|
// Receiver now builds final signature from sender and receiver parts
|
|
|
|
let (final_sig, final_pubkey) = {
|
|
|
|
let keychain = receiver_keychain.clone();
|
|
|
|
|
|
|
|
// Receiver recreates their partial sig (we do not maintain state from earlier)
|
2018-03-04 03:19:54 +03:00
|
|
|
let our_sig_part = keychain
|
|
|
|
.aggsig_calculate_partial_sig(&tx_id, &sender_pub_nonce, 0, 0)
|
|
|
|
.unwrap();
|
2018-02-13 18:35:30 +03:00
|
|
|
|
|
|
|
// Receiver now generates final signature from the two parts
|
2018-03-04 03:19:54 +03:00
|
|
|
let final_sig = keychain
|
|
|
|
.aggsig_calculate_final_sig(
|
|
|
|
&tx_id,
|
|
|
|
&sender_sig_part,
|
|
|
|
&our_sig_part,
|
|
|
|
&sender_pub_nonce,
|
|
|
|
)
|
|
|
|
.unwrap();
|
2018-02-13 18:35:30 +03:00
|
|
|
|
|
|
|
// Receiver calculates the final public key (to verify sig later)
|
2018-03-04 03:19:54 +03:00
|
|
|
let final_pubkey = keychain
|
|
|
|
.aggsig_calculate_final_pubkey(&tx_id, &sender_pub_excess)
|
|
|
|
.unwrap();
|
2018-02-13 18:35:30 +03:00
|
|
|
|
|
|
|
(final_sig, final_pubkey)
|
|
|
|
};
|
|
|
|
|
|
|
|
// Receiver checks the final signature verifies
|
|
|
|
{
|
|
|
|
let keychain = receiver_keychain.clone();
|
|
|
|
|
|
|
|
// Receiver check the final signature verifies
|
2018-03-04 03:19:54 +03:00
|
|
|
let sig_verifies =
|
|
|
|
keychain.aggsig_verify_final_sig_build_msg(&final_sig, &final_pubkey, 0, 0);
|
2018-02-13 18:35:30 +03:00
|
|
|
assert!(sig_verifies);
|
|
|
|
}
|
|
|
|
|
|
|
|
// Check we can verify the sig using the kernel excess
|
|
|
|
{
|
|
|
|
let keychain = Keychain::from_random_seed().unwrap();
|
|
|
|
|
2018-03-04 03:19:54 +03:00
|
|
|
let msg = secp::Message::from_slice(&kernel_sig_msg(0, 0)).unwrap();
|
2018-02-13 18:35:30 +03:00
|
|
|
|
|
|
|
let sig_verifies = Keychain::aggsig_verify_single_from_commit(
|
|
|
|
&keychain.secp,
|
|
|
|
&final_sig,
|
|
|
|
&msg,
|
|
|
|
&kernel_excess,
|
|
|
|
);
|
|
|
|
|
|
|
|
assert!(sig_verifies);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
#[test]
fn aggsig_sender_receiver_interaction_offset() {
	let sender_keychain = Keychain::from_random_seed().unwrap();
	let receiver_keychain = Keychain::from_random_seed().unwrap();

	// wallet-to-wallet interaction is keyed off this tx identifier
	let tx_id = Uuid::new_v4();

	// The kernel offset used to split the key.
	// Offsets are summed at the block level, which prevents kernels
	// from being used to reconstruct (or identify) individual transactions.
	let kernel_offset = SecretKey::new(&sender_keychain.secp(), &mut thread_rng());

	// Precompute the kernel excess for convenience; during real
	// transaction building this happens as part of the tx flow.
	let kernel_excess = {
		let sender_skey = sender_keychain
			.derived_key(&sender_keychain.derive_key_id(1).unwrap())
			.unwrap();

		let receiver_skey = receiver_keychain
			.derived_key(&receiver_keychain.derive_key_id(1).unwrap())
			.unwrap();

		let keychain = Keychain::from_random_seed().unwrap();

		// output minus input, with the kernel offset subtracted
		// exactly as a kernel signature verifier would do
		let sum = BlindSum::new()
			.sub_blinding_factor(BlindingFactor::from_secret_key(sender_skey))
			.add_blinding_factor(BlindingFactor::from_secret_key(receiver_skey))
			.sub_blinding_factor(BlindingFactor::from_secret_key(kernel_offset));
		let blinding_factor = keychain.blind_sum(&sum).unwrap();

		let excess_key = blinding_factor.secret_key(&keychain.secp).unwrap();
		keychain.secp.commit(0, excess_key).unwrap()
	};

	// sender opens the tx interaction
	let (sender_pub_excess, sender_pub_nonce) = {
		let keychain = sender_keychain.clone();

		let skey = keychain
			.derived_key(&keychain.derive_key_id(1).unwrap())
			.unwrap();

		// this key funds an input, so negate the blinding factor
		// rather than using it as is; also subtract the kernel offset
		// so the aggsig context is built with our "split" key
		let sum = BlindSum::new()
			.sub_blinding_factor(BlindingFactor::from_secret_key(skey))
			.sub_blinding_factor(BlindingFactor::from_secret_key(kernel_offset));
		let blinding_factor = keychain.blind_sum(&sum).unwrap();

		let blind = blinding_factor.secret_key(&keychain.secp()).unwrap();

		keychain.aggsig_create_context(&tx_id, blind);
		keychain.aggsig_get_public_keys(&tx_id)
	};

	// receiver handles the incoming partial tx
	let (receiver_pub_excess, receiver_pub_nonce, sig_part) = {
		let keychain = receiver_keychain.clone();
		let key_id = keychain.derive_key_id(1).unwrap();

		let blind = keychain.derived_key(&key_id).unwrap();

		keychain.aggsig_create_context(&tx_id, blind);
		let (pub_excess, pub_nonce) = keychain.aggsig_get_public_keys(&tx_id);
		keychain.aggsig_add_output(&tx_id, &key_id);

		let sig_part = keychain
			.aggsig_calculate_partial_sig(&tx_id, &sender_pub_nonce, 0, 0)
			.unwrap();
		(pub_excess, pub_nonce, sig_part)
	};

	// the sender must be able to verify the partial signature
	// carried in the receiver's response
	{
		let keychain = sender_keychain.clone();
		let sig_verifies = keychain.aggsig_verify_partial_sig(
			&tx_id,
			&sig_part,
			&receiver_pub_nonce,
			&receiver_pub_excess,
			0,
			0,
		);
		assert!(sig_verifies);
	}

	// sender now signs with their own key
	let sender_sig_part = {
		let keychain = sender_keychain.clone();
		keychain
			.aggsig_calculate_partial_sig(&tx_id, &receiver_pub_nonce, 0, 0)
			.unwrap()
	};

	// the receiver must be able to verify the partial signature
	// produced by the sender
	{
		let keychain = receiver_keychain.clone();
		let sig_verifies = keychain.aggsig_verify_partial_sig(
			&tx_id,
			&sender_sig_part,
			&sender_pub_nonce,
			&sender_pub_excess,
			0,
			0,
		);
		assert!(sig_verifies);
	}

	// receiver assembles the final signature from both parts
	let (final_sig, final_pubkey) = {
		let keychain = receiver_keychain.clone();

		// recreate the receiver's partial sig (no state is kept from earlier)
		let our_sig_part = keychain
			.aggsig_calculate_partial_sig(&tx_id, &sender_pub_nonce, 0, 0)
			.unwrap();

		// combine the two halves into the final signature
		let final_sig = keychain
			.aggsig_calculate_final_sig(&tx_id, &sender_sig_part, &our_sig_part, &sender_pub_nonce)
			.unwrap();

		// derive the final public key so the sig can be verified later
		let final_pubkey = keychain
			.aggsig_calculate_final_pubkey(&tx_id, &sender_pub_excess)
			.unwrap();

		(final_sig, final_pubkey)
	};

	// receiver confirms the assembled signature verifies
	{
		let keychain = receiver_keychain.clone();

		let sig_verifies =
			keychain.aggsig_verify_final_sig_build_msg(&final_sig, &final_pubkey, 0, 0);
		assert!(sig_verifies);
	}

	// finally, confirm the sig verifies against the kernel excess commitment
	{
		let keychain = Keychain::from_random_seed().unwrap();

		let msg = secp::Message::from_slice(&kernel_sig_msg(0, 0)).unwrap();

		let sig_verifies = Keychain::aggsig_verify_single_from_commit(
			&keychain.secp,
			&final_sig,
			&msg,
			&kernel_excess,
		);

		assert!(sig_verifies);
	}
}
|
2017-10-03 03:02:31 +03:00
|
|
|
}
|