Refactor Output Identifiers (#3371)

* refactor output to have internal output identifier

refactor to use AsRef for output identifier

make the output MMR explicit in terms of output identifiers

* put the serde deser back for rangeproof

* add json test for transactions
Commit metadata: authored by Antioch Peverell on 2020-09-03 10:59:54 +01:00; committed via GitHub.
parent caa6b8c747
commit 133089e985
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
17 changed files with 198 additions and 154 deletions

View file

@ -246,7 +246,7 @@ impl OutputHandler {
let outputs = block
.outputs()
.iter()
.filter(|output| commitments.is_empty() || commitments.contains(&output.commit))
.filter(|output| commitments.is_empty() || commitments.contains(&output.commitment()))
.map(|output| {
OutputPrintable::from_output(output, &chain, Some(&header), include_proof, true)
})
@ -279,7 +279,7 @@ impl OutputHandler {
let outputs = block
.outputs()
.iter()
.filter(|output| commitments.is_empty() || commitments.contains(&output.commit))
.filter(|output| commitments.is_empty() || commitments.contains(&output.commitment()))
.map(|output| {
OutputPrintable::from_output(
output,

View file

@ -290,8 +290,7 @@ impl OutputPrintable {
OutputType::Transaction
};
let out_id = core::OutputIdentifier::from(output);
let pos = chain.get_unspent(out_id.commitment())?;
let pos = chain.get_unspent(output.commitment())?;
let spent = pos.is_none();
@ -319,13 +318,13 @@ impl OutputPrintable {
let mut merkle_proof = None;
if include_merkle_proof && output.is_coinbase() && !spent {
if let Some(block_header) = block_header {
merkle_proof = chain.get_merkle_proof(&out_id, &block_header).ok();
merkle_proof = chain.get_merkle_proof(output, &block_header).ok();
}
};
Ok(OutputPrintable {
output_type,
commit: output.commit,
commit: output.commitment(),
spent,
proof,
proof_hash: output.proof.hash().to_hex(),

View file

@ -767,9 +767,9 @@ impl Chain {
}
/// Return a Merkle proof for the given commitment from the store.
pub fn get_merkle_proof(
pub fn get_merkle_proof<T: AsRef<OutputIdentifier>>(
&self,
output: &OutputIdentifier,
out_id: T,
header: &BlockHeader,
) -> Result<MerkleProof, Error> {
let mut header_pmmr = self.header_pmmr.write();
@ -777,7 +777,7 @@ impl Chain {
let merkle_proof =
txhashset::extending_readonly(&mut header_pmmr, &mut txhashset, |ext, batch| {
pipe::rewind_and_apply_fork(&header, ext, batch)?;
ext.extension.merkle_proof(output, batch)
ext.extension.merkle_proof(out_id, batch)
})?;
Ok(merkle_proof)
@ -1304,11 +1304,7 @@ impl Chain {
}
let mut output_vec: Vec<Output> = vec![];
for (ref x, &y) in outputs.1.iter().zip(rangeproofs.1.iter()) {
output_vec.push(Output {
commit: x.commit,
features: x.features,
proof: y,
});
output_vec.push(Output::new(x.features, x.commitment(), y));
}
Ok((outputs.0, last_index, output_vec))
}

View file

@ -144,7 +144,7 @@ impl PMMRHandle<BlockHeader> {
/// may have commitments that have already been spent, even with
/// pruning enabled.
pub struct TxHashSet {
output_pmmr_h: PMMRHandle<Output>,
output_pmmr_h: PMMRHandle<OutputIdentifier>,
rproof_pmmr_h: PMMRHandle<RangeProof>,
kernel_pmmr_h: PMMRHandle<TxKernel>,
@ -237,7 +237,9 @@ impl TxHashSet {
}
// Build a new bitmap accumulator for the provided output PMMR.
fn bitmap_accumulator(pmmr_h: &PMMRHandle<Output>) -> Result<BitmapAccumulator, Error> {
fn bitmap_accumulator(
pmmr_h: &PMMRHandle<OutputIdentifier>,
) -> Result<BitmapAccumulator, Error> {
let pmmr = ReadonlyPMMR::at(&pmmr_h.backend, pmmr_h.last_pos);
let size = pmmr::n_leaves(pmmr_h.last_pos);
let mut bitmap_accumulator = BitmapAccumulator::new();
@ -261,7 +263,7 @@ impl TxHashSet {
) -> Result<Option<(OutputIdentifier, CommitPos)>, Error> {
match self.commit_index.get_output_pos_height(&commit) {
Ok(Some(pos)) => {
let output_pmmr: ReadonlyPMMR<'_, Output, _> =
let output_pmmr: ReadonlyPMMR<'_, OutputIdentifier, _> =
ReadonlyPMMR::at(&self.output_pmmr_h.backend, self.output_pmmr_h.last_pos);
if let Some(out) = output_pmmr.get_data(pos.pos) {
if out.commitment() == commit {
@ -997,7 +999,7 @@ pub struct ExtensionPair<'a> {
pub struct Extension<'a> {
head: Tip,
output_pmmr: PMMR<'a, Output, PMMRBackend<Output>>,
output_pmmr: PMMR<'a, OutputIdentifier, PMMRBackend<OutputIdentifier>>,
rproof_pmmr: PMMR<'a, RangeProof, PMMRBackend<RangeProof>>,
kernel_pmmr: PMMR<'a, TxKernel, PMMRBackend<TxKernel>>,
@ -1171,13 +1173,13 @@ impl<'a> Extension<'a> {
// push the new output to the MMR.
let output_pos = self
.output_pmmr
.push(out)
.push(&out.identifier())
.map_err(&ErrorKind::TxHashSetErr)?;
// push the rangeproof to the MMR.
let rproof_pos = self
.rproof_pmmr
.push(&out.proof)
.push(&out.proof())
.map_err(&ErrorKind::TxHashSetErr)?;
// The output and rproof MMRs should be exactly the same size
@ -1231,14 +1233,15 @@ impl<'a> Extension<'a> {
/// Note: this relies on the MMR being stable even after pruning/compaction.
/// We need the hash of each sibling pos from the pos up to the peak
/// including the sibling leaf node which may have been removed.
pub fn merkle_proof(
pub fn merkle_proof<T: AsRef<OutputIdentifier>>(
&self,
output: &OutputIdentifier,
out_id: T,
batch: &Batch<'_>,
) -> Result<MerkleProof, Error> {
debug!("txhashset: merkle_proof: output: {:?}", output.commit,);
let out_id = out_id.as_ref();
debug!("txhashset: merkle_proof: output: {:?}", out_id.commit);
// then calculate the Merkle Proof based on the known pos
let pos = batch.get_output_pos(&output.commit)?;
let pos = batch.get_output_pos(&out_id.commit)?;
let merkle_proof = self
.output_pmmr
.merkle_proof(pos)

View file

@ -27,7 +27,7 @@ use grin_store::pmmr::PMMRBackend;
/// Readonly view of the UTXO set (based on output MMR).
pub struct UTXOView<'a> {
header_pmmr: ReadonlyPMMR<'a, BlockHeader, PMMRBackend<BlockHeader>>,
output_pmmr: ReadonlyPMMR<'a, Output, PMMRBackend<Output>>,
output_pmmr: ReadonlyPMMR<'a, OutputIdentifier, PMMRBackend<OutputIdentifier>>,
rproof_pmmr: ReadonlyPMMR<'a, RangeProof, PMMRBackend<RangeProof>>,
}
@ -35,7 +35,7 @@ impl<'a> UTXOView<'a> {
/// Build a new UTXO view.
pub fn new(
header_pmmr: ReadonlyPMMR<'a, BlockHeader, PMMRBackend<BlockHeader>>,
output_pmmr: ReadonlyPMMR<'a, Output, PMMRBackend<Output>>,
output_pmmr: ReadonlyPMMR<'a, OutputIdentifier, PMMRBackend<OutputIdentifier>>,
rproof_pmmr: ReadonlyPMMR<'a, RangeProof, PMMRBackend<RangeProof>>,
) -> UTXOView<'a> {
UTXOView {

View file

@ -16,7 +16,7 @@ use self::chain::types::{NoopAdapter, Tip};
use self::chain::Chain;
use self::core::core::hash::Hashed;
use self::core::core::verifier_cache::LruVerifierCache;
use self::core::core::{Block, BlockHeader, KernelFeatures, OutputIdentifier, Transaction};
use self::core::core::{Block, BlockHeader, KernelFeatures, Transaction};
use self::core::global::ChainTypes;
use self::core::libtx::{self, build, ProofBuilder};
use self::core::pow::Difficulty;
@ -549,8 +549,7 @@ fn spend_rewind_spend() {
// mine the first block and keep track of the block_hash
// so we can spend the coinbase later
let b = prepare_block_key_idx(&kc, &head, &chain, 2, 1);
let out_id = OutputIdentifier::from(&b.outputs()[0]);
assert!(out_id.features.is_coinbase());
assert!(b.outputs()[0].is_coinbase());
head = b.header.clone();
chain
.process_block(b.clone(), chain::Options::SKIP_POW)
@ -623,8 +622,7 @@ fn spend_in_fork_and_compact() {
// mine the first block and keep track of the block_hash
// so we can spend the coinbase later
let b = prepare_block(&kc, &fork_head, &chain, 2);
let out_id = OutputIdentifier::from(&b.outputs()[0]);
assert!(out_id.features.is_coinbase());
assert!(b.outputs()[0].is_coinbase());
fork_head = b.header.clone();
chain
.process_block(b.clone(), chain::Options::SKIP_POW)

View file

@ -806,12 +806,6 @@ impl TransactionBody {
self
}
/// Fully replace inputs.
pub fn replace_outputs(mut self, outputs: &[Output]) -> TransactionBody {
self.outputs = outputs.to_vec();
self
}
/// Builds a new TransactionBody with the provided output added. Existing
/// outputs, if any, are kept intact.
/// Sort order is maintained.
@ -822,6 +816,12 @@ impl TransactionBody {
self
}
/// Fully replace outputs.
pub fn replace_outputs(mut self, outputs: &[Output]) -> TransactionBody {
self.outputs = outputs.to_vec();
self
}
/// Builds a new TransactionBody with the provided kernel added. Existing
/// kernels, if any, are kept intact.
/// Sort order is maintained.
@ -1059,7 +1059,7 @@ impl TransactionBody {
let mut commits = vec![];
let mut proofs = vec![];
for x in &outputs {
commits.push(x.commit);
commits.push(x.commitment());
proofs.push(x.proof);
}
Output::batch_verify_proofs(&commits, &proofs)?;
@ -1590,6 +1590,7 @@ impl Input {
}
/// Wrapper around a vec of inputs.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
#[serde(untagged)]
pub enum Inputs {
/// Vec of inputs.
FeaturesAndCommit(Vec<Input>),
@ -1714,15 +1715,10 @@ impl Readable for OutputFeatures {
/// overflow and the ownership of the private key.
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub struct Output {
/// Options for an output's structure or use
pub features: OutputFeatures,
/// The homomorphic commitment representing the output amount
#[serde(
serialize_with = "secp_ser::as_hex",
deserialize_with = "secp_ser::commitment_from_hex"
)]
pub commit: Commitment,
/// A proof that the commitment is in the right range
/// Output identifier (features and commitment).
#[serde(flatten)]
pub identifier: OutputIdentifier,
/// Rangeproof associated with the commitment.
#[serde(
serialize_with = "secp_ser::as_hex",
deserialize_with = "secp_ser::rangeproof_from_hex"
@ -1730,12 +1726,29 @@ pub struct Output {
pub proof: RangeProof,
}
impl DefaultHashable for Output {}
hashable_ord!(Output);
impl Ord for Output {
fn cmp(&self, other: &Self) -> Ordering {
self.identifier.cmp(&other.identifier)
}
}
impl PartialOrd for Output {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl PartialEq for Output {
fn eq(&self, other: &Self) -> bool {
self.identifier == other.identifier
}
}
impl Eq for Output {}
impl AsRef<Commitment> for Output {
fn as_ref(&self) -> &Commitment {
&self.commit
&self.identifier.commit
}
}
@ -1751,13 +1764,8 @@ impl ::std::hash::Hash for Output {
/// an Output as binary.
impl Writeable for Output {
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ser::Error> {
self.features.write(writer)?;
self.commit.write(writer)?;
// The hash of an output doesn't include the range proof, which
// is committed to separately
if writer.serialization_mode() != ser::SerializationMode::Hash {
writer.write_bytes(&self.proof)?
}
self.identifier.write(writer)?;
self.proof.write(writer)?;
Ok(())
}
}
@ -1767,30 +1775,12 @@ impl Writeable for Output {
impl Readable for Output {
fn read<R: Reader>(reader: &mut R) -> Result<Output, ser::Error> {
Ok(Output {
features: OutputFeatures::read(reader)?,
commit: Commitment::read(reader)?,
identifier: OutputIdentifier::read(reader)?,
proof: RangeProof::read(reader)?,
})
}
}
/// We can build an Output MMR but store instances of OutputIdentifier in the MMR data file.
impl PMMRable for Output {
type E = OutputIdentifier;
fn as_elmt(&self) -> OutputIdentifier {
OutputIdentifier::from(self)
}
fn elmt_size() -> Option<u16> {
Some(
(1 + secp::constants::PEDERSEN_COMMITMENT_SIZE)
.try_into()
.unwrap(),
)
}
}
impl OutputFeatures {
/// Is this a coinbase output?
pub fn is_coinbase(self) -> bool {
@ -1804,19 +1794,37 @@ impl OutputFeatures {
}
impl Output {
/// Create a new output with the provided features, commitment and rangeproof.
pub fn new(features: OutputFeatures, commit: Commitment, proof: RangeProof) -> Output {
Output {
identifier: OutputIdentifier { features, commit },
proof,
}
}
/// Output identifier.
pub fn identifier(&self) -> OutputIdentifier {
self.identifier
}
/// Commitment for the output
pub fn commitment(&self) -> Commitment {
self.commit
self.identifier.commitment()
}
/// Is this a coinbase kernel?
/// Output features.
pub fn features(&self) -> OutputFeatures {
self.identifier.features
}
/// Is this a coinbase output?
pub fn is_coinbase(&self) -> bool {
self.features.is_coinbase()
self.identifier.is_coinbase()
}
/// Is this a plain kernel?
/// Is this a plain output?
pub fn is_plain(&self) -> bool {
self.features.is_plain()
self.identifier.is_plain()
}
/// Range proof for the output
@ -1833,7 +1841,7 @@ impl Output {
pub fn verify_proof(&self) -> Result<(), Error> {
let secp = static_secp_instance();
secp.lock()
.verify_bullet_proof(self.commit, self.proof, None)?;
.verify_bullet_proof(self.commitment(), self.proof, None)?;
Ok(())
}
@ -1846,6 +1854,12 @@ impl Output {
}
}
impl AsRef<OutputIdentifier> for Output {
fn as_ref(&self) -> &OutputIdentifier {
&self.identifier
}
}
/// An output_identifier can be build from either an input _or_ an output and
/// contains everything we need to uniquely identify an output being spent.
/// Needed because it is not sufficient to pass a commitment around.
@ -1856,6 +1870,10 @@ pub struct OutputIdentifier {
/// enforced.
pub features: OutputFeatures,
/// Output commitment
#[serde(
serialize_with = "secp_ser::as_hex",
deserialize_with = "secp_ser::commitment_from_hex"
)]
pub commit: Commitment,
}
@ -1882,12 +1900,21 @@ impl OutputIdentifier {
self.commit
}
/// Is this a coinbase output?
pub fn is_coinbase(&self) -> bool {
self.features.is_coinbase()
}
/// Is this a plain output?
pub fn is_plain(&self) -> bool {
self.features.is_plain()
}
/// Converts this identifier to a full output, provided a RangeProof
pub fn into_output(self, proof: RangeProof) -> Output {
Output {
identifier: self,
proof,
features: self.features,
commit: self.commit,
}
}
}
@ -1915,12 +1942,19 @@ impl Readable for OutputIdentifier {
}
}
impl From<&Output> for OutputIdentifier {
fn from(out: &Output) -> Self {
OutputIdentifier {
features: out.features,
commit: out.commit,
}
impl PMMRable for OutputIdentifier {
type E = Self;
fn as_elmt(&self) -> OutputIdentifier {
*self
}
fn elmt_size() -> Option<u16> {
Some(
(1 + secp::constants::PEDERSEN_COMMITMENT_SIZE)
.try_into()
.unwrap(),
)
}
}
@ -1933,6 +1967,12 @@ impl From<&Input> for OutputIdentifier {
}
}
impl AsRef<OutputIdentifier> for OutputIdentifier {
fn as_ref(&self) -> &OutputIdentifier {
self
}
}
#[cfg(test)]
mod test {
use super::*;

View file

@ -103,13 +103,13 @@ pub fn genesis_floo() -> core::Block {
])
.unwrap(),
};
let output = core::Output {
features: core::OutputFeatures::Coinbase,
commit: Commitment::from_vec(
let output = core::Output::new(
core::OutputFeatures::Coinbase,
Commitment::from_vec(
util::from_hex("08c12007af16d1ee55fffe92cef808c77e318dae70c3bc70cb6361f49d517f1b68")
.unwrap(),
),
proof: RangeProof {
RangeProof {
plen: SINGLE_BULLET_PROOF_SIZE,
proof: [
159, 156, 202, 179, 128, 169, 14, 227, 176, 79, 118, 180, 62, 164, 2, 234, 123, 30,
@ -152,7 +152,7 @@ pub fn genesis_floo() -> core::Block {
52, 175, 76, 157, 120, 208, 99, 135, 210, 81, 114, 230, 181,
],
},
};
);
gen.with_reward(output, kernel)
}
@ -215,13 +215,13 @@ pub fn genesis_main() -> core::Block {
])
.unwrap(),
};
let output = core::Output {
features: core::OutputFeatures::Coinbase,
commit: Commitment::from_vec(
let output = core::Output::new(
core::OutputFeatures::Coinbase,
Commitment::from_vec(
util::from_hex("08b7e57c448db5ef25aa119dde2312c64d7ff1b890c416c6dda5ec73cbfed2edea")
.unwrap(),
),
proof: RangeProof {
RangeProof {
plen: SINGLE_BULLET_PROOF_SIZE,
proof: [
147, 48, 173, 140, 222, 32, 95, 49, 124, 101, 55, 236, 169, 107, 134, 98, 147, 160,
@ -264,7 +264,7 @@ pub fn genesis_main() -> core::Block {
156, 243, 168, 216, 103, 38, 160, 20, 71, 148,
],
},
};
);
gen.with_reward(output, kernel)
}

View file

@ -234,12 +234,8 @@ pub fn verify_partial_sig(
/// let switch = SwitchCommitmentType::Regular;
/// let commit = keychain.commit(value, &key_id, switch).unwrap();
/// let builder = proof::ProofBuilder::new(&keychain);
/// let rproof = proof::create(&keychain, &builder, value, &key_id, switch, commit, None).unwrap();
/// let output = Output {
/// features: OutputFeatures::Coinbase,
/// commit: commit,
/// proof: rproof,
/// };
/// let proof = proof::create(&keychain, &builder, value, &key_id, switch, commit, None).unwrap();
/// let output = Output::new(OutputFeatures::Coinbase, commit, proof);
/// let height = 20;
/// let over_commit = secp.commit_value(reward(fees)).unwrap();
/// let out_commit = output.commitment();
@ -301,12 +297,8 @@ where
/// let switch = SwitchCommitmentType::Regular;
/// let commit = keychain.commit(value, &key_id, switch).unwrap();
/// let builder = proof::ProofBuilder::new(&keychain);
/// let rproof = proof::create(&keychain, &builder, value, &key_id, switch, commit, None).unwrap();
/// let output = Output {
/// features: OutputFeatures::Coinbase,
/// commit: commit,
/// proof: rproof,
/// };
/// let proof = proof::create(&keychain, &builder, value, &key_id, switch, commit, None).unwrap();
/// let output = Output::new(OutputFeatures::Coinbase, commit, proof);
/// let height = 20;
/// let over_commit = secp.commit_value(reward(fees)).unwrap();
/// let out_commit = output.commitment();

View file

@ -125,7 +125,7 @@ where
debug!("Building output: {}, {:?}", value, commit);
let rproof = proof::create(
let proof = proof::create(
build.keychain,
build.builder,
value,
@ -136,11 +136,7 @@ where
)?;
Ok((
tx.with_output(Output {
features: OutputFeatures::Plain,
commit,
proof: rproof,
}),
tx.with_output(Output::new(OutputFeatures::Plain, commit, proof)),
sum.add_key_id(key_id.to_value_path(value)),
))
},

View file

@ -43,13 +43,9 @@ where
trace!("Block reward - Pedersen Commit is: {:?}", commit,);
let rproof = proof::create(keychain, builder, value, key_id, switch, commit, None)?;
let proof = proof::create(keychain, builder, value, key_id, switch, commit, None)?;
let output = Output {
features: OutputFeatures::Coinbase,
commit,
proof: rproof,
};
let output = Output::new(OutputFeatures::Coinbase, commit, proof);
let secp = static_secp_instance();
let secp = secp.lock();
@ -78,10 +74,10 @@ where
}
};
let proof = TxKernel {
let kernel = TxKernel {
features: KernelFeatures::Coinbase,
excess,
excess_sig: sig,
};
Ok((output, proof))
Ok((output, kernel))
}

View file

@ -675,11 +675,9 @@ pub trait VerifySortedAndUnique<T> {
fn verify_sorted_and_unique(&self) -> Result<(), Error>;
}
impl<T: Hashed> VerifySortedAndUnique<T> for Vec<T> {
impl<T: Ord> VerifySortedAndUnique<T> for Vec<T> {
fn verify_sorted_and_unique(&self) -> Result<(), Error> {
let hashes = self.iter().map(|item| item.hash()).collect::<Vec<_>>();
let pairs = hashes.windows(2);
for pair in pairs {
for pair in self.windows(2) {
if pair[0] > pair[1] {
return Err(Error::SortError);
} else if pair[0] == pair[1] {

View file

@ -19,7 +19,7 @@ use crate::core::core::block::{Block, BlockHeader, Error, HeaderVersion, Untrust
use crate::core::core::hash::Hashed;
use crate::core::core::id::ShortIdentifiable;
use crate::core::core::transaction::{
self, KernelFeatures, NRDRelativeHeight, OutputFeatures, Transaction,
self, KernelFeatures, NRDRelativeHeight, Output, OutputFeatures, Transaction,
};
use crate::core::core::verifier_cache::{LruVerifierCache, VerifierCache};
use crate::core::core::{Committed, CompactBlock};
@ -344,11 +344,13 @@ fn remove_coinbase_output_flag() {
let builder = ProofBuilder::new(&keychain);
let prev = BlockHeader::default();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let mut b = new_block(&[], &keychain, &builder, &prev, &key_id);
let mut output = b.outputs()[0].clone();
output.features = OutputFeatures::Plain;
b.body.outputs = vec![output];
let b = new_block(&[], &keychain, &builder, &prev, &key_id);
let output = b.outputs()[0];
let output = Output::new(OutputFeatures::Plain, output.commitment(), output.proof());
let b = Block {
body: b.body.replace_outputs(&[output]),
..b
};
assert_eq!(b.verify_coinbase(), Err(Error::CoinbaseSumMismatch));
assert!(b

View file

@ -460,9 +460,9 @@ fn hash_output() {
&builder,
)
.unwrap();
let h = tx.outputs()[0].hash();
let h = tx.outputs()[0].identifier.hash();
assert!(h != ZERO_HASH);
let h2 = tx.outputs()[1].hash();
let h2 = tx.outputs()[1].identifier.hash();
assert!(h != h2);
}

View file

@ -15,10 +15,10 @@
//! Transaction integration tests
pub mod common;
use crate::common::tx1i1o;
use crate::core::core::transaction::{self, Error};
use crate::core::core::verifier_cache::LruVerifierCache;
use crate::core::core::{KernelFeatures, Output, OutputFeatures, Weighting};
use crate::core::core::{KernelFeatures, Output, OutputFeatures, Transaction, Weighting};
use crate::core::global;
use crate::core::libtx::build;
use crate::core::libtx::proof::{self, ProofBuilder};
@ -28,6 +28,35 @@ use keychain::{ExtKeychain, Keychain};
use std::sync::Arc;
use util::RwLock;
// We use json serialization between wallet->node when pushing transactions to the network.
// This test ensures we exercise this serialization/deserialization code.
#[test]
fn test_transaction_json_ser_deser() {
let tx1 = tx1i1o();
let value = serde_json::to_value(&tx1).unwrap();
assert!(value["offset"].is_string());
assert_eq!(value["body"]["inputs"][0]["features"], "Plain");
assert!(value["body"]["inputs"][0]["commit"].is_string());
assert_eq!(value["body"]["outputs"][0]["features"], "Plain");
assert!(value["body"]["outputs"][0]["commit"].is_string());
assert!(value["body"]["outputs"][0]["proof"].is_string());
// Note: Tx kernel "features" serialize in a slightly unexpected way.
assert_eq!(value["body"]["kernels"][0]["features"]["Plain"]["fee"], 2);
assert!(value["body"]["kernels"][0]["excess"].is_string());
assert!(value["body"]["kernels"][0]["excess_sig"].is_string());
let tx2: Transaction = serde_json::from_value(value).unwrap();
assert_eq!(tx1, tx2);
let tx1 = tx1i1o();
let str = serde_json::to_string(&tx1).unwrap();
println!("{}", str);
let tx2: Transaction = serde_json::from_str(&str).unwrap();
assert_eq!(tx1, tx2);
}
#[test]
fn test_output_ser_deser() {
let keychain = ExtKeychain::from_random_seed(false).unwrap();
@ -37,18 +66,14 @@ fn test_output_ser_deser() {
let builder = ProofBuilder::new(&keychain);
let proof = proof::create(&keychain, &builder, 5, &key_id, switch, commit, None).unwrap();
let out = Output {
features: OutputFeatures::Plain,
commit: commit,
proof: proof,
};
let out = Output::new(OutputFeatures::Plain, commit, proof);
let mut vec = vec![];
ser::serialize_default(&mut vec, &out).expect("serialized failed");
let dout: Output = ser::deserialize_default(&mut &vec[..]).unwrap();
assert_eq!(dout.features, OutputFeatures::Plain);
assert_eq!(dout.commit, out.commit);
assert_eq!(dout.features(), OutputFeatures::Plain);
assert_eq!(dout.commitment(), out.commitment());
assert_eq!(dout.proof, out.proof);
}

View file

@ -37,11 +37,7 @@ fn test_verifier_cache_rangeproofs() {
let builder = proof::ProofBuilder::new(&keychain);
let proof = proof::create(&keychain, &builder, 5, &key_id, switch, commit, None).unwrap();
let out = Output {
features: OutputFeatures::Plain,
commit: commit,
proof: proof,
};
let out = Output::new(OutputFeatures::Plain, commit, proof);
// Check our output is not verified according to the cache.
{

View file

@ -303,8 +303,11 @@ where
let agg_tx = self
.all_transactions_aggregate(extra_tx)?
.unwrap_or(Transaction::empty());
let mut outputs: Vec<OutputIdentifier> =
agg_tx.outputs().iter().map(|out| out.into()).collect();
let mut outputs: Vec<OutputIdentifier> = agg_tx
.outputs()
.iter()
.map(|out| out.identifier())
.collect();
// By applying cut_through to tx inputs and agg_tx outputs we can
// determine the outputs being spent from the pool and those still unspent