verify_cut_through and test coverage (#3424)
* fix inconsistent verify_cut_through() logic
* add test coverage for chain::process_block() and cut_through logic
* fix comment
parent 6a012d7e5b
commit 29cffe9b3c
7 changed files with 508 additions and 22 deletions
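
In Mimblewimble, an output that is created and spent within the same transaction or block is supposed to be cut through (both the input and the output removed), so a valid transaction or block must never contain an input and an output with the same commitment. The sketch below is not the grin API: it uses stand-in &str values in place of real Pedersen commitments, purely to illustrate the sort-and-scan approach this commit adopts for verify_cut_through(): gather every input and output commitment, sort them, and reject on any adjacent duplicate.

fn has_cut_through_violation(inputs: &[&str], outputs: &[&str]) -> bool {
    // Collect every input and output "commitment" into a single vec.
    let mut commits: Vec<&str> = inputs.iter().chain(outputs.iter()).copied().collect();
    // Sorting places any duplicated commitment next to its twin.
    commits.sort_unstable();
    // An adjacent equal pair means an input spends an output created in the same tx/block.
    commits.windows(2).any(|pair| pair[0] == pair[1])
}

fn main() {
    // "commit_b" appears as both an input and an output: a cut-through violation.
    assert!(has_cut_through_violation(
        &["commit_a", "commit_b"],
        &["commit_b", "commit_c"]
    ));
    // Disjoint commitments: valid.
    assert!(!has_cut_through_violation(&["commit_a"], &["commit_c"]));
}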
@@ -486,7 +486,8 @@ impl Chain {
 		Ok(())
 	}
 
-	fn new_ctx<'a>(
+	/// Build a new block processing context.
+	pub fn new_ctx<'a>(
 		&self,
 		opts: Options,
 		batch: store::Batch<'a>,
@@ -691,6 +692,22 @@ impl Chain {
 		})
 	}
 
+	/// Sets prev_root on a brand new block header by applying the previous header to the header MMR.
+	pub fn set_prev_root_only(&self, header: &mut BlockHeader) -> Result<(), Error> {
+		let mut header_pmmr = self.header_pmmr.write();
+		let prev_root =
+			txhashset::header_extending_readonly(&mut header_pmmr, &self.store(), |ext, batch| {
+				let prev_header = batch.get_previous_header(header)?;
+				pipe::rewind_and_apply_header_fork(&prev_header, ext, batch)?;
+				ext.root()
+			})?;
+
+		// Set the prev_root on the header.
+		header.prev_root = prev_root;
+
+		Ok(())
+	}
+
 	/// Sets the txhashset roots on a brand new block by applying the block on
 	/// the current txhashset state.
 	pub fn set_txhashset_roots(&self, b: &mut Block) -> Result<(), Error> {
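
Note: set_prev_root_only() appears to exist mainly to support testing. It produces a header whose prev_root is valid while the output, rangeproof and kernel roots are deliberately left unset, which is how the new chain test (build_block() with skip_roots = true in chain/tests/process_block_cut_through.rs below) constructs an otherwise invalid block to feed into the block processing pipeline.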
@@ -588,7 +588,9 @@ pub fn rewind_and_apply_fork(
 	Ok(fork_point)
 }
 
-/// Validate block inputs against utxo.
+/// Validate block inputs and outputs against utxo.
+/// Every input must spend an unspent output.
+/// No duplicate outputs created.
 fn validate_utxo(
 	block: &Block,
 	ext: &mut txhashset::ExtensionPair<'_>,
@@ -766,6 +766,38 @@ where
 	}
 }
 
+/// Start a new readonly header MMR extension.
+/// This MMR can be extended individually beyond the other (output, rangeproof and kernel) MMRs
+/// to allow headers to be validated before we receive the full block data.
+pub fn header_extending_readonly<'a, F, T>(
+	handle: &'a mut PMMRHandle<BlockHeader>,
+	store: &ChainStore,
+	inner: F,
+) -> Result<T, Error>
+where
+	F: FnOnce(&mut HeaderExtension<'_>, &Batch<'_>) -> Result<T, Error>,
+{
+	let batch = store.batch()?;
+
+	// Note: Extending either the sync_head or header_head MMR here.
+	// Use underlying MMR to determine the "head".
+	let head = match handle.head_hash() {
+		Ok(hash) => {
+			let header = batch.get_block_header(&hash)?;
+			Tip::from_header(&header)
+		}
+		Err(_) => Tip::default(),
+	};
+
+	let pmmr = PMMR::at(&mut handle.backend, handle.last_pos);
+	let mut extension = HeaderExtension::new(pmmr, head);
+	let res = inner(&mut extension, &batch);
+
+	handle.backend.discard();
+
+	res
+}
+
 /// Start a new header MMR unit of work.
 /// This MMR can be extended individually beyond the other (output, rangeproof and kernel) MMRs
 /// to allow headers to be validated before we receive the full block data.
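
The readonly helper mirrors the read-write header extension helper documented just below it: open a batch, build a HeaderExtension at the handle's current last_pos, run the caller's closure, then unconditionally discard the backend changes, so nothing the closure applies to the header MMR is persisted. set_prev_root_only() above uses it to compute a prev_root without mutating chain state.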
chain/tests/process_block_cut_through.rs (new file, 176 additions)
@@ -0,0 +1,176 @@
+// Copyright 2020 The Grin Developers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+mod chain_test_helper;
+
+use grin_chain as chain;
+use grin_core as core;
+use grin_keychain as keychain;
+use grin_util as util;
+
+use self::chain_test_helper::{clean_output_dir, genesis_block, init_chain};
+use crate::chain::{pipe, Chain, Options};
+use crate::core::core::verifier_cache::LruVerifierCache;
+use crate::core::core::{block, transaction};
+use crate::core::core::{Block, KernelFeatures, Transaction, Weighting};
+use crate::core::libtx::{build, reward, ProofBuilder};
+use crate::core::{consensus, global, pow};
+use crate::keychain::{ExtKeychain, ExtKeychainPath, Keychain, SwitchCommitmentType};
+use crate::util::RwLock;
+use chrono::Duration;
+use std::sync::Arc;
+
+fn build_block<K>(
+	chain: &Chain,
+	keychain: &K,
+	txs: &[Transaction],
+	skip_roots: bool,
+) -> Result<Block, chain::Error>
+where
+	K: Keychain,
+{
+	let prev = chain.head_header().unwrap();
+	let next_height = prev.height + 1;
+	let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter()?);
+	let fee = txs.iter().map(|x| x.fee()).sum();
+	let key_id = ExtKeychainPath::new(1, next_height as u32, 0, 0, 0).to_identifier();
+	let reward =
+		reward::output(keychain, &ProofBuilder::new(keychain), &key_id, fee, false).unwrap();
+
+	let mut block = Block::new(&prev, txs, next_header_info.clone().difficulty, reward)?;
+
+	block.header.timestamp = prev.timestamp + Duration::seconds(60);
+	block.header.pow.secondary_scaling = next_header_info.secondary_scaling;
+
+	// If we are skipping roots then just set the header prev_root and skip the other MMR roots.
+	// This allows us to build a header for an "invalid" block by ignoring outputs and kernels.
+	if skip_roots {
+		chain.set_prev_root_only(&mut block.header)?;
+	} else {
+		chain.set_txhashset_roots(&mut block)?;
+	}
+
+	let edge_bits = global::min_edge_bits();
+	block.header.pow.proof.edge_bits = edge_bits;
+	pow::pow_size(
+		&mut block.header,
+		next_header_info.difficulty,
+		global::proofsize(),
+		edge_bits,
+	)
+	.unwrap();
+
+	Ok(block)
+}
+
+#[test]
+fn process_block_cut_through() -> Result<(), chain::Error> {
+	let chain_dir = ".grin.cut_through";
+	global::set_local_chain_type(global::ChainTypes::AutomatedTesting);
+	util::init_test_logger();
+	clean_output_dir(chain_dir);
+
+	let keychain = ExtKeychain::from_random_seed(false)?;
+	let pb = ProofBuilder::new(&keychain);
+	let genesis = genesis_block(&keychain);
+	let chain = init_chain(chain_dir, genesis.clone());
+
+	// Mine a few empty blocks.
+	for _ in 1..6 {
+		let block = build_block(&chain, &keychain, &[], false)?;
+		chain.process_block(block, Options::MINE)?;
+	}
+
+	let key_id1 = ExtKeychainPath::new(1, 1, 0, 0, 0).to_identifier();
+	let key_id2 = ExtKeychainPath::new(1, 2, 0, 0, 0).to_identifier();
+	let key_id3 = ExtKeychainPath::new(1, 3, 0, 0, 0).to_identifier();
+
+	// Build a tx that spends a couple of early coinbase outputs and produces some new outputs.
+	// Note: We reuse key_ids resulting in an input and an output sharing the same commitment.
+	// The input is coinbase and the output is plain.
+	let tx = build::transaction(
+		KernelFeatures::Plain { fee: 0 },
+		&[
+			build::coinbase_input(consensus::REWARD, key_id1.clone()),
+			build::coinbase_input(consensus::REWARD, key_id2.clone()),
+			build::output(60_000_000_000, key_id1.clone()),
+			build::output(50_000_000_000, key_id2.clone()),
+			build::output(10_000_000_000, key_id3.clone()),
+		],
+		&keychain,
+		&pb,
+	)
+	.expect("valid tx");
+
+	// The offending commitment, reused in both an input and an output.
+	let commit = keychain.commit(60_000_000_000, &key_id1, SwitchCommitmentType::Regular)?;
+	let inputs: Vec<_> = tx.inputs().into();
+	assert!(inputs.iter().any(|input| input.commitment() == commit));
+	assert!(tx
+		.outputs()
+		.iter()
+		.any(|output| output.commitment() == commit));
+
+	let verifier_cache = Arc::new(RwLock::new(LruVerifierCache::new()));
+
+	// Transaction is invalid due to cut-through.
+	assert_eq!(
+		tx.validate(Weighting::AsTransaction, verifier_cache.clone()),
+		Err(transaction::Error::CutThrough),
+	);
+
+	// Transaction will not validate against the chain (utxo).
+	assert_eq!(
+		chain.validate_tx(&tx).map_err(|e| e.kind()),
+		Err(chain::ErrorKind::DuplicateCommitment(commit)),
+	);
+
+	// Build a block with this single invalid transaction.
+	let block = build_block(&chain, &keychain, &[tx.clone()], true)?;
+
+	// The block is invalid due to cut-through.
+	let prev = chain.head_header()?;
+	assert_eq!(
+		block.validate(&prev.total_kernel_offset(), verifier_cache),
+		Err(block::Error::Transaction(transaction::Error::CutThrough))
+	);
+
+	// The block processing pipeline will refuse to accept the block due to "duplicate commitment".
+	// Note: The error is "Other" with a stringified backtrace and is effectively impossible to introspect here...
+	assert!(chain.process_block(block.clone(), Options::MINE).is_err());
+
+	// Now exercise the internal call to pipe::process_block() directly so we can introspect the error
+	// without it being wrapped as above.
+	{
+		let store = chain.store();
+		let header_pmmr = chain.header_pmmr();
+		let txhashset = chain.txhashset();
+
+		let mut header_pmmr = header_pmmr.write();
+		let mut txhashset = txhashset.write();
+		let batch = store.batch()?;
+
+		let mut ctx = chain.new_ctx(Options::NONE, batch, &mut header_pmmr, &mut txhashset)?;
+		let res = pipe::process_block(&block, &mut ctx).map_err(|e| e.kind());
+		assert_eq!(
+			res,
+			Err(chain::ErrorKind::InvalidBlockProof(
+				block::Error::Transaction(transaction::Error::CutThrough)
+			))
+		);
+	}
+
+	clean_output_dir(chain_dir);
+	Ok(())
+}
@@ -810,6 +810,12 @@ impl TransactionBody {
 		self
 	}
 
+	/// Fully replace outputs.
+	pub fn replace_outputs(mut self, outputs: &[Output]) -> TransactionBody {
+		self.outputs = outputs.to_vec();
+		self
+	}
+
 	/// Builds a new TransactionBody with the provided output added. Existing
 	/// outputs, if any, are kept intact.
 	/// Sort order is maintained.
@@ -968,23 +974,24 @@ impl TransactionBody {
 		Ok(())
 	}
 
+	// Returns a single sorted vec of all input and output commitments.
+	// This gives us a convenient way of verifying cut_through.
+	fn inputs_outputs_committed(&self) -> Vec<Commitment> {
+		let mut commits = self.inputs_committed();
+		commits.extend_from_slice(self.outputs_committed().as_slice());
+		commits.sort_unstable();
+		commits
+	}
+
 	// Verify that no input is spending an output from the same block.
-	// Assumes inputs and outputs are sorted
+	// The inputs and outputs are not guaranteed to be sorted consistently once we support "commit only" inputs.
+	// We need to allocate as we need to sort the commitments so we keep this very simple and just look
+	// for duplicates across all input and output commitments.
 	fn verify_cut_through(&self) -> Result<(), Error> {
-		let inputs: Vec<_> = self.inputs().into();
-		let mut inputs = inputs.iter().map(|x| x.hash()).peekable();
-		let mut outputs = self.outputs.iter().map(|x| x.hash()).peekable();
-		while let (Some(ih), Some(oh)) = (inputs.peek(), outputs.peek()) {
-			match ih.cmp(oh) {
-				Ordering::Less => {
-					inputs.next();
-				}
-				Ordering::Greater => {
-					outputs.next();
-				}
-				Ordering::Equal => {
-					return Err(Error::CutThrough);
-				}
-			}
-		}
+		let commits = self.inputs_outputs_committed();
+		for pair in commits.windows(2) {
+			if pair[0] == pair[1] {
+				return Err(Error::CutThrough);
+			}
+		}
 		Ok(())
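
The rewritten verify_cut_through() drops the old two-iterator merge walk, which relied on inputs and outputs being sorted consistently by hash, in favour of a sort-and-scan over inputs_outputs_committed(). As the new comments note, that ordering assumption will no longer hold once "commit only" inputs are supported; the cost is one allocation and an O(n log n) sort, after which any duplicate commitment must show up as an adjacent pair caught by windows(2).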
@@ -1023,6 +1030,7 @@ impl TransactionBody {
 		self.verify_weight(weighting)?;
 		self.verify_no_nrd_duplicates()?;
 		self.verify_sorted()?;
+		self.verify_cut_through()?;
 		Ok(())
 	}
 
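
With the cut-through check now performed in the lightweight read-validation path, and the next hunk removing the now-redundant explicit call from the full validate() path (which already calls validate_read()), both validation paths reject cut-through violations — exactly what the new block and transaction tests assert via validate() and validate_read().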
@@ -1035,7 +1043,6 @@ impl TransactionBody {
 		verifier: Arc<RwLock<dyn VerifierCache>>,
 	) -> Result<(), Error> {
 		self.validate_read(weighting)?;
-		self.verify_cut_through()?;
 
 		// Find all the outputs that have not had their rangeproofs verified.
 		let outputs = {
@@ -784,3 +784,131 @@ fn validate_header_proof() {
 	)
 	.is_err());
 }
+
+// Test coverage for verifying cut-through during block validation.
+// It is not valid for a block to spend an output and produce a new output with the same commitment.
+// This test covers the case where a plain output is spent, producing a plain output with the same commitment.
+#[test]
+fn test_verify_cut_through_plain() -> Result<(), Error> {
+	global::set_local_chain_type(global::ChainTypes::UserTesting);
+
+	let keychain = ExtKeychain::from_random_seed(false).unwrap();
+
+	let key_id1 = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
+	let key_id2 = ExtKeychain::derive_key_id(1, 2, 0, 0, 0);
+	let key_id3 = ExtKeychain::derive_key_id(1, 3, 0, 0, 0);
+
+	let builder = ProofBuilder::new(&keychain);
+
+	let tx = build::transaction(
+		KernelFeatures::Plain { fee: 0 },
+		&[
+			build::input(10, key_id1.clone()),
+			build::input(10, key_id2.clone()),
+			build::output(10, key_id1.clone()),
+			build::output(6, key_id2.clone()),
+			build::output(4, key_id3.clone()),
+		],
+		&keychain,
+		&builder,
+	)
+	.expect("valid tx");
+
+	let prev = BlockHeader::default();
+	let key_id = ExtKeychain::derive_key_id(0, 0, 0, 0, 0);
+	let mut block = new_block(&[tx], &keychain, &builder, &prev, &key_id);
+
+	// The block should fail validation due to cut-through.
+	assert_eq!(
+		block.validate(&BlindingFactor::zero(), verifier_cache()),
+		Err(Error::Transaction(transaction::Error::CutThrough))
+	);
+
+	// The block should fail lightweight "read" validation due to cut-through.
+	assert_eq!(
+		block.validate_read(),
+		Err(Error::Transaction(transaction::Error::CutThrough))
+	);
+
+	// Apply cut-through to eliminate the offending input and output.
+	let mut inputs: Vec<_> = block.inputs().into();
+	let mut outputs = block.outputs().to_vec();
+	let (inputs, outputs, _, _) = transaction::cut_through(&mut inputs[..], &mut outputs[..])?;
+
+	block.body = block
+		.body
+		.replace_inputs(inputs.into())
+		.replace_outputs(outputs);
+
+	// Block validates successfully after applying cut-through.
+	block.validate(&BlindingFactor::zero(), verifier_cache())?;
+
+	// Block validates via lightweight "read" validation.
+	block.validate_read()?;
+
+	Ok(())
+}
+
+// Test coverage for verifying cut-through during block validation.
+// It is not valid for a block to spend an output and produce a new output with the same commitment.
+// This test covers the case where a coinbase output is spent, producing a plain output with the same commitment.
+#[test]
+fn test_verify_cut_through_coinbase() -> Result<(), Error> {
+	global::set_local_chain_type(global::ChainTypes::UserTesting);
+
+	let keychain = ExtKeychain::from_random_seed(false).unwrap();
+
+	let key_id1 = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
+	let key_id2 = ExtKeychain::derive_key_id(1, 2, 0, 0, 0);
+	let key_id3 = ExtKeychain::derive_key_id(1, 3, 0, 0, 0);
+
+	let builder = ProofBuilder::new(&keychain);
+
+	let tx = build::transaction(
+		KernelFeatures::Plain { fee: 0 },
+		&[
+			build::coinbase_input(consensus::REWARD, key_id1.clone()),
+			build::coinbase_input(consensus::REWARD, key_id2.clone()),
+			build::output(60_000_000_000, key_id1.clone()),
+			build::output(50_000_000_000, key_id2.clone()),
+			build::output(10_000_000_000, key_id3.clone()),
+		],
+		&keychain,
+		&builder,
+	)
+	.expect("valid tx");
+
+	let prev = BlockHeader::default();
+	let key_id = ExtKeychain::derive_key_id(0, 0, 0, 0, 0);
+	let mut block = new_block(&[tx], &keychain, &builder, &prev, &key_id);
+
+	// The block should fail validation due to cut-through.
+	assert_eq!(
+		block.validate(&BlindingFactor::zero(), verifier_cache()),
+		Err(Error::Transaction(transaction::Error::CutThrough))
+	);
+
+	// The block should fail lightweight "read" validation due to cut-through.
+	assert_eq!(
+		block.validate_read(),
+		Err(Error::Transaction(transaction::Error::CutThrough))
+	);
+
+	// Apply cut-through to eliminate the offending input and output.
+	let mut inputs: Vec<_> = block.inputs().into();
+	let mut outputs = block.outputs().to_vec();
+	let (inputs, outputs, _, _) = transaction::cut_through(&mut inputs[..], &mut outputs[..])?;
+
+	block.body = block
+		.body
+		.replace_inputs(inputs.into())
+		.replace_outputs(outputs);
+
+	// Block validates successfully after applying cut-through.
+	block.validate(&BlindingFactor::zero(), verifier_cache())?;
+
+	// Block validates via lightweight "read" validation.
+	block.validate_read()?;
+
+	Ok(())
+}
@@ -16,11 +16,17 @@
 
 pub mod common;
 
-use self::core::core::{Output, OutputFeatures};
-use self::core::libtx::proof;
-use self::core::ser;
+use crate::core::core::transaction::{self, Error};
+use crate::core::core::verifier_cache::LruVerifierCache;
+use crate::core::core::{KernelFeatures, Output, OutputFeatures, Weighting};
+use crate::core::global;
+use crate::core::libtx::build;
+use crate::core::libtx::proof::{self, ProofBuilder};
+use crate::core::{consensus, ser};
 use grin_core as core;
 use keychain::{ExtKeychain, Keychain};
+use std::sync::Arc;
+use util::RwLock;
 
 #[test]
 fn test_output_ser_deser() {
@@ -28,7 +34,7 @@ fn test_output_ser_deser() {
 	let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
 	let switch = keychain::SwitchCommitmentType::Regular;
 	let commit = keychain.commit(5, &key_id, switch).unwrap();
-	let builder = proof::ProofBuilder::new(&keychain);
+	let builder = ProofBuilder::new(&keychain);
 	let proof = proof::create(&keychain, &builder, 5, &key_id, switch, commit, None).unwrap();
 
 	let out = Output {
@@ -45,3 +51,121 @@ fn test_output_ser_deser() {
 	assert_eq!(dout.commit, out.commit);
 	assert_eq!(dout.proof, out.proof);
 }
+
+// Test coverage for verifying cut-through during transaction validation.
+// It is not valid for a transaction to spend an output and produce a new output with the same commitment.
+// This test covers the case where a plain output is spent, producing a plain output with the same commitment.
+#[test]
+fn test_verify_cut_through_plain() -> Result<(), Error> {
+	global::set_local_chain_type(global::ChainTypes::UserTesting);
+
+	let keychain = ExtKeychain::from_random_seed(false)?;
+
+	let key_id1 = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
+	let key_id2 = ExtKeychain::derive_key_id(1, 2, 0, 0, 0);
+	let key_id3 = ExtKeychain::derive_key_id(1, 3, 0, 0, 0);
+
+	let builder = proof::ProofBuilder::new(&keychain);
+
+	let mut tx = build::transaction(
+		KernelFeatures::Plain { fee: 0 },
+		&[
+			build::input(10, key_id1.clone()),
+			build::input(10, key_id2.clone()),
+			build::output(10, key_id1.clone()),
+			build::output(6, key_id2.clone()),
+			build::output(4, key_id3.clone()),
+		],
+		&keychain,
+		&builder,
+	)
+	.expect("valid tx");
+
+	let verifier_cache = Arc::new(RwLock::new(LruVerifierCache::new()));
+
+	// Transaction should fail validation due to cut-through.
+	assert_eq!(
+		tx.validate(Weighting::AsTransaction, verifier_cache.clone()),
+		Err(Error::CutThrough),
+	);
+
+	// Transaction should fail lightweight "read" validation due to cut-through.
+	assert_eq!(tx.validate_read(), Err(Error::CutThrough));
+
+	// Apply cut-through to eliminate the offending input and output.
+	let mut inputs: Vec<_> = tx.inputs().into();
+	let mut outputs = tx.outputs().to_vec();
+	let (inputs, outputs, _, _) = transaction::cut_through(&mut inputs[..], &mut outputs[..])?;
+
+	tx.body = tx
+		.body
+		.replace_inputs(inputs.into())
+		.replace_outputs(outputs);
+
+	// Transaction validates successfully after applying cut-through.
+	tx.validate(Weighting::AsTransaction, verifier_cache.clone())?;
+
+	// Transaction validates via lightweight "read" validation as well.
+	tx.validate_read()?;
+
+	Ok(())
+}
+
+// Test coverage for verifying cut-through during transaction validation.
+// It is not valid for a transaction to spend an output and produce a new output with the same commitment.
+// This test covers the case where a coinbase output is spent, producing a plain output with the same commitment.
+#[test]
+fn test_verify_cut_through_coinbase() -> Result<(), Error> {
+	global::set_local_chain_type(global::ChainTypes::UserTesting);
+
+	let keychain = ExtKeychain::from_random_seed(false)?;
+
+	let key_id1 = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
+	let key_id2 = ExtKeychain::derive_key_id(1, 2, 0, 0, 0);
+	let key_id3 = ExtKeychain::derive_key_id(1, 3, 0, 0, 0);
+
+	let builder = ProofBuilder::new(&keychain);
+
+	let mut tx = build::transaction(
+		KernelFeatures::Plain { fee: 0 },
+		&[
+			build::coinbase_input(consensus::REWARD, key_id1.clone()),
+			build::coinbase_input(consensus::REWARD, key_id2.clone()),
+			build::output(60_000_000_000, key_id1.clone()),
+			build::output(50_000_000_000, key_id2.clone()),
+			build::output(10_000_000_000, key_id3.clone()),
+		],
+		&keychain,
+		&builder,
+	)
+	.expect("valid tx");
+
+	let verifier_cache = Arc::new(RwLock::new(LruVerifierCache::new()));
+
+	// Transaction should fail validation due to cut-through.
+	assert_eq!(
+		tx.validate(Weighting::AsTransaction, verifier_cache.clone()),
+		Err(Error::CutThrough),
+	);
+
+	// Transaction should fail lightweight "read" validation due to cut-through.
+	assert_eq!(tx.validate_read(), Err(Error::CutThrough));
+
+	// Apply cut-through to eliminate the offending input and output.
+	let mut inputs: Vec<_> = tx.inputs().into();
+	let mut outputs = tx.outputs().to_vec();
+	let (inputs, outputs, _, _) = transaction::cut_through(&mut inputs[..], &mut outputs[..])?;
+
+	tx.body = tx
+		.body
+		.replace_inputs(inputs.into())
+		.replace_outputs(outputs);
+
+	// Transaction validates successfully after applying cut-through.
+	tx.validate(Weighting::AsTransaction, verifier_cache.clone())?;
+
+	// Transaction validates via lightweight "read" validation as well.
+	tx.validate_read()?;
+
+	Ok(())
+}