2018-05-30 23:57:13 +03:00
|
|
|
// Copyright 2018 The Grin Developers
|
|
|
|
//
|
|
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
// you may not use this file except in compliance with the License.
|
|
|
|
// You may obtain a copy of the License at
|
|
|
|
//
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
//
|
|
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
// See the License for the specific language governing permissions and
|
|
|
|
// limitations under the License.
|
|
|
|
|
|
|
|
//! Common test functions
|
|
|
|
|
2018-12-08 02:59:40 +03:00
|
|
|
use self::chain::store::ChainStore;
|
|
|
|
use self::chain::types::Tip;
|
|
|
|
use self::core::core::hash::{Hash, Hashed};
|
|
|
|
use self::core::core::verifier_cache::VerifierCache;
|
|
|
|
use self::core::core::{Block, BlockHeader, BlockSums, Committed, Transaction};
|
|
|
|
use self::core::libtx;
|
|
|
|
use self::keychain::{ExtKeychain, Keychain};
|
|
|
|
use self::pool::types::*;
|
|
|
|
use self::pool::TransactionPool;
|
|
|
|
use self::util::secp::pedersen::Commitment;
|
|
|
|
use self::util::RwLock;
|
|
|
|
use grin_chain as chain;
|
|
|
|
use grin_core as core;
|
|
|
|
use grin_keychain as keychain;
|
|
|
|
use grin_pool as pool;
|
|
|
|
use grin_store as store;
|
|
|
|
use grin_util as util;
|
2018-09-24 11:24:10 +03:00
|
|
|
use std::collections::HashSet;
|
2018-05-30 23:57:13 +03:00
|
|
|
use std::fs;
|
2018-10-20 03:13:07 +03:00
|
|
|
use std::sync::Arc;
|
2018-05-30 23:57:13 +03:00
|
|
|
|
|
|
|
/// Mock chain adapter used by the pool tests: a real `ChainStore` backed by
/// an on-disk db plus an in-memory set of unspent output commitments that
/// stands in for the full UTXO set.
#[derive(Clone)]
pub struct ChainAdapter {
	// Underlying chain store (headers, head tip, block sums).
	pub store: Arc<ChainStore>,
	// Commitments of currently unspent outputs; shared (via Arc) across clones
	// of the adapter so all handles observe the same UTXO state.
	pub utxo: Arc<RwLock<HashSet<Commitment>>>,
}
|
|
|
|
|
|
|
|
impl ChainAdapter {
	/// Create a new adapter with a fresh chain store under `target/<db_root>`
	/// and an empty in-memory UTXO set.
	/// Returns an error string if the underlying store cannot be initialized.
	pub fn init(db_root: String) -> Result<ChainAdapter, String> {
		let target_dir = format!("target/{}", db_root);
		let db_env = Arc::new(store::new_env(target_dir.clone()));
		let chain_store =
			ChainStore::new(db_env).map_err(|e| format!("failed to init chain_store, {:?}", e))?;
		let store = Arc::new(chain_store);
		let utxo = Arc::new(RwLock::new(HashSet::new()));

		Ok(ChainAdapter { store, utxo })
	}

	/// Apply `block` to the test chain state: persist its header, advance the
	/// head, store the new block sums, and update the in-memory UTXO set.
	/// Panics on any db error — acceptable in test-support code.
	pub fn update_db_for_block(&self, block: &Block) {
		let header = &block.header;
		let tip = Tip::from_header(header);
		// Single batch so header/head/sums are committed atomically below.
		let batch = self.store.batch().unwrap();

		batch.save_block_header(header).unwrap();
		batch.save_head(&tip).unwrap();

		// Retrieve previous block_sums from the db.
		// Falls back to default sums when the previous block is unknown
		// (e.g. the genesis case in these tests).
		let prev_sums = if let Ok(prev_sums) = batch.get_block_sums(&tip.prev_block_h) {
			prev_sums
		} else {
			BlockSums::default()
		};

		// Overage is based purely on the new block.
		// Previous block_sums have taken all previous overage into account.
		let overage = header.overage();

		// Offset on the other hand is the total kernel offset from the new block.
		let offset = header.total_kernel_offset();

		// Verify the kernel sums for the block_sums with the new block applied.
		let (utxo_sum, kernel_sum) = (prev_sums, block as &dyn Committed)
			.verify_kernel_sums(overage, offset)
			.unwrap();

		let block_sums = BlockSums {
			utxo_sum,
			kernel_sum,
		};
		batch.save_block_sums(&header.hash(), &block_sums).unwrap();

		batch.commit().unwrap();

		// Mirror the block in the in-memory UTXO set: spent inputs leave,
		// new outputs enter. Scoped so the write lock is released promptly.
		{
			let mut utxo = self.utxo.write();
			for x in block.inputs() {
				utxo.remove(&x.commitment());
			}
			for x in block.outputs() {
				utxo.insert(x.commitment());
			}
		}
	}
}
|
|
|
|
|
|
|
|
impl BlockChain for ChainAdapter {
|
2018-08-20 16:48:05 +03:00
|
|
|
fn chain_head(&self) -> Result<BlockHeader, PoolError> {
|
|
|
|
self.store
|
|
|
|
.head_header()
|
|
|
|
.map_err(|_| PoolError::Other(format!("failed to get chain head")))
|
|
|
|
}
|
|
|
|
|
2018-09-24 11:24:10 +03:00
|
|
|
fn get_block_header(&self, hash: &Hash) -> Result<BlockHeader, PoolError> {
|
|
|
|
self.store
|
|
|
|
.get_block_header(hash)
|
|
|
|
.map_err(|_| PoolError::Other(format!("failed to get block header")))
|
|
|
|
}
|
|
|
|
|
|
|
|
fn get_block_sums(&self, hash: &Hash) -> Result<BlockSums, PoolError> {
|
|
|
|
self.store
|
|
|
|
.get_block_sums(hash)
|
|
|
|
.map_err(|_| PoolError::Other(format!("failed to get block sums")))
|
|
|
|
}
|
|
|
|
|
2018-09-25 13:01:19 +03:00
|
|
|
fn validate_tx(&self, tx: &Transaction) -> Result<(), pool::PoolError> {
|
2018-10-20 03:13:07 +03:00
|
|
|
let utxo = self.utxo.read();
|
2018-09-24 11:24:10 +03:00
|
|
|
|
|
|
|
for x in tx.outputs() {
|
|
|
|
if utxo.contains(&x.commitment()) {
|
|
|
|
return Err(PoolError::Other(format!("output commitment not unique")));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
for x in tx.inputs() {
|
|
|
|
if !utxo.contains(&x.commitment()) {
|
|
|
|
return Err(PoolError::Other(format!("not in utxo set")));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(())
|
2018-05-30 23:57:13 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
// Mocking this check out for these tests.
|
|
|
|
// We will test the Merkle proof verification logic elsewhere.
|
|
|
|
fn verify_coinbase_maturity(&self, _tx: &Transaction) -> Result<(), PoolError> {
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
|
|
|
// Mocking this out for these tests.
|
|
|
|
fn verify_tx_lock_height(&self, _tx: &Transaction) -> Result<(), PoolError> {
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-08-30 17:44:34 +03:00
|
|
|
pub fn test_setup(
|
2018-12-08 02:59:40 +03:00
|
|
|
chain: Arc<dyn BlockChain>,
|
|
|
|
verifier_cache: Arc<RwLock<dyn VerifierCache>>,
|
2018-08-30 17:44:34 +03:00
|
|
|
) -> TransactionPool {
|
2018-05-30 23:57:13 +03:00
|
|
|
TransactionPool::new(
|
|
|
|
PoolConfig {
|
|
|
|
accept_fee_base: 0,
|
|
|
|
max_pool_size: 50,
|
2018-11-05 15:51:52 +03:00
|
|
|
max_stempool_size: 50,
|
2018-11-14 21:22:08 +03:00
|
|
|
mineable_max_weight: 10_000,
|
2018-05-30 23:57:13 +03:00
|
|
|
},
|
|
|
|
chain.clone(),
|
2018-08-30 17:44:34 +03:00
|
|
|
verifier_cache.clone(),
|
2018-05-30 23:57:13 +03:00
|
|
|
Arc::new(NoopAdapter {}),
|
|
|
|
)
|
|
|
|
}
|
|
|
|
|
2018-06-08 08:21:54 +03:00
|
|
|
pub fn test_transaction_spending_coinbase<K>(
|
|
|
|
keychain: &K,
|
2018-05-30 23:57:13 +03:00
|
|
|
header: &BlockHeader,
|
|
|
|
output_values: Vec<u64>,
|
2018-06-08 08:21:54 +03:00
|
|
|
) -> Transaction
|
|
|
|
where
|
|
|
|
K: Keychain,
|
|
|
|
{
|
2018-05-30 23:57:13 +03:00
|
|
|
let output_sum = output_values.iter().sum::<u64>() as i64;
|
|
|
|
|
|
|
|
let coinbase_reward: u64 = 60_000_000_000;
|
|
|
|
|
|
|
|
let fees: i64 = coinbase_reward as i64 - output_sum;
|
|
|
|
assert!(fees >= 0);
|
|
|
|
|
|
|
|
let mut tx_elements = Vec::new();
|
|
|
|
|
|
|
|
// single input spending a single coinbase (deterministic key_id aka height)
|
|
|
|
{
|
2018-10-10 12:11:01 +03:00
|
|
|
let key_id = ExtKeychain::derive_key_id(1, header.height as u32, 0, 0, 0);
|
2018-08-16 02:20:33 +03:00
|
|
|
tx_elements.push(libtx::build::coinbase_input(coinbase_reward, key_id));
|
2018-05-30 23:57:13 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
for output_value in output_values {
|
2018-10-10 12:11:01 +03:00
|
|
|
let key_id = ExtKeychain::derive_key_id(1, output_value as u32, 0, 0, 0);
|
2018-05-30 23:57:13 +03:00
|
|
|
tx_elements.push(libtx::build::output(output_value, key_id));
|
|
|
|
}
|
|
|
|
|
|
|
|
tx_elements.push(libtx::build::with_fee(fees as u64));
|
|
|
|
|
2018-06-08 08:21:54 +03:00
|
|
|
libtx::build::transaction(tx_elements, keychain).unwrap()
|
2018-05-30 23:57:13 +03:00
|
|
|
}
|
|
|
|
|
2018-06-08 08:21:54 +03:00
|
|
|
pub fn test_transaction<K>(
|
|
|
|
keychain: &K,
|
2018-05-30 23:57:13 +03:00
|
|
|
input_values: Vec<u64>,
|
|
|
|
output_values: Vec<u64>,
|
2018-06-08 08:21:54 +03:00
|
|
|
) -> Transaction
|
|
|
|
where
|
|
|
|
K: Keychain,
|
|
|
|
{
|
2018-05-30 23:57:13 +03:00
|
|
|
let input_sum = input_values.iter().sum::<u64>() as i64;
|
|
|
|
let output_sum = output_values.iter().sum::<u64>() as i64;
|
|
|
|
|
|
|
|
let fees: i64 = input_sum - output_sum;
|
|
|
|
assert!(fees >= 0);
|
|
|
|
|
|
|
|
let mut tx_elements = Vec::new();
|
|
|
|
|
|
|
|
for input_value in input_values {
|
2018-10-10 12:11:01 +03:00
|
|
|
let key_id = ExtKeychain::derive_key_id(1, input_value as u32, 0, 0, 0);
|
2018-05-30 23:57:13 +03:00
|
|
|
tx_elements.push(libtx::build::input(input_value, key_id));
|
|
|
|
}
|
|
|
|
|
|
|
|
for output_value in output_values {
|
2018-10-10 12:11:01 +03:00
|
|
|
let key_id = ExtKeychain::derive_key_id(1, output_value as u32, 0, 0, 0);
|
2018-05-30 23:57:13 +03:00
|
|
|
tx_elements.push(libtx::build::output(output_value, key_id));
|
|
|
|
}
|
|
|
|
tx_elements.push(libtx::build::with_fee(fees as u64));
|
|
|
|
|
2018-06-08 08:21:54 +03:00
|
|
|
libtx::build::transaction(tx_elements, keychain).unwrap()
|
2018-05-30 23:57:13 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
pub fn test_source() -> TxSource {
|
|
|
|
TxSource {
|
|
|
|
debug_name: format!("test"),
|
|
|
|
identifier: format!("127.0.0.1"),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Remove the test database directory `target/<db_root>` and everything in
/// it. Best-effort: failures (e.g. the directory does not exist) are logged
/// to stdout rather than propagated, so tests can always call this first.
pub fn clean_output_dir(db_root: String) {
	let dir = format!("target/{}", db_root);
	match fs::remove_dir_all(dir) {
		Ok(()) => {}
		Err(e) => println!("cleaning output dir failed - {:?}", e),
	}
}
|