cleanup various build warnings (#714)
* cleanup various build warnings
* use serde_json in tests only
This commit is contained in:
parent c63aa70a0b
commit 4022b82817

10 changed files with 50 additions and 44 deletions
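Note on the second bullet above: an import that is only needed by tests triggers an "unused import" warning in non-test builds, so the fix is to move it into a `#[cfg(test)]` module. A minimal sketch of the pattern, assuming `serde` (with the derive feature) and `serde_json` as dependencies; the `Point` type is illustrative, grin's change does the same for `OutputPrintable` and `Utxo`:

	// Sketch only: assumes serde = { features = ["derive"] } and serde_json in Cargo.toml.
	use serde::{Deserialize, Serialize};

	#[derive(Serialize, Deserialize, PartialEq, Debug)]
	pub struct Point {
		pub x: i64,
		pub y: i64,
	}

	#[cfg(test)]
	mod test {
		// serde_json is only needed by the tests, so importing it here keeps
		// non-test builds free of an "unused import" warning.
		use super::*;
		use serde_json;

		#[test]
		fn serialize_point() {
			let json = "{\"x\":1,\"y\":2}";
			let p: Point = serde_json::from_str(json).unwrap();
			assert_eq!(serde_json::to_string(&p).unwrap(), json);
		}
	}
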
@@ -27,7 +27,6 @@ use serde;
 use serde::ser::SerializeStruct;
 use serde::de::MapAccess;
 use std::fmt;
-use serde_json;
 
 macro_rules! no_dup {
 	($field: ident) => {

@@ -580,8 +579,13 @@ pub struct PoolInfo {
 	pub total_size: usize,
 }
 
-#[test]
-fn serialize_output() {
+#[cfg(test)]
+mod test {
+	use super::*;
+	use serde_json;
+
+	#[test]
+	fn serialize_output() {
 	let hex_output = "{\
 		\"output_type\":\"Coinbase\",\
 		\"commit\":\"083eafae5d61a85ab07b12e1a51b3918d8e6de11fc6cde641d54af53608aa77b9f\",\

@@ -593,12 +597,13 @@ fn serialize_output() {
 	let deserialized: OutputPrintable = serde_json::from_str(&hex_output).unwrap();
 	let serialized = serde_json::to_string(&deserialized).unwrap();
 	assert_eq!(serialized, hex_output);
-}
+	}
 
-#[test]
-fn serialize_utxo() {
+	#[test]
+	fn serialize_utxo() {
 	let hex_commit = "{\"commit\":\"083eafae5d61a85ab07b12e1a51b3918d8e6de11fc6cde641d54af53608aa77b9f\"}";
 	let deserialized: Utxo = serde_json::from_str(&hex_commit).unwrap();
 	let serialized = serde_json::to_string(&deserialized).unwrap();
 	assert_eq!(serialized, hex_commit);
 }
+}

@@ -395,6 +395,7 @@ impl Chain {
	sumtrees.is_unspent(output_ref)
}

/// Validate the current chain state.
pub fn validate(&self) -> Result<(), Error> {
	let header = self.store.head_header()?;
	let mut sumtrees = self.sumtrees.write().unwrap();

@@ -506,7 +507,7 @@ impl Chain {
 		{
 			let mut head = self.head.lock().unwrap();
 			*head = Tip::from_block(&header);
-			self.store.save_body_head(&head);
+			self.store.save_body_head(&head)?;
 			self.store.save_header_height(&header)?;
 		}
 

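Several hunks in this commit, including save_body_head above, the send_sumtrees_request call in sync.rs, and the aggsig_create_context calls in the wallet, deal with rustc's "unused `Result` that must be used" warning by either propagating the value with `?` or unwrapping it. A minimal, self-contained sketch of the pattern; the function names and file paths below are illustrative, not grin's API:

	use std::fs::File;
	use std::io::{self, Write};

	fn save_head(path: &str, head: u64) -> io::Result<()> {
		let mut f = File::create(path)?;
		f.write_all(&head.to_le_bytes())?;
		Ok(())
	}

	fn update_head_ignoring_result(path: &str, head: u64) {
		// Compiles, but rustc emits "warning: unused `Result` that must be used".
		save_head(path, head);
	}

	fn update_head(path: &str, head: u64) -> io::Result<()> {
		// The fix used throughout this commit: propagate the error with `?`,
		// which requires the enclosing function to return a compatible Result.
		save_head(path, head)?;
		Ok(())
	}

	fn main() -> io::Result<()> {
		update_head_ignoring_result("head_a.bin", 1);
		update_head("head_b.bin", 2)?;
		Ok(())
	}
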
@@ -24,7 +24,6 @@ use core::core::{Block, BlockHeader};
use core::consensus::TargetError;
use core::core::target::Difficulty;
use grin_store::{self, option_to_not_found, to_key, Error, u64_to_key};
use util::LOGGER;

const STORE_SUBPATH: &'static str = "chain";

@@ -22,14 +22,14 @@ use std::ops::Deref;
 use std::path::{Path, PathBuf};
 use std::sync::Arc;
 
-use util::{secp, static_secp_instance};
+use util::static_secp_instance;
 use util::secp::pedersen::{RangeProof, Commitment};
 
 use core::consensus::reward;
 use core::core::{Block, BlockHeader, SumCommit, Input, Output, OutputIdentifier, OutputFeatures, TxKernel};
 use core::core::pmmr::{self, HashSum, NoSum, Summable, PMMR};
 use core::core::hash::Hashed;
-use core::ser::{self, Readable};
+use core::ser;
 use grin_store;
 use grin_store::sumtree::{PMMRBackend, AppendOnlyFile};
 use types::ChainStore;

@@ -110,8 +110,6 @@ impl p2p::ChainAdapter for NetToChainAdapter {
 		} else {
 			// TODO - do we need to validate the header here?
 
-			let kernel_count = cb.kern_ids.len();
-
 			let txs = {
 				let tx_pool = self.tx_pool.read().unwrap();
 				tx_pool.retrieve_transactions(&cb)

@@ -301,9 +299,14 @@ impl p2p::ChainAdapter for NetToChainAdapter {
 	/// If we're willing to accept that new state, the data stream will be
 	/// read as a zip file, unzipped and the resulting state files should be
 	/// rewound to the provided indexes.
-	fn sumtrees_write(&self, h: Hash,
-		rewind_to_output: u64, rewind_to_kernel: u64,
-		sumtree_data: File, peer_addr: SocketAddr) -> bool {
+	fn sumtrees_write(
+		&self,
+		h: Hash,
+		rewind_to_output: u64,
+		rewind_to_kernel: u64,
+		sumtree_data: File,
+		_peer_addr: SocketAddr,
+	) -> bool {
 		// TODO check whether we should accept any sumtree now
 		if let Err(e) = w(&self.chain).
 			sumtrees_write(h, rewind_to_output, rewind_to_kernel, sumtree_data) {

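The rename of peer_addr to _peer_addr above (and of config to _config in peers.rs further down) is the standard way to quiet the `unused_variables` warning while keeping a signature that a trait or caller still requires. A short sketch of the idea; the trait and type names are illustrative, not grin's p2p API:

	use std::net::SocketAddr;

	trait Adapter {
		fn sumtrees_received(&self, peer_addr: SocketAddr) -> bool;
	}

	struct NoopAdapter;

	impl Adapter for NoopAdapter {
		// `peer_addr` is required by the trait but not used by this impl;
		// the leading underscore documents that and avoids the warning.
		fn sumtrees_received(&self, _peer_addr: SocketAddr) -> bool {
			true
		}
	}

	fn main() {
		let a = NoopAdapter;
		println!("{}", a.sumtrees_received("127.0.0.1:13414".parse().unwrap()));
	}
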
@@ -244,6 +244,7 @@ impl Server {
	})
}

/// Stop the server.
pub fn stop(&self) {
	self.p2p.stop();
	self.stop.store(true, Ordering::Relaxed);

@@ -19,10 +19,10 @@ use std::sync::atomic::{AtomicBool, Ordering};
 use time;
 
 use chain;
-use core::core::hash::{Hash, Hashed, ZERO_HASH};
+use core::core::hash::{Hash, Hashed};
 use core::core::target::Difficulty;
 use core::global;
-use p2p::{self, Peer, Peers, ChainAdapter};
+use p2p::{self, Peer, Peers};
 use types::Error;
 use util::LOGGER;

@@ -35,7 +35,6 @@ pub fn run_sync(
	fast_sync: bool,
	stop: Arc<AtomicBool>,
) {

	let chain = chain.clone();
	let _ = thread::Builder::new()
		.name("sync".to_string())

@@ -107,7 +106,7 @@ pub fn run_sync(
 				for _ in 0..horizon-2 {
 					sumtree_head = chain.get_block_header(&sumtree_head.previous).unwrap();
 				}
-				p.send_sumtrees_request(sumtree_head.height, sumtree_head.hash());
+				p.send_sumtrees_request(sumtree_head.height, sumtree_head.hash()).unwrap();
 				prev_state_sync = current_time;
 			}
 		}

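The import hunks in this file, in sumtree.rs, and in peers.rs (dropping ZERO_HASH, ChainAdapter, secp, Readable and atomic from braced lists) all address the `unused_imports` lint, for which the fix is simply narrowing the list to what the module actually uses. A tiny self-contained illustration; the `geometry` module and its items are invented for the example:

	mod geometry {
		pub const ZERO: f64 = 0.0;
		pub fn area(w: f64, h: f64) -> f64 { w * h }
		pub fn perimeter(w: f64, h: f64) -> f64 { 2.0 * (w + h) }
	}

	// use self::geometry::{area, perimeter, ZERO}; // `perimeter` unused -> warning
	use self::geometry::{area, ZERO};

	fn main() {
		assert!((area(3.0, 4.0) - 12.0).abs() < 1e-9);
		assert_eq!(ZERO, 0.0);
	}
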
@@ -15,7 +15,7 @@
 use std::collections::HashMap;
 use std::fs::File;
 use std::net::SocketAddr;
-use std::sync::{Arc, RwLock, atomic};
+use std::sync::{Arc, RwLock};
 
 use rand::{thread_rng, Rng};
 

@@ -33,19 +33,17 @@ pub struct Peers {
 	pub adapter: Arc<ChainAdapter>,
 	store: PeerStore,
 	peers: RwLock<HashMap<SocketAddr, Arc<RwLock<Peer>>>>,
-	config: P2PConfig,
 }
 
 unsafe impl Send for Peers {}
 unsafe impl Sync for Peers {}
 
 impl Peers {
-	pub fn new(store: PeerStore, adapter: Arc<ChainAdapter>, config: P2PConfig) -> Peers {
+	pub fn new(store: PeerStore, adapter: Arc<ChainAdapter>, _config: P2PConfig) -> Peers {
 		Peers {
 			adapter,
 			store,
 			peers: RwLock::new(HashMap::new()),
-			config,
 		}
 	}
 

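Besides underscoring the constructor parameter, the hunk above drops the config field that was stored but never read, which is what the `dead_code` lint flags. A hedged sketch of that situation; the struct and field names are illustrative, not grin's Peers type:

	struct PeerSet {
		names: Vec<String>,
		// Stored but never read anywhere below: the `dead_code` lint reports
		// that the field is never read, and the fix is to remove it.
		capacity: usize,
	}

	impl PeerSet {
		fn new(names: Vec<String>, capacity: usize) -> PeerSet {
			PeerSet { names, capacity }
		}
	}

	fn main() {
		let p = PeerSet::new(vec!["alpha".into(), "beta".into()], 8);
		println!("{} peers", p.names.len());
	}
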
@@ -115,7 +115,7 @@ fn handle_sender_initiation(
 	// Create a new aggsig context
 	// this will create a new blinding sum and nonce, and store them
 	let blind = blind_sum.secret_key(&keychain.secp())?;
-	keychain.aggsig_create_context(&partial_tx.id, blind);
+	keychain.aggsig_create_context(&partial_tx.id, blind)?;
 	keychain.aggsig_add_output(&partial_tx.id, &key_id);
 
 	let sig_part = keychain.aggsig_calculate_partial_sig(

@@ -81,7 +81,7 @@ pub fn issue_send_tx(
 	// Create a new aggsig context
 	let tx_id = Uuid::new_v4();
 	let skey = blind_offset.secret_key(&keychain.secp())?;
-	keychain.aggsig_create_context(&tx_id, skey);
+	keychain.aggsig_create_context(&tx_id, skey)?;
 
 	let partial_tx = build_partial_tx(&tx_id, keychain, amount_with_fee, kernel_offset, None, tx);
 