core: various cleanups, improvements, code comments (#38)

* core: cleanup slicing impls for Hash
* core: clean up Readable trait, implement Readable/Writeable for various integers
* core: change Hash debug output to hex
* core: correct warnings in all modules
Merope Riddle 2017-04-10 06:17:23 +00:00 committed by Ignotus Peverell
parent 9e82fb1774
commit 39ddeb0a2a
16 changed files with 224 additions and 102 deletions
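The central API change here is the second bullet above: `Readable` drops its redundant type parameter, so `trait Readable<T>` with `read` returning `Result<T, Error>` becomes `trait Readable where Self: Sized` with `read` returning `Result<Self, Error>` (see the ser.rs hunks below). A minimal, self-contained sketch of that shape follows; `Reader`, `SliceReader` and `Pair` are illustrative stand-ins, not the crate's actual `ser` types:

trait Reader {
    fn read_u64(&mut self) -> Result<u64, String>;
}

// New shape: no type parameter, the implementing type is `Self`.
trait Readable where Self: Sized {
    fn read(reader: &mut dyn Reader) -> Result<Self, String>;
}

// Toy reader over a byte slice, standing in for the crate's BinReader.
struct SliceReader<'a>(&'a [u8]);

impl<'a> Reader for SliceReader<'a> {
    fn read_u64(&mut self) -> Result<u64, String> {
        let data = self.0;
        if data.len() < 8 {
            return Err("unexpected end of data".to_string());
        }
        let (head, rest) = data.split_at(8);
        self.0 = rest;
        // Big-endian, matching the byteorder::BigEndian usage in the hunks below.
        Ok(head.iter().fold(0u64, |acc, b| (acc << 8) | *b as u64))
    }
}

struct Pair {
    a: u64,
    b: u64,
}

// Before this commit this would have been `impl Readable<Pair> for Pair`.
impl Readable for Pair {
    fn read(reader: &mut dyn Reader) -> Result<Pair, String> {
        let a = reader.read_u64()?;
        let b = reader.read_u64()?;
        Ok(Pair { a: a, b: b })
    }
}

fn main() {
    let bytes = [0u8, 0, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0, 0, 0, 0, 9];
    let p = Pair::read(&mut SliceReader(&bytes)).expect("read should succeed");
    assert_eq!((p.a, p.b), (7, 9));
}

The implementations throughout the diff below (`Tip`, `BlockHeader`, `Block`, `Difficulty`, the p2p messages, and so on) all change from `impl Readable<T> for T` to `impl Readable for T` in exactly this way.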


@ -68,7 +68,7 @@ impl ser::Writeable for Tip {
}
}
impl ser::Readable<Tip> for Tip {
impl ser::Readable for Tip {
fn read(reader: &mut ser::Reader) -> Result<Tip, ser::Error> {
let height = try!(reader.read_u64());
let last = try!(Hash::read(reader));


@ -21,7 +21,7 @@
use std::cmp;
use bigint::{BigInt, Sign, BigUint};
use bigint::{BigInt, Sign};
use core::target::Difficulty;
@ -72,7 +72,7 @@ pub fn next_target(ts: i64,
let soft_min = one.clone() <<
(((prev_cuckoo_sz - cmp::min(DEFAULT_SIZESHIFT, prev_cuckoo_sz)) *
8 + 16) as usize);
let prev_diff = BigInt::from_biguint(Sign::Plus, prev_diff.num);
let prev_diff = BigInt::from_biguint(Sign::Plus, prev_diff.into_biguint());
let (pdiff, clen) = if prev_diff > soft_min && prev_cuckoo_sz < MAX_SIZESHIFT {
(prev_diff / two, prev_cuckoo_sz + 1)
} else {
@ -89,7 +89,7 @@ pub fn next_target(ts: i64,
if new_diff < one {
(Difficulty::one(), clen)
} else {
(Difficulty { num: new_diff.to_biguint().unwrap() }, clen)
(Difficulty::from_biguint(new_diff.to_biguint().unwrap()), clen)
}
}


@ -15,24 +15,24 @@
//! Blocks and blockheaders
use time;
use secp;
use secp::{Secp256k1, Signature, Message};
use secp::{self, Secp256k1};
use secp::key::SecretKey;
use std::collections::HashSet;
use core::Committed;
use core::{Input, Output, Proof, TxKernel, Transaction, COINBASE_KERNEL, COINBASE_OUTPUT};
use core::transaction::merkle_inputs_outputs;
use consensus::{REWARD, DEFAULT_SIZESHIFT};
use consensus::REWARD;
use core::hash::{Hash, Hashed, ZERO_HASH};
use core::target::Difficulty;
use ser::{self, Readable, Reader, Writeable, Writer};
bitflags! {
/// Options for block validation
pub flags BlockFeatures: u8 {
const DEFAULT_BLOCK = 0b00000000,
}
/// Options for block validation
pub flags BlockFeatures: u8 {
/// No flags
const DEFAULT_BLOCK = 0b00000000,
}
}
/// Block header, fairly standard compared to other blockchains.
@ -45,6 +45,7 @@ pub struct BlockHeader {
pub timestamp: time::Tm,
/// Length of the cuckoo cycle used to mine this block.
pub cuckoo_len: u8,
/// Merkle root of the UTXO set
pub utxo_merkle: Hash,
/// Merkle tree of hashes for all inputs, outputs and kernels in the block
pub tx_merkle: Hash,
@ -102,7 +103,7 @@ impl Writeable for BlockHeader {
}
/// Deserialization of a block header
impl Readable<BlockHeader> for BlockHeader {
impl Readable for BlockHeader {
fn read(reader: &mut Reader) -> Result<BlockHeader, ser::Error> {
let height = try!(reader.read_u64());
let previous = try!(Hash::read(reader));
@ -138,9 +139,13 @@ impl Readable<BlockHeader> for BlockHeader {
/// bitcoin's schedule) and expressed as a global transaction fee (added v.H),
/// additive to the total of fees ever collected.
pub struct Block {
/// The header with metadata and commitments to the rest of the data
pub header: BlockHeader,
/// List of transaction inputs
pub inputs: Vec<Input>,
/// List of transaction outputs
pub outputs: Vec<Output>,
/// List of transaction kernels and associated proofs
pub kernels: Vec<TxKernel>,
}
@ -173,7 +178,7 @@ impl Writeable for Block {
/// Implementation of Readable for a block, defines how to read a full block
/// from a binary stream.
impl Readable<Block> for Block {
impl Readable for Block {
fn read(reader: &mut Reader) -> Result<Block, ser::Error> {
let header = try!(BlockHeader::read(reader));
@ -277,10 +282,12 @@ impl Block {
.compact())
}
/// Blockhash, computed using only the header
pub fn hash(&self) -> Hash {
self.header.hash()
}
/// Sum of all fees (inputs less outputs) in the block
pub fn total_fees(&self) -> u64 {
self.kernels.iter().map(|p| p.fee).sum()
}
@ -327,8 +334,8 @@ impl Block {
}
}
// Merges the 2 blocks, essentially appending the inputs, outputs and kernels.
// Also performs a compaction on the result.
/// Merges the 2 blocks, essentially appending the inputs, outputs and kernels.
/// Also performs a compaction on the result.
pub fn merge(&self, other: Block) -> Block {
let mut all_inputs = self.inputs.clone();
all_inputs.append(&mut other.inputs.clone());
@ -457,14 +464,12 @@ impl Block {
#[cfg(test)]
mod test {
use super::*;
use core::{Input, Output, Transaction};
use core::Transaction;
use core::build::{self, input, output, input_rand, output_rand, with_fee};
use core::hash::{Hash, Hashed};
use core::test::{tx1i1o, tx2i1o};
use core::test::tx2i1o;
use secp::{self, Secp256k1};
use secp::key::SecretKey;
use rand::Rng;
use rand::os::OsRng;
fn new_secp() -> Secp256k1 {


@ -28,7 +28,6 @@
use byteorder::{ByteOrder, BigEndian};
use secp::{self, Secp256k1};
use secp::key::SecretKey;
use secp::pedersen::*;
use rand::os::OsRng;
use core::{Transaction, Input, Output, DEFAULT_OUTPUT};
@ -140,19 +139,19 @@ pub fn output_rand(value: u64) -> Box<Append> {
/// Sets the fee on the transaction being built.
pub fn with_fee(fee: u64) -> Box<Append> {
Box::new(move |build, (tx, sum)| -> (Transaction, BlindSum) { (tx.with_fee(fee), sum) })
Box::new(move |_build, (tx, sum)| -> (Transaction, BlindSum) { (tx.with_fee(fee), sum) })
}
/// Sets a known excess value on the transaction being built. Usually used in
/// combination with the initial_tx function when a new transaction is built
/// by adding to a pre-existing one.
pub fn with_excess(excess: SecretKey) -> Box<Append> {
Box::new(move |build, (tx, sum)| -> (Transaction, BlindSum) { (tx, sum.add(excess)) })
Box::new(move |_build, (tx, sum)| -> (Transaction, BlindSum) { (tx, sum.add(excess)) })
}
/// Sets an initial transaction to add to when building a new transaction.
pub fn initial_tx(tx: Transaction) -> Box<Append> {
Box::new(move |build, (_, sum)| -> (Transaction, BlindSum) { (tx.clone(), sum) })
Box::new(move |_build, (_, sum)| -> (Transaction, BlindSum) { (tx.clone(), sum) })
}
/// Builds a new transaction by combining all the combinators provided in a


@ -17,21 +17,21 @@
//! Primary hash function used in the protocol
//!
use byteorder::{ByteOrder, BigEndian};
use std::fmt;
use std::{fmt, ops};
use tiny_keccak::Keccak;
use std::convert::AsRef;
use ser::{self, Reader, Readable, Writer, Writeable, Error, AsFixedBytes};
/// A hash consisting of all zeroes, used as a sentinel. No known preimage.
pub const ZERO_HASH: Hash = Hash([0; 32]);
/// A hash to uniquely (or close enough) identify one of the main blockchain
/// constructs. Used pervasively for blocks, transactions and outputs.
#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
#[derive(Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
pub struct Hash(pub [u8; 32]);
impl fmt::Display for Hash {
impl fmt::Debug for Hash {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for i in self.0[..].iter().cloned() {
try!(write!(f, "{:02x}", i));
@ -40,15 +40,57 @@ impl fmt::Display for Hash {
}
}
impl fmt::Display for Hash {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(self, f)
}
}
impl Hash {
/// Converts the hash to a byte vector
pub fn to_vec(&self) -> Vec<u8> {
self.0.to_vec()
}
/// Converts the hash to a byte slice
pub fn to_slice(&self) -> &[u8] {
&self.0
}
}
impl ops::Index<usize> for Hash {
type Output = u8;
fn index(&self, idx: usize) -> &u8 {
&self.0[idx]
}
}
impl ops::Index<ops::Range<usize>> for Hash {
type Output = [u8];
fn index(&self, idx: ops::Range<usize>) -> &[u8] {
&self.0[idx]
}
}
impl ops::Index<ops::RangeTo<usize>> for Hash {
type Output = [u8];
fn index(&self, idx: ops::RangeTo<usize>) -> &[u8] {
&self.0[idx]
}
}
impl ops::Index<ops::RangeFrom<usize>> for Hash {
type Output = [u8];
fn index(&self, idx: ops::RangeFrom<usize>) -> &[u8] {
&self.0[idx]
}
}
impl ops::Index<ops::RangeFull> for Hash {
type Output = [u8];
fn index(&self, idx: ops::RangeFull) -> &[u8] {
&self.0[idx]
}
}
impl AsRef<[u8]> for Hash {
@ -57,7 +99,7 @@ impl AsRef<[u8]> for Hash {
}
}
impl Readable<Hash> for Hash {
impl Readable for Hash {
fn read(reader: &mut Reader) -> Result<Hash, ser::Error> {
let v = try!(reader.read_fixed_bytes(32));
let mut a = [0; 32];
@ -80,9 +122,17 @@ pub struct HashWriter {
}
impl HashWriter {
/// Consume the `HashWriter`, outputting its current hash into a 32-byte array
pub fn finalize(self, output: &mut [u8]) {
self.state.finalize(output);
}
/// Consume the `HashWriter`, outputting a `Hash` corresponding to its current state
pub fn into_hash(self) -> Hash {
let mut new_hash = ZERO_HASH;
self.state.finalize(&mut new_hash.0[..]);
new_hash
}
}
impl Default for HashWriter {
@ -104,6 +154,7 @@ impl ser::Writer for HashWriter {
/// A trait for types that have a canonical hash
pub trait Hashed {
/// Obtain the hash of the object
fn hash(&self) -> Hash;
}
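Connecting the third bullet of the commit message to the hash.rs hunks above: `Hash` now hand-rolls `Debug` to print lowercase hex and `Display` simply delegates to it, while the `ops::Index` impls let call sites write `&h[..]` or `&h[0..4]` instead of `h.to_slice()`. A self-contained sketch of the formatting pattern, using a hypothetical 4-byte `Digest` stand-in rather than the real 32-byte `Hash`:

use std::fmt;

struct Digest([u8; 4]); // illustrative stand-in for Hash([u8; 32])

impl fmt::Debug for Digest {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Same idea as the Hash impl above: two lowercase hex digits per byte.
        for b in self.0.iter() {
            write!(f, "{:02x}", b)?;
        }
        Ok(())
    }
}

impl fmt::Display for Digest {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(self, f)
    }
}

fn main() {
    let d = Digest([0xde, 0xad, 0xbe, 0xef]);
    assert_eq!(format!("{:?}", d), "deadbeef");
    assert_eq!(format!("{}", d), "deadbeef");
}

The slicing cleanup shows up later in this diff, where `pow_hash.to_slice()` becomes `&pow_hash[..]` in pow/mod.rs and the miner.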


@ -31,7 +31,7 @@ use consensus::PROOFSIZE;
pub use self::block::{Block, BlockHeader, DEFAULT_BLOCK};
pub use self::transaction::{Transaction, Input, Output, TxKernel, COINBASE_KERNEL,
COINBASE_OUTPUT, DEFAULT_OUTPUT};
use self::hash::{Hash, Hashed, HashWriter, ZERO_HASH};
use self::hash::{Hash, Hashed, ZERO_HASH};
use ser::{Writeable, Writer, Reader, Readable, Error};
/// Implemented by types that hold inputs and outputs including Pedersen
@ -88,7 +88,7 @@ impl fmt::Debug for Proof {
for (i, val) in self.0[..].iter().enumerate() {
try!(write!(f, "{:x}", val));
if i < PROOFSIZE - 1 {
write!(f, " ");
try!(write!(f, " "));
}
}
write!(f, ")")
@ -138,7 +138,7 @@ impl Proof {
}
}
impl Readable<Proof> for Proof {
impl Readable for Proof {
fn read(reader: &mut Reader) -> Result<Proof, Error> {
let mut pow = [0u32; PROOFSIZE];
for n in 0..PROOFSIZE {
@ -211,7 +211,6 @@ mod test {
use secp::Secp256k1;
use secp::key::SecretKey;
use ser;
use rand::Rng;
use rand::os::OsRng;
use core::build::{self, input, output, input_rand, output_rand, with_fee, initial_tx,
with_excess};
@ -310,7 +309,6 @@ mod test {
#[test]
fn tx_build_exchange() {
let ref secp = new_secp();
let outh = ZERO_HASH;
let tx_alice: Transaction;
let blind_sum: SecretKey;


@ -16,9 +16,7 @@
//! and
//! the related difficulty, defined as the maximum target divided by the hash.
use byteorder::{ByteOrder, BigEndian};
use std::fmt;
use std::error::Error;
use std::ops::Add;
use bigint::BigUint;
@ -35,7 +33,7 @@ pub const MAX_TARGET: [u8; 32] = [0xf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
/// The difficulty is defined as the maximum target divided by the block hash.
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord)]
pub struct Difficulty {
pub num: BigUint,
num: BigUint
}
impl Difficulty {
@ -46,17 +44,28 @@ impl Difficulty {
Difficulty { num: BigUint::new(vec![1]) }
}
/// Convert a `u32` into a `Difficulty`
pub fn from_num(num: u32) -> Difficulty {
Difficulty { num: BigUint::new(vec![num]) }
}
/// Convert a `BigUint` into a `Difficulty`
pub fn from_biguint(num: BigUint) -> Difficulty {
Difficulty { num: num }
}
/// Computes the difficulty from a hash. Divides the maximum target by the
/// provided hash.
pub fn from_hash(h: &Hash) -> Difficulty {
let max_target = BigUint::from_bytes_be(&MAX_TARGET);
let h_num = BigUint::from_bytes_be(h.to_slice());
let h_num = BigUint::from_bytes_be(&h[..]);
Difficulty { num: max_target / h_num }
}
/// Converts the difficulty into a bignum
pub fn into_biguint(self) -> BigUint {
self.num
}
}
impl Add<Difficulty> for Difficulty {
@ -74,7 +83,7 @@ impl Writeable for Difficulty {
}
}
impl Readable<Difficulty> for Difficulty {
impl Readable for Difficulty {
fn read(reader: &mut Reader) -> Result<Difficulty, ser::Error> {
let dlen = try!(reader.read_u8());
let data = try!(reader.read_fixed_bytes(dlen as usize));
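With `num` now private, code outside the module constructs and unwraps difficulties through the methods added above. A hypothetical round-trip sketch; the `grin_core` crate name and the `core::target` module path are assumptions made for illustration, based on the internal imports visible elsewhere in this diff:

// Crate and module path are assumptions for illustration only.
use grin_core::core::target::Difficulty;

fn main() {
    let d = Difficulty::from_num(250);
    // `Difficulty { num: ... }` can no longer be written outside the module;
    // round-trip through BigUint with the new conversion methods instead.
    let big = d.clone().into_biguint();
    let d2 = Difficulty::from_biguint(big);
    assert_eq!(d, d2);
}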


@ -16,7 +16,6 @@
use byteorder::{ByteOrder, BigEndian};
use secp::{self, Secp256k1, Message, Signature};
use secp::key::SecretKey;
use secp::pedersen::{RangeProof, Commitment};
use core::Committed;
@ -25,11 +24,13 @@ use core::hash::{Hash, Hashed};
use ser::{self, Reader, Writer, Readable, Writeable};
bitflags! {
/// Options for a kernel's structure or use
pub flags KernelFeatures: u8 {
const DEFAULT_KERNEL = 0b00000000,
const COINBASE_KERNEL = 0b00000001,
}
/// Options for a kernel's structure or use
pub flags KernelFeatures: u8 {
/// No flags
const DEFAULT_KERNEL = 0b00000000,
/// TODO what is this for?
const COINBASE_KERNEL = 0b00000001,
}
}
/// A proof that a transaction sums to zero. Includes both the transaction's
@ -62,7 +63,7 @@ impl Writeable for TxKernel {
}
}
impl Readable<TxKernel> for TxKernel {
impl Readable for TxKernel {
fn read(reader: &mut Reader) -> Result<TxKernel, ser::Error> {
Ok(TxKernel {
features:
@ -121,7 +122,7 @@ impl Writeable for Transaction {
/// Implementation of Readable for a transaction, defines how to read a full
/// transaction from a binary stream.
impl Readable<Transaction> for Transaction {
impl Readable for Transaction {
fn read(reader: &mut Reader) -> Result<Transaction, ser::Error> {
let (fee, excess_sig, input_len, output_len) =
ser_multiread!(reader, read_u64, read_vec, read_u64, read_u64);
@ -201,13 +202,6 @@ impl Transaction {
Transaction { fee: fee, ..self }
}
/// The hash of a transaction is the Merkle tree of its inputs and outputs
/// hashes. None of the rest is required.
fn hash(&mut self) -> Hash {
merkle_inputs_outputs(&self.inputs, &self.outputs)
}
/// The verification for a MimbleWimble transaction involves getting the
/// excess of summing all commitments and using it as a public key
/// to verify the embedded signature. The rationale is that if the values
@ -258,7 +252,7 @@ impl Writeable for Input {
/// Implementation of Readable for a transaction Input, defines how to read
/// an Input from a binary stream.
impl Readable<Input> for Input {
impl Readable for Input {
fn read(reader: &mut Reader) -> Result<Input, ser::Error> {
Ok(Input(Commitment::read(reader)?))
}
@ -267,17 +261,20 @@ impl Readable<Input> for Input {
/// The input for a transaction, which spends a pre-existing output. The input
/// commitment is a reproduction of the commitment of the output it's spending.
impl Input {
/// Extracts the referenced commitment from a transaction output
pub fn commitment(&self) -> Commitment {
self.0
}
}
bitflags! {
/// Options for block validation
pub flags OutputFeatures: u8 {
const DEFAULT_OUTPUT = 0b00000000,
const COINBASE_OUTPUT = 0b00000001,
}
/// Options for block validation
pub flags OutputFeatures: u8 {
/// No flags
const DEFAULT_OUTPUT = 0b00000000,
/// Output is a coinbase output, has fixed amount and must not be spent until maturity
const COINBASE_OUTPUT = 0b00000001,
}
}
/// Output for a transaction, defining the new ownership of coins that are being
@ -287,8 +284,11 @@ bitflags! {
/// and the ownership of the private key.
#[derive(Debug, Copy, Clone)]
pub struct Output {
/// Options for an output's structure or use
pub features: OutputFeatures,
/// The homomorphic commitment representing the output's amount
pub commit: Commitment,
/// A proof that the commitment is in the right range
pub proof: RangeProof,
}
@ -307,7 +307,7 @@ impl Writeable for Output {
/// Implementation of Readable for a transaction Output, defines how to read
/// an Output from a binary stream.
impl Readable<Output> for Output {
impl Readable for Output {
fn read(reader: &mut Reader) -> Result<Output, ser::Error> {
Ok(Output {
features:


@ -21,8 +21,8 @@ use consensus::DEFAULT_SIZESHIFT;
use core::hash::Hashed;
use core::target::Difficulty;
// Genesis block definition. It has no rewards, no inputs, no outputs, no
// fees and a height of zero.
/// Genesis block definition. It has no rewards, no inputs, no outputs, no
/// fees and a height of zero.
pub fn genesis() -> core::Block {
core::Block {
header: core::BlockHeader {


@ -29,10 +29,13 @@ use pow::siphash::siphash24;
const MAXPATHLEN: usize = 8192;
/// A cuckoo-cycle related error
#[derive(Debug)]
pub enum Error {
PathError,
NoSolutionError,
/// Unable to find a short enough path
Path,
/// Unable to find a solution
NoSolution,
}
/// An edge in the Cuckoo graph, simply references two u64 nodes.
@ -42,6 +45,7 @@ struct Edge {
v: u64,
}
/// Cuckoo cycle context
pub struct Cuckoo {
mask: u64,
size: u64,
@ -164,6 +168,7 @@ enum CycleSol {
}
impl Miner {
/// Creates a new miner
pub fn new(header: &[u8], ease: u32, sizeshift: u32) -> Miner {
let cuckoo = Cuckoo::new(header, sizeshift);
let size = 1 << sizeshift;
@ -176,6 +181,7 @@ impl Miner {
}
}
/// Searches for a solution
pub fn mine(&mut self) -> Result<Proof, Error> {
let mut us = [0; MAXPATHLEN];
let mut vs = [0; MAXPATHLEN];
@ -199,7 +205,7 @@ impl Miner {
}
}
}
Err(Error::NoSolutionError)
Err(Error::NoSolution)
}
fn path(&self, mut u: u32, us: &mut [u32]) -> Result<u32, Error> {
@ -210,7 +216,7 @@ impl Miner {
while nu != 0 && us[(nu - 1) as usize] != u {
nu -= 1;
}
return Err(Error::PathError);
return Err(Error::Path);
}
us[nu as usize] = u;
u = self.graph[u as usize];


@ -28,26 +28,25 @@ pub mod cuckoo;
use time;
use consensus::EASINESS;
use core::{BlockHeader, Proof};
use core::hash::{Hash, Hashed};
use core::BlockHeader;
use core::hash::Hashed;
use core::target::Difficulty;
use pow::cuckoo::{Cuckoo, Miner, Error};
use ser;
use ser::{Writeable, Writer};
/// Validates the proof of work of a given header.
pub fn verify(bh: &BlockHeader) -> bool {
verify_size(bh, bh.cuckoo_len as u32)
}
/// Validates the proof of work of a given header, and that the proof of work
/// satisfies the requirements of the header.
pub fn verify_size(bh: &BlockHeader, cuckoo_sz: u32) -> bool {
// make sure the pow hash shows a difficulty at least as large as the target
// difficulty
if bh.difficulty > bh.pow.to_difficulty() {
return false;
}
Cuckoo::new(bh.hash().to_slice(), cuckoo_sz).verify(bh.pow, EASINESS as u64)
Cuckoo::new(&bh.hash()[..], cuckoo_sz).verify(bh.pow, EASINESS as u64)
}
/// Runs a naive single-threaded proof of work computation over the provided
@ -64,6 +63,7 @@ pub fn pow20(bh: &mut BlockHeader, diff: Difficulty) -> Result<(), Error> {
pow_size(bh, diff, 20)
}
/// Actual pow function, takes an arbitrary pow size as input
pub fn pow_size(bh: &mut BlockHeader, diff: Difficulty, sizeshift: u32) -> Result<(), Error> {
let start_nonce = bh.nonce;
@ -75,7 +75,7 @@ pub fn pow_size(bh: &mut BlockHeader, diff: Difficulty, sizeshift: u32) -> Resul
// if we found a cycle (not guaranteed) and the proof hash is higher than the
// diff, we're all good
if let Ok(proof) = Miner::new(pow_hash.to_slice(), EASINESS, sizeshift).mine() {
if let Ok(proof) = Miner::new(&pow_hash[..], EASINESS, sizeshift).mine() {
if proof.to_difficulty() >= diff {
bh.pow = proof;
return Ok(());
@ -96,7 +96,6 @@ pub fn pow_size(bh: &mut BlockHeader, diff: Difficulty, sizeshift: u32) -> Resul
#[cfg(test)]
mod test {
use super::*;
use core::Proof;
use core::target::Difficulty;
use genesis;


@ -34,7 +34,9 @@ pub enum Error {
IOErr(io::Error),
/// Expected a given value that wasn't found
UnexpectedData {
/// What we wanted
expected: Vec<u8>,
/// What we got
received: Vec<u8>,
},
/// Data wasn't in a consumable format
@ -177,13 +179,13 @@ pub trait Writeable {
/// Trait that every type that can be deserialized from binary must implement.
/// Reads directly from a Reader, a utility type thinly wrapping an
/// underlying Read implementation.
pub trait Readable<T> {
pub trait Readable where Self: Sized {
/// Reads the data necessary to this Readable from the provided reader
fn read(reader: &mut Reader) -> Result<T, Error>;
fn read(reader: &mut Reader) -> Result<Self, Error>;
}
/// Deserializes a Readable from any std::io::Read implementation.
pub fn deserialize<T: Readable<T>>(mut source: &mut Read) -> Result<T, Error> {
pub fn deserialize<T: Readable>(mut source: &mut Read) -> Result<T, Error> {
let mut reader = BinReader { source: source };
T::read(&mut reader)
}
@ -258,7 +260,7 @@ impl<'a> Reader for BinReader<'a> {
}
impl Readable<Commitment> for Commitment {
impl Readable for Commitment {
fn read(reader: &mut Reader) -> Result<Commitment, Error> {
let a = try!(reader.read_fixed_bytes(PEDERSEN_COMMITMENT_SIZE));
let mut c = [0; PEDERSEN_COMMITMENT_SIZE];
@ -269,7 +271,7 @@ impl Readable<Commitment> for Commitment {
}
}
impl Readable<RangeProof> for RangeProof {
impl Readable for RangeProof {
fn read(reader: &mut Reader) -> Result<RangeProof, Error> {
let p = try!(reader.read_limited_vec(MAX_PROOF_SIZE));
let mut a = [0; MAX_PROOF_SIZE];
@ -301,9 +303,62 @@ impl<'a> Writer for BinWriter<'a> {
}
}
macro_rules! impl_int {
($int: ty, $w_fn: ident, $r_fn: ident) => {
impl Writeable for $int {
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), Error> {
writer.$w_fn(*self)
}
}
impl Readable for $int {
fn read(reader: &mut Reader) -> Result<$int, Error> {
reader.$r_fn()
}
}
}
}
impl_int!(u8, write_u8, read_u8);
impl_int!(u16, write_u16, read_u16);
impl_int!(u32, write_u32, read_u32);
impl_int!(u64, write_u64, read_u64);
impl_int!(i64, write_i64, read_i64);
impl<A: Writeable, B: Writeable> Writeable for (A, B) {
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), Error> {
try!(Writeable::write(&self.0, writer));
Writeable::write(&self.1, writer)
}
}
impl<A: Readable, B: Readable> Readable for (A, B) {
fn read(reader: &mut Reader) -> Result<(A, B), Error> {
Ok((try!(Readable::read(reader)),
try!(Readable::read(reader))))
}
}
impl<A: Writeable, B: Writeable, C: Writeable> Writeable for (A, B, C) {
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), Error> {
try!(Writeable::write(&self.0, writer));
try!(Writeable::write(&self.1, writer));
Writeable::write(&self.2, writer)
}
}
impl<A: Readable, B: Readable, C: Readable> Readable for (A, B, C) {
fn read(reader: &mut Reader) -> Result<(A, B, C), Error> {
Ok((try!(Readable::read(reader)),
try!(Readable::read(reader)),
try!(Readable::read(reader))))
}
}
/// Useful marker trait on types that can be sized byte slices
pub trait AsFixedBytes: Sized + AsRef<[u8]> {}
impl<'a> AsFixedBytes for &'a [u8] {}
impl AsFixedBytes for Vec<u8> {}
impl AsFixedBytes for [u8; 1] {}
impl AsFixedBytes for [u8; 2] {}
@ -315,4 +370,4 @@ impl AsFixedBytes for ::core::hash::Hash {}
impl AsFixedBytes for ::secp::pedersen::RangeProof {}
impl AsFixedBytes for ::secp::key::SecretKey {}
impl AsFixedBytes for ::secp::Signature {}
impl AsFixedBytes for ::secp::pedersen::Commitment {}
impl AsFixedBytes for ::secp::pedersen::Commitment {}
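Because `impl_int!` and the tuple impls above make plain integers and small tuples `Readable`, they can be handed straight to `deserialize`. A hypothetical usage sketch; the `grin_core` crate name and the big-endian byte layout are assumptions for illustration (the latter suggested by the `byteorder::BigEndian` imports earlier in the diff):

use std::io::Cursor;

// Crate name is an assumption for illustration only.
use grin_core::ser;

fn main() {
    // A single u64 (big-endian assumed): 42.
    let mut one = Cursor::new(vec![0u8, 0, 0, 0, 0, 0, 0, 42]);
    let n: u64 = match ser::deserialize(&mut one) {
        Ok(n) => n,
        Err(_) => panic!("failed to read u64"),
    };
    assert_eq!(n, 42);

    // A (u32, u32) pair, via the new tuple impl.
    let mut two = Cursor::new(vec![0u8, 0, 0, 1, 0, 0, 0, 2]);
    let pair: (u32, u32) = match ser::deserialize(&mut two) {
        Ok(p) => p,
        Err(_) => panic!("failed to read (u32, u32)"),
    };
    assert_eq!(pair, (1, 2));
}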


@ -72,7 +72,7 @@ impl Miner {
let mut iter_count = 0;
while head.hash() == latest_hash && time::get_time().sec < deadline {
let pow_hash = b.hash();
let mut miner = cuckoo::Miner::new(pow_hash.to_slice(),
let mut miner = cuckoo::Miner::new(&pow_hash[..],
consensus::EASINESS,
b.header.cuckoo_len as u32);
if let Ok(proof) = miner.mine() {


@ -69,7 +69,7 @@ enum_from_primitive! {
/// the header first, handles its validation and then reads the Readable body,
/// allocating buffers of the right size.
pub fn read_msg<T>(conn: TcpStream) -> Box<Future<Item = (TcpStream, T), Error = Error>>
where T: Readable<T> + 'static
where T: Readable + 'static
{
let read_header = read_exact(conn, vec![0u8; HEADER_LEN as usize])
.from_err()
@ -157,7 +157,7 @@ impl Writeable for MsgHeader {
}
}
impl Readable<MsgHeader> for MsgHeader {
impl Readable for MsgHeader {
fn read(reader: &mut Reader) -> Result<MsgHeader, ser::Error> {
try!(reader.expect_u8(MAGIC[0]));
try!(reader.expect_u8(MAGIC[1]));
@ -209,7 +209,7 @@ impl Writeable for Hand {
}
}
impl Readable<Hand> for Hand {
impl Readable for Hand {
fn read(reader: &mut Reader) -> Result<Hand, ser::Error> {
let (version, capab, nonce) = ser_multiread!(reader, read_u32, read_u32, read_u64);
let total_diff = try!(Difficulty::read(reader));
@ -256,7 +256,7 @@ impl Writeable for Shake {
}
}
impl Readable<Shake> for Shake {
impl Readable for Shake {
fn read(reader: &mut Reader) -> Result<Shake, ser::Error> {
let (version, capab) = ser_multiread!(reader, read_u32, read_u32);
let total_diff = try!(Difficulty::read(reader));
@ -284,7 +284,7 @@ impl Writeable for GetPeerAddrs {
}
}
impl Readable<GetPeerAddrs> for GetPeerAddrs {
impl Readable for GetPeerAddrs {
fn read(reader: &mut Reader) -> Result<GetPeerAddrs, ser::Error> {
let capab = try!(reader.read_u32());
let capabilities = try!(Capabilities::from_bits(capab).ok_or(ser::Error::CorruptedData));
@ -308,7 +308,7 @@ impl Writeable for PeerAddrs {
}
}
impl Readable<PeerAddrs> for PeerAddrs {
impl Readable for PeerAddrs {
fn read(reader: &mut Reader) -> Result<PeerAddrs, ser::Error> {
let peer_count = try!(reader.read_u32());
if peer_count > MAX_PEER_ADDRS {
@ -341,7 +341,7 @@ impl Writeable for PeerError {
}
}
impl Readable<PeerError> for PeerError {
impl Readable for PeerError {
fn read(reader: &mut Reader) -> Result<PeerError, ser::Error> {
let (code, msg) = ser_multiread!(reader, read_u32, read_vec);
let message = try!(String::from_utf8(msg).map_err(|_| ser::Error::CorruptedData));
@ -378,7 +378,7 @@ impl Writeable for SockAddr {
}
}
impl Readable<SockAddr> for SockAddr {
impl Readable for SockAddr {
fn read(reader: &mut Reader) -> Result<SockAddr, ser::Error> {
let v4_or_v6 = try!(reader.read_u8());
if v4_or_v6 == 0 {
@ -422,7 +422,7 @@ impl Writeable for Locator {
}
}
impl Readable<Locator> for Locator {
impl Readable for Locator {
fn read(reader: &mut Reader) -> Result<Locator, ser::Error> {
let len = reader.read_u8()?;
let mut hashes = Vec::with_capacity(len as usize);
@ -448,7 +448,7 @@ impl Writeable for Headers {
}
}
impl Readable<Headers> for Headers {
impl Readable for Headers {
fn read(reader: &mut Reader) -> Result<Headers, ser::Error> {
let len = reader.read_u16()?;
let mut headers = Vec::with_capacity(len as usize);
@ -469,7 +469,7 @@ impl Writeable for Empty {
}
}
impl Readable<Empty> for Empty {
impl Readable for Empty {
fn read(reader: &mut Reader) -> Result<Empty, ser::Error> {
Ok(Empty {})
}


@ -60,7 +60,7 @@ impl Writeable for PeerData {
}
}
impl Readable<PeerData> for PeerData {
impl Readable for PeerData {
fn read(reader: &mut Reader) -> Result<PeerData, ser::Error> {
let addr = SockAddr::read(reader)?;
let (capab, ua, fl) = ser_multiread!(reader, read_u32, read_vec, read_u8);


@ -108,14 +108,14 @@ impl Store {
/// Gets a `Readable` value from the db, provided its key. Encapsulates
/// serialization.
pub fn get_ser<T: ser::Readable<T>>(&self, key: &[u8]) -> Result<Option<T>, Error> {
pub fn get_ser<T: ser::Readable>(&self, key: &[u8]) -> Result<Option<T>, Error> {
self.get_ser_limited(key, 0)
}
/// Gets a `Readable` value from the db, provided its key, allowing
/// extraction of only partial data. The underlying Readable size must align
/// accordingly. Encapsulates serialization.
pub fn get_ser_limited<T: ser::Readable<T>>(&self,
pub fn get_ser_limited<T: ser::Readable>(&self,
key: &[u8],
len: usize)
-> Result<Option<T>, Error> {
@ -145,7 +145,7 @@ impl Store {
/// Produces an iterator of `Readable` types moving forward from the
/// provided key.
pub fn iter<T: ser::Readable<T>>(&self, from: &[u8]) -> SerIterator<T> {
pub fn iter<T: ser::Readable>(&self, from: &[u8]) -> SerIterator<T> {
let db = self.rdb.read().unwrap();
SerIterator {
iter: db.iterator(IteratorMode::From(from, Direction::Forward)),
@ -196,14 +196,14 @@ impl<'a> Batch<'a> {
/// An iterator that produces Readable instances back. Wraps the lower level
/// DBIterator and deserializes the returned values.
pub struct SerIterator<T>
where T: ser::Readable<T>
where T: ser::Readable
{
iter: DBIterator,
_marker: PhantomData<T>,
}
impl<T> Iterator for SerIterator<T>
where T: ser::Readable<T>
where T: ser::Readable
{
type Item = T;