merge from master (thiserror conversion update)

This commit is contained in:
Yeastplume 2022-07-14 11:56:18 +01:00
commit e13c9d1534
77 changed files with 1810 additions and 2228 deletions

View file

@ -28,13 +28,13 @@ The development team will be happy to help and guide you with any of these point
When you are starting to contribute to grin, we really would appreciate if you come by the gitter chat channels.
In case of problems with trying out grin, before starting to contribute, there's the [Support chat](https://gitter.im/grin_community/support). Write there about what you've done, what you want to do, and maybe paste logs through a text paste webservice.
In case of problems with trying out grin, before starting to contribute, there's the [grincoin#support](https://keybase.io/team/grincoin) on Keybase. Write there about what you've done, what you want to do, and maybe paste logs through a text paste webservice.
* Please [join the grin Lobby](https://gitter.im/grin_community/Lobby) to get a feeling for the community.
* And [see the developers chat](https://gitter.im/grin_community/dev) if you have questions about source code files.
* Please [join the grincoin#general on Keybase](https://keybase.io/team/grincoin) to get a feeling for the community.
* And see the developers chat channel [grincoin#dev on Keybase](https://keybase.io/team/grincoin) if you have questions about source code files.
If you explain what you're looking at and what you want to do, we'll try to help you along the way.
* Also see `docs/*.md` and the folder structure explanations, and [the wiki](https://github.com/mimblewimble/docs/wiki).
* Further information and discussions are in the [Forum](https://forum.grin.mw), the [website](https://grin.mw), the [mailing list](https://lists.launchpad.net/mimblewimble/) and news channels like the [@grincouncil](https://twitter.com/grincouncil) and a (mostly unfiltered!) Twitter bot that collects headlines, mailing list posts, and reddit posts related to Mimblewimble/Grin: [@grinmw](https://twitter.com/grinmw)
* See `docs/*.md` and the folder structure explanations, [the wiki](https://github.com/mimblewimble/docs/wiki) and the official [Grin documentation](https://docs.grin.mw/).
* Further information and discussions are in the [Forum](https://forum.grin.mw), the [website](https://grin.mw), the [mailing list](https://lists.launchpad.net/mimblewimble/) and news channels like the [Reddit/grincoin](https://www.reddit.com/r/grincoin/) and a (mostly unfiltered!) Twitter bot that collects headlines, mailing list posts, and reddit posts related to Mimblewimble/Grin: [@grinmw](https://twitter.com/grinmw)
## Testing

527
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -31,8 +31,6 @@ futures = "0.3.19"
serde_json = "1"
log = "0.4"
term = "0.6"
failure = "0.1"
failure_derive = "0.1"
grin_api = { path = "./api", version = "5.2.0-alpha.1" }
grin_config = { path = "./config", version = "5.2.0-alpha.1" }

View file

@ -11,8 +11,6 @@ edition = "2018"
[dependencies]
easy-jsonrpc-mw = "0.5.4"
failure = "0.1.1"
failure_derive = "0.1.1"
hyper = "0.13"
lazy_static = "1"
regex = "1"
@ -20,6 +18,7 @@ ring = "0.16"
serde = "1"
serde_derive = "1"
serde_json = "1"
thiserror = "1"
log = "0.4"
tokio = { version = "0.2", features = ["full"] }
tokio-rustls = "0.13"

View file

@ -14,9 +14,8 @@
//! High level JSON/HTTP client API
use crate::rest::{Error, ErrorKind};
use crate::rest::Error;
use crate::util::to_base64;
use failure::{Fail, ResultExt};
use hyper::body;
use hyper::header::{ACCEPT, AUTHORIZATION, CONTENT_TYPE, USER_AGENT};
use hyper::{Body, Client, Request};
@ -181,9 +180,7 @@ fn build_request(
None => Body::empty(),
Some(json) => json.into(),
})
.map_err(|e| {
ErrorKind::RequestError(format!("Bad request {} {}: {}", method, url, e)).into()
})
.map_err(|e| Error::RequestError(format!("Bad request {} {}: {}", method, url, e)))
}
pub fn create_post_request<IN>(
@ -194,9 +191,8 @@ pub fn create_post_request<IN>(
where
IN: Serialize,
{
let json = serde_json::to_string(input).context(ErrorKind::Internal(
"Could not serialize data to JSON".to_owned(),
))?;
let json = serde_json::to_string(input)
.map_err(|e| Error::Internal(format!("Could not serialize data to JSON: {}", e)))?;
build_request(url, "POST", api_secret, Some(json))
}
@ -205,10 +201,8 @@ where
for<'de> T: Deserialize<'de>,
{
let data = send_request(req, timeout)?;
serde_json::from_str(&data).map_err(|e| {
e.context(ErrorKind::ResponseError("Cannot parse response".to_owned()))
.into()
})
serde_json::from_str(&data)
.map_err(|e| Error::ResponseError(format!("Cannot parse response {}", e)))
}
async fn handle_request_async<T>(req: Request<Body>) -> Result<T, Error>
@ -217,7 +211,7 @@ where
{
let data = send_request_async(req, TimeOut::default()).await?;
let ser = serde_json::from_str(&data)
.map_err(|e| e.context(ErrorKind::ResponseError("Cannot parse response".to_owned())))?;
.map_err(|e| Error::ResponseError(format!("Cannot parse response {}", e)))?;
Ok(ser)
}
@ -237,10 +231,10 @@ async fn send_request_async(req: Request<Body>, timeout: TimeOut) -> Result<Stri
let resp = client
.request(req)
.await
.map_err(|e| ErrorKind::RequestError(format!("Cannot make request: {}", e)))?;
.map_err(|e| Error::RequestError(format!("Cannot make request: {}", e)))?;
if !resp.status().is_success() {
return Err(ErrorKind::RequestError(format!(
return Err(Error::RequestError(format!(
"Wrong response code: {} with data {:?}",
resp.status(),
resp.body()
@ -250,7 +244,7 @@ async fn send_request_async(req: Request<Body>, timeout: TimeOut) -> Result<Stri
let raw = body::to_bytes(resp)
.await
.map_err(|e| ErrorKind::RequestError(format!("Cannot read response body: {}", e)))?;
.map_err(|e| Error::RequestError(format!("Cannot read response body: {}", e)))?;
Ok(String::from_utf8_lossy(&raw).to_string())
}
@ -260,6 +254,6 @@ pub fn send_request(req: Request<Body>, timeout: TimeOut) -> Result<String, Erro
.basic_scheduler()
.enable_all()
.build()
.map_err(|e| ErrorKind::RequestError(format!("{}", e)))?;
.map_err(|e| Error::RequestError(format!("{}", e)))?;
rt.block_on(send_request_async(req, timeout))
}

View file

@ -19,7 +19,7 @@ use crate::core::core::transaction::Transaction;
use crate::foreign::Foreign;
use crate::pool::PoolEntry;
use crate::pool::{BlockChain, PoolAdapter};
use crate::rest::ErrorKind;
use crate::rest::Error;
use crate::types::{
BlockHeaderPrintable, BlockPrintable, LocatedTxKernel, OutputListing, OutputPrintable, Tip,
Version,
@ -126,7 +126,7 @@ pub trait ForeignRpc: Sync + Send {
height: Option<u64>,
hash: Option<String>,
commit: Option<String>,
) -> Result<BlockHeaderPrintable, ErrorKind>;
) -> Result<BlockHeaderPrintable, Error>;
/**
Networked version of [Foreign::get_block](struct.Foreign.html#method.get_block).
@ -244,7 +244,7 @@ pub trait ForeignRpc: Sync + Send {
height: Option<u64>,
hash: Option<String>,
commit: Option<String>,
) -> Result<BlockPrintable, ErrorKind>;
) -> Result<BlockPrintable, Error>;
/**
Networked version of [Foreign::get_version](struct.Foreign.html#method.get_version).
@ -277,7 +277,7 @@ pub trait ForeignRpc: Sync + Send {
# );
```
*/
fn get_version(&self) -> Result<Version, ErrorKind>;
fn get_version(&self) -> Result<Version, Error>;
/**
Networked version of [Foreign::get_tip](struct.Foreign.html#method.get_tip).
@ -312,7 +312,7 @@ pub trait ForeignRpc: Sync + Send {
# );
```
*/
fn get_tip(&self) -> Result<Tip, ErrorKind>;
fn get_tip(&self) -> Result<Tip, Error>;
/**
Networked version of [Foreign::get_kernel](struct.Foreign.html#method.get_kernel).
@ -355,7 +355,7 @@ pub trait ForeignRpc: Sync + Send {
excess: String,
min_height: Option<u64>,
max_height: Option<u64>,
) -> Result<LocatedTxKernel, ErrorKind>;
) -> Result<LocatedTxKernel, Error>;
/**
Networked version of [Foreign::get_outputs](struct.Foreign.html#method.get_outputs).
@ -442,7 +442,7 @@ pub trait ForeignRpc: Sync + Send {
end_height: Option<u64>,
include_proof: Option<bool>,
include_merkle_proof: Option<bool>,
) -> Result<Vec<OutputPrintable>, ErrorKind>;
) -> Result<Vec<OutputPrintable>, Error>;
/**
Networked version of [Foreign::get_unspent_outputs](struct.Foreign.html#method.get_unspent_outputs).
@ -503,7 +503,7 @@ pub trait ForeignRpc: Sync + Send {
end_index: Option<u64>,
max: u64,
include_proof: Option<bool>,
) -> Result<OutputListing, ErrorKind>;
) -> Result<OutputListing, Error>;
/**
Networked version of [Foreign::get_pmmr_indices](struct.Foreign.html#method.get_pmmr_indices).
@ -540,7 +540,7 @@ pub trait ForeignRpc: Sync + Send {
&self,
start_block_height: u64,
end_block_height: Option<u64>,
) -> Result<OutputListing, ErrorKind>;
) -> Result<OutputListing, Error>;
/**
Networked version of [Foreign::get_pool_size](struct.Foreign.html#method.get_pool_size).
@ -570,7 +570,7 @@ pub trait ForeignRpc: Sync + Send {
# );
```
*/
fn get_pool_size(&self) -> Result<usize, ErrorKind>;
fn get_pool_size(&self) -> Result<usize, Error>;
/**
Networked version of [Foreign::get_stempool_size](struct.Foreign.html#method.get_stempool_size).
@ -600,7 +600,7 @@ pub trait ForeignRpc: Sync + Send {
# );
```
*/
fn get_stempool_size(&self) -> Result<usize, ErrorKind>;
fn get_stempool_size(&self) -> Result<usize, Error>;
/**
Networked version of [Foreign::get_unconfirmed_transactions](struct.Foreign.html#method.get_unconfirmed_transactions).
@ -673,7 +673,7 @@ pub trait ForeignRpc: Sync + Send {
# );
```
*/
fn get_unconfirmed_transactions(&self) -> Result<Vec<PoolEntry>, ErrorKind>;
fn get_unconfirmed_transactions(&self) -> Result<Vec<PoolEntry>, Error>;
/**
Networked version of [Foreign::push_transaction](struct.Foreign.html#method.push_transaction).
@ -738,7 +738,7 @@ pub trait ForeignRpc: Sync + Send {
# );
```
*/
fn push_transaction(&self, tx: Transaction, fluff: Option<bool>) -> Result<(), ErrorKind>;
fn push_transaction(&self, tx: Transaction, fluff: Option<bool>) -> Result<(), Error>;
}
impl<B, P> ForeignRpc for Foreign<B, P>
@ -751,36 +751,36 @@ where
height: Option<u64>,
hash: Option<String>,
commit: Option<String>,
) -> Result<BlockHeaderPrintable, ErrorKind> {
) -> Result<BlockHeaderPrintable, Error> {
let mut parsed_hash: Option<Hash> = None;
if let Some(hash) = hash {
let vec = util::from_hex(&hash)
.map_err(|e| ErrorKind::Argument(format!("invalid block hash: {}", e)))?;
.map_err(|e| Error::Argument(format!("invalid block hash: {}", e)))?;
parsed_hash = Some(Hash::from_vec(&vec));
}
Foreign::get_header(self, height, parsed_hash, commit).map_err(|e| e.kind().clone())
Foreign::get_header(self, height, parsed_hash, commit)
}
fn get_block(
&self,
height: Option<u64>,
hash: Option<String>,
commit: Option<String>,
) -> Result<BlockPrintable, ErrorKind> {
) -> Result<BlockPrintable, Error> {
let mut parsed_hash: Option<Hash> = None;
if let Some(hash) = hash {
let vec = util::from_hex(&hash)
.map_err(|e| ErrorKind::Argument(format!("invalid block hash: {}", e)))?;
.map_err(|e| Error::Argument(format!("invalid block hash: {}", e)))?;
parsed_hash = Some(Hash::from_vec(&vec));
}
Foreign::get_block(self, height, parsed_hash, commit).map_err(|e| e.kind().clone())
Foreign::get_block(self, height, parsed_hash, commit)
}
fn get_version(&self) -> Result<Version, ErrorKind> {
Foreign::get_version(self).map_err(|e| e.kind().clone())
fn get_version(&self) -> Result<Version, Error> {
Foreign::get_version(self)
}
fn get_tip(&self) -> Result<Tip, ErrorKind> {
Foreign::get_tip(self).map_err(|e| e.kind().clone())
fn get_tip(&self) -> Result<Tip, Error> {
Foreign::get_tip(self)
}
fn get_kernel(
@ -788,8 +788,8 @@ where
excess: String,
min_height: Option<u64>,
max_height: Option<u64>,
) -> Result<LocatedTxKernel, ErrorKind> {
Foreign::get_kernel(self, excess, min_height, max_height).map_err(|e| e.kind().clone())
) -> Result<LocatedTxKernel, Error> {
Foreign::get_kernel(self, excess, min_height, max_height)
}
fn get_outputs(
@ -799,7 +799,7 @@ where
end_height: Option<u64>,
include_proof: Option<bool>,
include_merkle_proof: Option<bool>,
) -> Result<Vec<OutputPrintable>, ErrorKind> {
) -> Result<Vec<OutputPrintable>, Error> {
Foreign::get_outputs(
self,
commits,
@ -808,7 +808,6 @@ where
include_proof,
include_merkle_proof,
)
.map_err(|e| e.kind().clone())
}
fn get_unspent_outputs(
@ -817,33 +816,31 @@ where
end_index: Option<u64>,
max: u64,
include_proof: Option<bool>,
) -> Result<OutputListing, ErrorKind> {
) -> Result<OutputListing, Error> {
Foreign::get_unspent_outputs(self, start_index, end_index, max, include_proof)
.map_err(|e| e.kind().clone())
}
fn get_pmmr_indices(
&self,
start_block_height: u64,
end_block_height: Option<u64>,
) -> Result<OutputListing, ErrorKind> {
) -> Result<OutputListing, Error> {
Foreign::get_pmmr_indices(self, start_block_height, end_block_height)
.map_err(|e| e.kind().clone())
}
fn get_pool_size(&self) -> Result<usize, ErrorKind> {
Foreign::get_pool_size(self).map_err(|e| e.kind().clone())
fn get_pool_size(&self) -> Result<usize, Error> {
Foreign::get_pool_size(self)
}
fn get_stempool_size(&self) -> Result<usize, ErrorKind> {
Foreign::get_stempool_size(self).map_err(|e| e.kind().clone())
fn get_stempool_size(&self) -> Result<usize, Error> {
Foreign::get_stempool_size(self)
}
fn get_unconfirmed_transactions(&self) -> Result<Vec<PoolEntry>, ErrorKind> {
Foreign::get_unconfirmed_transactions(self).map_err(|e| e.kind().clone())
fn get_unconfirmed_transactions(&self) -> Result<Vec<PoolEntry>, Error> {
Foreign::get_unconfirmed_transactions(self)
}
fn push_transaction(&self, tx: Transaction, fluff: Option<bool>) -> Result<(), ErrorKind> {
Foreign::push_transaction(self, tx, fluff).map_err(|e| e.kind().clone())
fn push_transaction(&self, tx: Transaction, fluff: Option<bool>) -> Result<(), Error> {
Foreign::push_transaction(self, tx, fluff)
}
}
@ -854,7 +851,7 @@ macro_rules! doctest_helper_json_rpc_foreign_assert_response {
// create temporary grin server, run jsonrpc request on node api, delete server, return
// json response.
{
{
/*use grin_servers::test_framework::framework::run_doctest;
use grin_util as util;
use serde_json;
@ -888,6 +885,6 @@ macro_rules! doctest_helper_json_rpc_foreign_assert_response {
serde_json::to_string_pretty(&expected_response).unwrap()
);
}*/
}
}
};
}

View file

@ -20,7 +20,6 @@ use crate::router::{Handler, ResponseFuture};
use crate::types::*;
use crate::util;
use crate::web::*;
use failure::ResultExt;
use hyper::{Body, Request, StatusCode};
use regex::Regex;
use std::sync::Weak;
@ -43,33 +42,33 @@ impl HeaderHandler {
if let Ok(height) = input.parse() {
match w(&self.chain)?.get_header_by_height(height) {
Ok(header) => return Ok(BlockHeaderPrintable::from_header(&header)),
Err(_) => return Err(ErrorKind::NotFound.into()),
Err(_) => return Err(Error::NotFound),
}
}
check_block_param(&input)?;
let vec = util::from_hex(&input)
.map_err(|e| ErrorKind::Argument(format!("invalid input: {}", e)))?;
let vec =
util::from_hex(&input).map_err(|e| Error::Argument(format!("invalid input: {}", e)))?;
let h = Hash::from_vec(&vec);
let header = w(&self.chain)?
.get_block_header(&h)
.context(ErrorKind::NotFound)?;
.map_err(|_| Error::NotFound)?;
Ok(BlockHeaderPrintable::from_header(&header))
}
fn get_header_for_output(&self, commit_id: String) -> Result<BlockHeaderPrintable, Error> {
let oid = match get_output(&self.chain, &commit_id)? {
Some((_, o)) => o,
None => return Err(ErrorKind::NotFound.into()),
None => return Err(Error::NotFound),
};
match w(&self.chain)?.get_header_for_output(oid.commitment()) {
Ok(header) => Ok(BlockHeaderPrintable::from_header(&header)),
Err(_) => Err(ErrorKind::NotFound.into()),
Err(_) => Err(Error::NotFound),
}
}
pub fn get_header_v2(&self, h: &Hash) -> Result<BlockHeaderPrintable, Error> {
let chain = w(&self.chain)?;
let header = chain.get_block_header(h).context(ErrorKind::NotFound)?;
let header = chain.get_block_header(h).map_err(|_| Error::NotFound)?;
Ok(BlockHeaderPrintable::from_header(&header))
}
@ -83,7 +82,7 @@ impl HeaderHandler {
if let Some(height) = height {
match w(&self.chain)?.get_header_by_height(height) {
Ok(header) => return Ok(header.hash()),
Err(_) => return Err(ErrorKind::NotFound.into()),
Err(_) => return Err(Error::NotFound),
}
}
if let Some(hash) = hash {
@ -92,14 +91,16 @@ impl HeaderHandler {
if let Some(commit) = commit {
let oid = match get_output_v2(&self.chain, &commit, false, false)? {
Some((_, o)) => o,
None => return Err(ErrorKind::NotFound.into()),
None => return Err(Error::NotFound),
};
match w(&self.chain)?.get_header_for_output(oid.commitment()) {
Ok(header) => return Ok(header.hash()),
Err(_) => return Err(ErrorKind::NotFound.into()),
Err(_) => return Err(Error::NotFound),
}
}
Err(ErrorKind::Argument("not a valid hash, height or output commit".to_owned()).into())
Err(Error::Argument(
"not a valid hash, height or output commit".to_owned(),
))
}
}
@ -132,16 +133,16 @@ impl BlockHandler {
include_merkle_proof: bool,
) -> Result<BlockPrintable, Error> {
let chain = w(&self.chain)?;
let block = chain.get_block(h).context(ErrorKind::NotFound)?;
let block = chain.get_block(h).map_err(|_| Error::NotFound)?;
BlockPrintable::from_block(&block, &chain, include_proof, include_merkle_proof)
.map_err(|_| ErrorKind::Internal("chain error".to_owned()).into())
.map_err(|_| Error::Internal("chain error".to_owned()))
}
fn get_compact_block(&self, h: &Hash) -> Result<CompactBlockPrintable, Error> {
let chain = w(&self.chain)?;
let block = chain.get_block(h).context(ErrorKind::NotFound)?;
let block = chain.get_block(h).map_err(|_| Error::NotFound)?;
CompactBlockPrintable::from_compact_block(&block.into(), &chain)
.map_err(|_| ErrorKind::Internal("chain error".to_owned()).into())
.map_err(|_| Error::Internal("chain error".to_owned()))
}
// Try to decode the string as a height or a hash.
@ -149,12 +150,12 @@ impl BlockHandler {
if let Ok(height) = input.parse() {
match w(&self.chain)?.get_header_by_height(height) {
Ok(header) => return Ok(header.hash()),
Err(_) => return Err(ErrorKind::NotFound.into()),
Err(_) => return Err(Error::NotFound),
}
}
check_block_param(&input)?;
let vec = util::from_hex(&input)
.map_err(|e| ErrorKind::Argument(format!("invalid input: {}", e)))?;
let vec =
util::from_hex(&input).map_err(|e| Error::Argument(format!("invalid input: {}", e)))?;
Ok(Hash::from_vec(&vec))
}
@ -168,7 +169,7 @@ impl BlockHandler {
if let Some(height) = height {
match w(&self.chain)?.get_header_by_height(height) {
Ok(header) => return Ok(header.hash()),
Err(_) => return Err(ErrorKind::NotFound.into()),
Err(_) => return Err(Error::NotFound),
}
}
if let Some(hash) = hash {
@ -177,14 +178,16 @@ impl BlockHandler {
if let Some(commit) = commit {
let oid = match get_output_v2(&self.chain, &commit, false, false)? {
Some((_, o)) => o,
None => return Err(ErrorKind::NotFound.into()),
None => return Err(Error::NotFound),
};
match w(&self.chain)?.get_header_for_output(oid.commitment()) {
Ok(header) => return Ok(header.hash()),
Err(_) => return Err(ErrorKind::NotFound.into()),
Err(_) => return Err(Error::NotFound),
}
}
Err(ErrorKind::Argument("not a valid hash, height or output commit".to_owned()).into())
Err(Error::Argument(
"not a valid hash, height or output commit".to_owned(),
))
}
}
@ -193,7 +196,7 @@ fn check_block_param(input: &str) -> Result<(), Error> {
static ref RE: Regex = Regex::new(r"[0-9a-fA-F]{64}").unwrap();
}
if !RE.is_match(&input) {
return Err(ErrorKind::Argument("Not a valid hash or height.".to_owned()).into());
return Err(Error::Argument("Not a valid hash or height.".to_owned()));
}
Ok(())
}

View file

@ -21,7 +21,6 @@ use crate::types::*;
use crate::util;
use crate::util::secp::pedersen::Commitment;
use crate::web::*;
use failure::ResultExt;
use hyper::{Body, Request, StatusCode};
use std::sync::Weak;
@ -35,7 +34,7 @@ impl ChainHandler {
pub fn get_tip(&self) -> Result<Tip, Error> {
let head = w(&self.chain)?
.head()
.map_err(|e| ErrorKind::Internal(format!("can't get head: {}", e)))?;
.map_err(|e| Error::Internal(format!("can't get head: {}", e)))?;
Ok(Tip::from_tip(head))
}
}
@ -56,7 +55,7 @@ impl ChainValidationHandler {
pub fn validate_chain(&self, fast_validation: bool) -> Result<(), Error> {
w(&self.chain)?
.validate(fast_validation)
.map_err(|_| ErrorKind::Internal("chain error".to_owned()).into())
.map_err(|_| Error::Internal("chain error".to_owned()))
}
}
@ -144,11 +143,10 @@ impl OutputHandler {
// First check the commits length
for commit in &commits {
if commit.len() != 66 {
return Err(ErrorKind::RequestError(format!(
return Err(Error::RequestError(format!(
"invalid commit length for {}",
commit
))
.into());
)));
}
}
for commit in commits {
@ -202,7 +200,7 @@ impl OutputHandler {
let chain = w(&self.chain)?;
let outputs = chain
.unspent_outputs_by_pmmr_index(start_index, max, end_index)
.context(ErrorKind::NotFound)?;
.map_err(|_| Error::NotFound)?;
let out = OutputListing {
last_retrieved_index: outputs.0,
highest_index: outputs.1,
@ -219,7 +217,7 @@ impl OutputHandler {
)
})
.collect::<Result<Vec<_>, _>>()
.context(ErrorKind::Internal("chain error".to_owned()))?,
.map_err(|_| Error::Internal("chain error".to_owned()))?,
};
Ok(out)
}
@ -258,14 +256,14 @@ impl OutputHandler {
) -> Result<BlockOutputs, Error> {
let header = w(&self.chain)?
.get_header_by_height(block_height)
.map_err(|_| ErrorKind::NotFound)?;
.map_err(|_| Error::NotFound)?;
// TODO - possible to compact away blocks we care about
// in the period between accepting the block and refreshing the wallet
let chain = w(&self.chain)?;
let block = chain
.get_block(&header.hash())
.map_err(|_| ErrorKind::NotFound)?;
.map_err(|_| Error::NotFound)?;
let outputs = block
.outputs()
.iter()
@ -274,7 +272,7 @@ impl OutputHandler {
OutputPrintable::from_output(output, &chain, Some(&header), include_proof, true)
})
.collect::<Result<Vec<_>, _>>()
.context(ErrorKind::Internal("cain error".to_owned()))?;
.map_err(|_| Error::Internal("chain error".to_owned()))?;
Ok(BlockOutputs {
header: BlockHeaderDifficultyInfo::from_header(&header),
@ -291,14 +289,14 @@ impl OutputHandler {
) -> Result<Vec<OutputPrintable>, Error> {
let header = w(&self.chain)?
.get_header_by_height(block_height)
.map_err(|_| ErrorKind::NotFound)?;
.map_err(|_| Error::NotFound)?;
// TODO - possible to compact away blocks we care about
// in the period between accepting the block and refreshing the wallet
let chain = w(&self.chain)?;
let block = chain
.get_block(&header.hash())
.map_err(|_| ErrorKind::NotFound)?;
.map_err(|_| Error::NotFound)?;
let outputs = block
.outputs()
.iter()
@ -313,7 +311,7 @@ impl OutputHandler {
)
})
.collect::<Result<Vec<_>, _>>()
.context(ErrorKind::Internal("cain error".to_owned()))?;
.map_err(|_| Error::Internal("chain error".to_owned()))?;
Ok(outputs)
}
@ -408,11 +406,11 @@ impl KernelHandler {
.trim_end_matches('/')
.rsplit('/')
.next()
.ok_or_else(|| ErrorKind::RequestError("missing excess".into()))?;
let excess = util::from_hex(excess)
.map_err(|_| ErrorKind::RequestError("invalid excess hex".into()))?;
.ok_or_else(|| Error::RequestError("missing excess".into()))?;
let excess =
util::from_hex(excess).map_err(|_| Error::RequestError("invalid excess hex".into()))?;
if excess.len() != 33 {
return Err(ErrorKind::RequestError("invalid excess length".into()).into());
return Err(Error::RequestError("invalid excess length".into()));
}
let excess = Commitment::from_vec(excess);
@ -427,18 +425,18 @@ impl KernelHandler {
if let Some(h) = params.get("min_height") {
let h = h
.parse()
.map_err(|_| ErrorKind::RequestError("invalid minimum height".into()))?;
.map_err(|_| Error::RequestError("invalid minimum height".into()))?;
// Default is genesis
min_height = if h == 0 { None } else { Some(h) };
}
if let Some(h) = params.get("max_height") {
let h = h
.parse()
.map_err(|_| ErrorKind::RequestError("invalid maximum height".into()))?;
.map_err(|_| Error::RequestError("invalid maximum height".into()))?;
// Default is current head
let head_height = chain
.head()
.map_err(|e| ErrorKind::Internal(format!("{}", e)))?
.map_err(|e| Error::Internal(format!("{}", e)))?
.height;
max_height = if h >= head_height { None } else { Some(h) };
}
@ -446,7 +444,7 @@ impl KernelHandler {
let kernel = chain
.get_kernel_height(&excess, min_height, max_height)
.map_err(|e| ErrorKind::Internal(format!("{}", e)))?
.map_err(|e| Error::Internal(format!("{}", e)))?
.map(|(tx_kernel, height, mmr_index)| LocatedTxKernel {
tx_kernel,
height,
@ -462,22 +460,22 @@ impl KernelHandler {
max_height: Option<u64>,
) -> Result<LocatedTxKernel, Error> {
let excess = util::from_hex(&excess)
.map_err(|_| ErrorKind::RequestError("invalid excess hex".into()))?;
.map_err(|_| Error::RequestError("invalid excess hex".into()))?;
if excess.len() != 33 {
return Err(ErrorKind::RequestError("invalid excess length".into()).into());
return Err(Error::RequestError("invalid excess length".into()));
}
let excess = Commitment::from_vec(excess);
let chain = w(&self.chain)?;
let kernel = chain
.get_kernel_height(&excess, min_height, max_height)
.map_err(|e| ErrorKind::Internal(format!("{}", e)))?
.map_err(|e| Error::Internal(format!("{}", e)))?
.map(|(tx_kernel, height, mmr_index)| LocatedTxKernel {
tx_kernel,
height,
mmr_index,
});
kernel.ok_or_else(|| ErrorKind::NotFound.into())
kernel.ok_or(Error::NotFound)
}
}

View file

@ -74,7 +74,7 @@ impl PeerHandler {
if let Some(addr) = addr {
let peer_addr = PeerAddr(addr);
let peer_data: PeerData = w(&self.peers)?.get_peer(peer_addr).map_err(|e| {
let e: Error = ErrorKind::Internal(format!("get peer error: {:?}", e)).into();
let e: Error = Error::Internal(format!("get peer error: {:?}", e));
e
})?;
return Ok(vec![peer_data]);
@ -87,14 +87,14 @@ impl PeerHandler {
let peer_addr = PeerAddr(addr);
w(&self.peers)?
.ban_peer(peer_addr, ReasonForBan::ManualBan)
.map_err(|e| ErrorKind::Internal(format!("ban peer error: {:?}", e)).into())
.map_err(|e| Error::Internal(format!("ban peer error: {:?}", e)))
}
pub fn unban_peer(&self, addr: SocketAddr) -> Result<(), Error> {
let peer_addr = PeerAddr(addr);
w(&self.peers)?
.unban_peer(peer_addr)
.map_err(|e| ErrorKind::Internal(format!("unban peer error: {:?}", e)).into())
.map_err(|e| Error::Internal(format!("unban peer error: {:?}", e)))
}
}

View file

@ -23,7 +23,6 @@ use crate::types::*;
use crate::util;
use crate::util::RwLock;
use crate::web::*;
use failure::ResultExt;
use hyper::{Body, Request, StatusCode};
use std::sync::Weak;
@ -97,10 +96,10 @@ where
let header = tx_pool
.blockchain
.chain_head()
.context(ErrorKind::Internal("Failed to get chain head".to_owned()))?;
.map_err(|e| Error::Internal(format!("Failed to get chain head: {}", e)))?;
tx_pool
.add_to_pool(source, tx, !fluff.unwrap_or(false), &header)
.context(ErrorKind::Internal("Failed to update pool".to_owned()))?;
.map_err(|e| Error::Internal(format!("Failed to update pool: {}", e)))?;
Ok(())
}
}
@ -134,13 +133,13 @@ where
let wrapper: TxWrapper = parse_body(req).await?;
let tx_bin = util::from_hex(&wrapper.tx_hex)
.map_err(|e| ErrorKind::RequestError(format!("Bad request: {}", e)))?;
.map_err(|e| Error::RequestError(format!("Bad request: {}", e)))?;
// All wallet api interaction explicitly uses protocol version 1 for now.
let version = ProtocolVersion(1);
let tx: Transaction =
ser::deserialize(&mut &tx_bin[..], version, DeserializationMode::default())
.map_err(|e| ErrorKind::RequestError(format!("Bad request: {}", e)))?;
.map_err(|e| Error::RequestError(format!("Bad request: {}", e)))?;
let source = pool::TxSource::PushApi;
info!(
@ -156,10 +155,10 @@ where
let header = tx_pool
.blockchain
.chain_head()
.context(ErrorKind::Internal("Failed to get chain head".to_owned()))?;
.map_err(|e| Error::Internal(format!("Failed to get chain head: {}", e)))?;
tx_pool
.add_to_pool(source, tx, !fluff, &header)
.context(ErrorKind::Internal("Failed to update pool".to_owned()))?;
.map_err(|e| Error::Internal(format!("Failed to update pool: {}", e)))?;
Ok(())
}

View file

@ -50,7 +50,7 @@ impl StatusHandler {
pub fn get_status(&self) -> Result<Status, Error> {
let head = w(&self.chain)?
.head()
.map_err(|e| ErrorKind::Internal(format!("can't get head: {}", e)))?;
.map_err(|e| Error::Internal(format!("can't get head: {}", e)))?;
let sync_status = w(&self.sync_state)?.status();
let (api_sync_status, api_sync_info) = sync_status_to_api(sync_status);
Ok(Status::from_tip_and_peers(

View file

@ -20,7 +20,6 @@ use crate::types::*;
use crate::util;
use crate::util::secp::pedersen::Commitment;
use crate::web::*;
use failure::ResultExt;
use hyper::{Body, Request, StatusCode};
use std::sync::Weak;
@ -48,10 +47,8 @@ impl TxHashSetHandler {
// gets roots
fn get_roots(&self) -> Result<TxHashSet, Error> {
let chain = w(&self.chain)?;
let res = TxHashSet::from_head(&chain).context(ErrorKind::Internal(
"failed to read roots from txhashset".to_owned(),
))?;
Ok(res)
TxHashSet::from_head(&chain)
.map_err(|e| Error::Internal(format!("failed to read roots from txhashset: {}", e)))
}
// gets last n outputs inserted in to the tree
@ -86,7 +83,7 @@ impl TxHashSetHandler {
let chain = w(&self.chain)?;
let outputs = chain
.unspent_outputs_by_pmmr_index(start_index, max, end_index)
.context(ErrorKind::NotFound)?;
.map_err(|_| Error::NotFound)?;
let out = OutputListing {
last_retrieved_index: outputs.0,
highest_index: outputs.1,
@ -95,7 +92,7 @@ impl TxHashSetHandler {
.iter()
.map(|x| OutputPrintable::from_output(x, &chain, None, true, true))
.collect::<Result<Vec<_>, _>>()
.context(ErrorKind::Internal("chain error".to_owned()))?,
.map_err(|e| Error::Internal(format!("chain error: {}", e)))?,
};
Ok(out)
}
@ -109,7 +106,7 @@ impl TxHashSetHandler {
let chain = w(&self.chain)?;
let range = chain
.block_height_range_to_pmmr_indices(start_block_height, end_block_height)
.context(ErrorKind::NotFound)?;
.map_err(|_| Error::NotFound)?;
let out = OutputListing {
last_retrieved_index: range.0,
highest_index: range.1,
@ -122,12 +119,12 @@ impl TxHashSetHandler {
// (to avoid having to create a new type to pass around)
fn get_merkle_proof_for_output(&self, id: &str) -> Result<OutputPrintable, Error> {
let c = util::from_hex(id)
.map_err(|_| ErrorKind::Argument(format!("Not a valid commitment: {}", id)))?;
.map_err(|_| Error::Argument(format!("Not a valid commitment: {}", id)))?;
let commit = Commitment::from_vec(c);
let chain = w(&self.chain)?;
let output_pos = chain.get_output_pos(&commit).context(ErrorKind::NotFound)?;
let merkle_proof = chain::Chain::get_merkle_proof_for_pos(&chain, commit)
.map_err(|_| ErrorKind::NotFound)?;
let output_pos = chain.get_output_pos(&commit).map_err(|_| Error::NotFound)?;
let merkle_proof =
chain::Chain::get_merkle_proof_for_pos(&chain, commit).map_err(|_| Error::NotFound)?;
Ok(OutputPrintable {
output_type: OutputType::Coinbase,
commit: Commitment::from_vec(vec![]),

View file

@ -26,7 +26,7 @@ use std::sync::{Arc, Weak};
// boilerplate of dealing with `Weak`.
pub fn w<T>(weak: &Weak<T>) -> Result<Arc<T>, Error> {
weak.upgrade()
.ok_or_else(|| ErrorKind::Internal("failed to upgrade weak reference".to_owned()).into())
.ok_or_else(|| Error::Internal("failed to upgrade weak reference".to_owned()))
}
/// Internal function to retrieves an output by a given commitment
@ -35,7 +35,7 @@ fn get_unspent(
id: &str,
) -> Result<Option<(OutputIdentifier, CommitPos)>, Error> {
let c = util::from_hex(id)
.map_err(|_| ErrorKind::Argument(format!("Not a valid commitment: {}", id)))?;
.map_err(|_| Error::Argument(format!("Not a valid commitment: {}", id)))?;
let commit = Commitment::from_vec(c);
let res = chain.get_unspent(commit)?;
Ok(res)

View file

@ -33,7 +33,7 @@ impl VersionHandler {
pub fn get_version(&self) -> Result<Version, Error> {
let head = w(&self.chain)?
.head_header()
.map_err(|e| ErrorKind::Internal(format!("can't get head: {}", e)))?;
.map_err(|e| Error::Internal(format!("can't get head: {}", e)))?;
Ok(Version {
node_version: CRATE_VERSION.to_owned(),

View file

@ -19,8 +19,6 @@ use grin_pool as pool;
use grin_util as util;
#[macro_use]
extern crate failure_derive;
#[macro_use]
extern crate lazy_static;

View file

@ -112,8 +112,8 @@ impl Owner {
}
pub fn reset_chain_head(&self, hash: String) -> Result<(), Error> {
let hash = Hash::from_hex(&hash)
.map_err(|_| ErrorKind::RequestError("invalid header hash".into()))?;
let hash =
Hash::from_hex(&hash).map_err(|_| Error::RequestError("invalid header hash".into()))?;
let handler = ChainResetHandler {
chain: self.chain.clone(),
sync_state: self.sync_state.clone(),
@ -122,8 +122,8 @@ impl Owner {
}
pub fn invalidate_header(&self, hash: String) -> Result<(), Error> {
let hash = Hash::from_hex(&hash)
.map_err(|_| ErrorKind::RequestError("invalid header hash".into()))?;
let hash =
Hash::from_hex(&hash).map_err(|_| Error::RequestError("invalid header hash".into()))?;
let handler = ChainResetHandler {
chain: self.chain.clone(),
sync_state: self.sync_state.clone(),

View file

@ -17,7 +17,7 @@
use crate::owner::Owner;
use crate::p2p::types::PeerInfoDisplay;
use crate::p2p::PeerData;
use crate::rest::ErrorKind;
use crate::rest::Error;
use crate::types::Status;
use std::net::SocketAddr;
@ -70,7 +70,7 @@ pub trait OwnerRpc: Sync + Send {
# );
```
*/
fn get_status(&self) -> Result<Status, ErrorKind>;
fn get_status(&self) -> Result<Status, Error>;
/**
Networked version of [Owner::validate_chain](struct.Owner.html#method.validate_chain).
@ -100,7 +100,7 @@ pub trait OwnerRpc: Sync + Send {
# );
```
*/
fn validate_chain(&self, assume_valid_rangeproofs_kernels: bool) -> Result<(), ErrorKind>;
fn validate_chain(&self, assume_valid_rangeproofs_kernels: bool) -> Result<(), Error>;
/**
Networked version of [Owner::compact_chain](struct.Owner.html#method.compact_chain).
@ -130,11 +130,11 @@ pub trait OwnerRpc: Sync + Send {
# );
```
*/
fn compact_chain(&self) -> Result<(), ErrorKind>;
fn compact_chain(&self) -> Result<(), Error>;
fn reset_chain_head(&self, hash: String) -> Result<(), ErrorKind>;
fn reset_chain_head(&self, hash: String) -> Result<(), Error>;
fn invalidate_header(&self, hash: String) -> Result<(), ErrorKind>;
fn invalidate_header(&self, hash: String) -> Result<(), Error>;
/**
Networked version of [Owner::get_peers](struct.Owner.html#method.get_peers).
@ -176,7 +176,7 @@ pub trait OwnerRpc: Sync + Send {
# );
```
*/
fn get_peers(&self, peer_addr: Option<SocketAddr>) -> Result<Vec<PeerData>, ErrorKind>;
fn get_peers(&self, peer_addr: Option<SocketAddr>) -> Result<Vec<PeerData>, Error>;
/**
Networked version of [Owner::get_connected_peers](struct.Owner.html#method.get_connected_peers).
@ -295,7 +295,7 @@ pub trait OwnerRpc: Sync + Send {
# );
```
*/
fn get_connected_peers(&self) -> Result<Vec<PeerInfoDisplay>, ErrorKind>;
fn get_connected_peers(&self) -> Result<Vec<PeerInfoDisplay>, Error>;
/**
Networked version of [Owner::ban_peer](struct.Owner.html#method.ban_peer).
@ -325,7 +325,7 @@ pub trait OwnerRpc: Sync + Send {
# );
```
*/
fn ban_peer(&self, peer_addr: SocketAddr) -> Result<(), ErrorKind>;
fn ban_peer(&self, peer_addr: SocketAddr) -> Result<(), Error>;
/**
Networked version of [Owner::unban_peer](struct.Owner.html#method.unban_peer).
@ -355,44 +355,44 @@ pub trait OwnerRpc: Sync + Send {
# );
```
*/
fn unban_peer(&self, peer_addr: SocketAddr) -> Result<(), ErrorKind>;
fn unban_peer(&self, peer_addr: SocketAddr) -> Result<(), Error>;
}
impl OwnerRpc for Owner {
fn get_status(&self) -> Result<Status, ErrorKind> {
Owner::get_status(self).map_err(|e| e.kind().clone())
fn get_status(&self) -> Result<Status, Error> {
Owner::get_status(self)
}
fn validate_chain(&self, assume_valid_rangeproofs_kernels: bool) -> Result<(), ErrorKind> {
Owner::validate_chain(self, assume_valid_rangeproofs_kernels).map_err(|e| e.kind().clone())
fn validate_chain(&self, assume_valid_rangeproofs_kernels: bool) -> Result<(), Error> {
Owner::validate_chain(self, assume_valid_rangeproofs_kernels)
}
fn reset_chain_head(&self, hash: String) -> Result<(), ErrorKind> {
Owner::reset_chain_head(self, hash).map_err(|e| e.kind().clone())
fn reset_chain_head(&self, hash: String) -> Result<(), Error> {
Owner::reset_chain_head(self, hash)
}
fn invalidate_header(&self, hash: String) -> Result<(), ErrorKind> {
Owner::invalidate_header(self, hash).map_err(|e| e.kind().clone())
fn invalidate_header(&self, hash: String) -> Result<(), Error> {
Owner::invalidate_header(self, hash)
}
fn compact_chain(&self) -> Result<(), ErrorKind> {
Owner::compact_chain(self).map_err(|e| e.kind().clone())
fn compact_chain(&self) -> Result<(), Error> {
Owner::compact_chain(self)
}
fn get_peers(&self, addr: Option<SocketAddr>) -> Result<Vec<PeerData>, ErrorKind> {
Owner::get_peers(self, addr).map_err(|e| e.kind().clone())
fn get_peers(&self, addr: Option<SocketAddr>) -> Result<Vec<PeerData>, Error> {
Owner::get_peers(self, addr)
}
fn get_connected_peers(&self) -> Result<Vec<PeerInfoDisplay>, ErrorKind> {
Owner::get_connected_peers(self).map_err(|e| e.kind().clone())
fn get_connected_peers(&self) -> Result<Vec<PeerInfoDisplay>, Error> {
Owner::get_connected_peers(self)
}
fn ban_peer(&self, addr: SocketAddr) -> Result<(), ErrorKind> {
Owner::ban_peer(self, addr).map_err(|e| e.kind().clone())
fn ban_peer(&self, addr: SocketAddr) -> Result<(), Error> {
Owner::ban_peer(self, addr)
}
fn unban_peer(&self, addr: SocketAddr) -> Result<(), ErrorKind> {
Owner::unban_peer(self, addr).map_err(|e| e.kind().clone())
fn unban_peer(&self, addr: SocketAddr) -> Result<(), Error> {
Owner::unban_peer(self, addr)
}
}

View file

@ -20,7 +20,6 @@
use crate::router::{Handler, HandlerObj, ResponseFuture, Router, RouterError};
use crate::web::response;
use failure::{Backtrace, Context, Fail, ResultExt};
use futures::channel::oneshot;
use futures::TryStreamExt;
use hyper::server::accept;
@ -28,7 +27,6 @@ use hyper::service::make_service_fn;
use hyper::{Body, Request, Server, StatusCode};
use rustls::internal::pemfile;
use std::convert::Infallible;
use std::fmt::{self, Display};
use std::fs::File;
use std::net::SocketAddr;
use std::sync::Arc;
@ -39,76 +37,28 @@ use tokio::stream::StreamExt;
use tokio_rustls::TlsAcceptor;
/// Errors that can be returned by an ApiEndpoint implementation.
#[derive(Debug)]
pub struct Error {
inner: Context<ErrorKind>,
}
#[derive(Clone, Eq, PartialEq, Debug, Fail, Serialize, Deserialize)]
pub enum ErrorKind {
#[fail(display = "Internal error: {}", _0)]
#[derive(Clone, Eq, PartialEq, Debug, thiserror::Error, Serialize, Deserialize)]
pub enum Error {
#[error("Internal error: {0}")]
Internal(String),
#[fail(display = "Bad arguments: {}", _0)]
#[error("Bad arguments: {0}")]
Argument(String),
#[fail(display = "Not found.")]
#[error("Not found.")]
NotFound,
#[fail(display = "Request error: {}", _0)]
#[error("Request error: {0}")]
RequestError(String),
#[fail(display = "ResponseError error: {}", _0)]
#[error("ResponseError error: {0}")]
ResponseError(String),
#[fail(display = "Router error: {}", _0)]
Router(RouterError),
}
impl Fail for Error {
fn cause(&self) -> Option<&dyn Fail> {
self.inner.cause()
}
fn backtrace(&self) -> Option<&Backtrace> {
self.inner.backtrace()
}
}
impl Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
Display::fmt(&self.inner, f)
}
}
impl Error {
pub fn kind(&self) -> &ErrorKind {
self.inner.get_context()
}
}
impl From<ErrorKind> for Error {
fn from(kind: ErrorKind) -> Error {
Error {
inner: Context::new(kind),
}
}
}
impl From<Context<ErrorKind>> for Error {
fn from(inner: Context<ErrorKind>) -> Error {
Error { inner: inner }
}
}
impl From<RouterError> for Error {
fn from(error: RouterError) -> Error {
Error {
inner: Context::new(ErrorKind::Router(error)),
}
}
#[error("Router error: {source}")]
Router {
#[from]
source: RouterError,
},
}
impl From<crate::chain::Error> for Error {
fn from(error: crate::chain::Error) -> Error {
Error {
inner: Context::new(ErrorKind::Internal(error.to_string())),
}
Error::Internal(error.to_string())
}
}
@ -128,27 +78,24 @@ impl TLSConfig {
}
fn load_certs(&self) -> Result<Vec<rustls::Certificate>, Error> {
let certfile = File::open(&self.certificate).context(ErrorKind::Internal(format!(
"failed to open file {}",
self.certificate
)))?;
let certfile = File::open(&self.certificate).map_err(|e| {
Error::Internal(format!("failed to open file {} {}", self.certificate, e))
})?;
let mut reader = io::BufReader::new(certfile);
pemfile::certs(&mut reader)
.map_err(|_| ErrorKind::Internal("failed to load certificate".to_string()).into())
.map_err(|_| Error::Internal("failed to load certificate".to_string()))
}
fn load_private_key(&self) -> Result<rustls::PrivateKey, Error> {
let keyfile = File::open(&self.private_key).context(ErrorKind::Internal(format!(
"failed to open file {}",
self.private_key
)))?;
let keyfile = File::open(&self.private_key)
.map_err(|e| Error::Internal(format!("failed to open private key file {}", e)))?;
let mut reader = io::BufReader::new(keyfile);
let keys = pemfile::pkcs8_private_keys(&mut reader)
.map_err(|_| ErrorKind::Internal("failed to load private key".to_string()))?;
.map_err(|_| Error::Internal("failed to load private key".to_string()))?;
if keys.len() != 1 {
return Err(ErrorKind::Internal("expected a single private key".to_string()).into());
return Err(Error::Internal("expected a single private key".to_string()));
}
Ok(keys[0].clone())
}
@ -158,9 +105,7 @@ impl TLSConfig {
let key = self.load_private_key()?;
let mut cfg = rustls::ServerConfig::new(rustls::NoClientAuth::new());
cfg.set_single_cert(certs, key)
.context(ErrorKind::Internal(
"set single certificate failed".to_string(),
))?;
.map_err(|e| Error::Internal(format!("set single certificate failed {}", e)))?;
Ok(Arc::new(cfg))
}
}
@ -202,10 +147,9 @@ impl ApiServer {
api_chan: &'static mut (oneshot::Sender<()>, oneshot::Receiver<()>),
) -> Result<thread::JoinHandle<()>, Error> {
if self.shutdown_sender.is_some() {
return Err(ErrorKind::Internal(
return Err(Error::Internal(
"Can't start HTTP API server, it's running already".to_string(),
)
.into());
));
}
let rx = &mut api_chan.1;
let tx = &mut api_chan.0;
@ -238,7 +182,7 @@ impl ApiServer {
eprintln!("HTTP API server error: {}", e)
}
})
.map_err(|_| ErrorKind::Internal("failed to spawn API thread".to_string()).into())
.map_err(|_| Error::Internal("failed to spawn API thread".to_string()))
}
/// Starts the TLS ApiServer at the provided address.
@ -251,10 +195,9 @@ impl ApiServer {
api_chan: &'static mut (oneshot::Sender<()>, oneshot::Receiver<()>),
) -> Result<thread::JoinHandle<()>, Error> {
if self.shutdown_sender.is_some() {
return Err(ErrorKind::Internal(
return Err(Error::Internal(
"Can't start HTTPS API server, it's running already".to_string(),
)
.into());
));
}
let rx = &mut api_chan.1;
@ -296,7 +239,7 @@ impl ApiServer {
eprintln!("HTTP API server error: {}", e)
}
})
.map_err(|_| ErrorKind::Internal("failed to spawn API thread".to_string()).into())
.map_err(|_| Error::Internal("failed to spawn API thread".to_string()))
}
/// Stops the API server, it panics in case of error

View file

@ -86,13 +86,13 @@ pub trait Handler {
}
}
#[derive(Clone, Fail, Eq, Debug, PartialEq, Serialize, Deserialize)]
#[derive(Clone, thiserror::Error, Eq, Debug, PartialEq, Serialize, Deserialize)]
pub enum RouterError {
#[fail(display = "Route already exists")]
#[error("Route already exists")]
RouteAlreadyExists,
#[fail(display = "Route not found")]
#[error("Route not found")]
RouteNotFound,
#[fail(display = "Value not found")]
#[error("Value not found")]
NoValue,
}

View file

@ -16,10 +16,10 @@ where
{
let raw = body::to_bytes(req.into_body())
.await
.map_err(|e| ErrorKind::RequestError(format!("Failed to read request: {}", e)))?;
.map_err(|e| Error::RequestError(format!("Failed to read request: {}", e)))?;
serde_json::from_reader(raw.bytes())
.map_err(|e| ErrorKind::RequestError(format!("Invalid request body: {}", e)).into())
.map_err(|e| Error::RequestError(format!("Invalid request body: {}", e)))
}
/// Convert Result to ResponseFuture
@ -29,16 +29,14 @@ where
{
match res {
Ok(s) => json_response_pretty(&s),
Err(e) => match e.kind() {
ErrorKind::Argument(msg) => response(StatusCode::BAD_REQUEST, msg.clone()),
ErrorKind::RequestError(msg) => response(StatusCode::BAD_REQUEST, msg.clone()),
ErrorKind::NotFound => response(StatusCode::NOT_FOUND, ""),
ErrorKind::Internal(msg) => response(StatusCode::INTERNAL_SERVER_ERROR, msg.clone()),
ErrorKind::ResponseError(msg) => {
response(StatusCode::INTERNAL_SERVER_ERROR, msg.clone())
}
Err(e) => match e {
Error::Argument(msg) => response(StatusCode::BAD_REQUEST, msg.clone()),
Error::RequestError(msg) => response(StatusCode::BAD_REQUEST, msg.clone()),
Error::NotFound => response(StatusCode::NOT_FOUND, ""),
Error::Internal(msg) => response(StatusCode::INTERNAL_SERVER_ERROR, msg.clone()),
Error::ResponseError(msg) => response(StatusCode::INTERNAL_SERVER_ERROR, msg.clone()),
// place holder
ErrorKind::Router(_) => response(StatusCode::INTERNAL_SERVER_ERROR, ""),
Error::Router { .. } => response(StatusCode::INTERNAL_SERVER_ERROR, ""),
},
}
}
@ -147,7 +145,7 @@ macro_rules! must_get_query(
($req: expr) =>(
match $req.uri().query() {
Some(q) => q,
None => return Err(ErrorKind::RequestError("no query string".to_owned()).into()),
None => return Err(Error::RequestError("no query string".to_owned())),
}
));
@ -158,7 +156,7 @@ macro_rules! parse_param(
None => $default,
Some(val) => match val.parse() {
Ok(val) => val,
Err(_) => return Err(ErrorKind::RequestError(format!("invalid value of parameter {}", $name)).into()),
Err(_) => return Err(Error::RequestError(format!("invalid value of parameter {}", $name))),
}
}
));

View file

@ -67,9 +67,6 @@ jobs:
CI_JOB: release
PLATFORM: macos
steps:
- script: |
brew uninstall llvm
displayName: macOS Uninstall LLVM
- template: '.ci/test.yml'
- template: '.ci/release.yml'
- job: windows

View file

@ -13,13 +13,12 @@ edition = "2018"
bit-vec = "0.6"
bitflags = "1"
byteorder = "1"
failure = "0.1"
failure_derive = "0.1"
croaring = "0.4.6"
enum_primitive = "0.1"
log = "0.4"
serde = "1"
serde_derive = "1"
thiserror = "1"
chrono = "0.4.11"
lru-cache = "0.1"
lazy_static = "1"

View file

@ -23,7 +23,7 @@ use crate::core::core::{
use crate::core::global;
use crate::core::pow;
use crate::core::ser::ProtocolVersion;
use crate::error::{Error, ErrorKind};
use crate::error::Error;
use crate::pipe;
use crate::store;
use crate::txhashset;
@ -376,11 +376,11 @@ impl Chain {
pub fn is_known(&self, header: &BlockHeader) -> Result<(), Error> {
let head = self.head()?;
if head.hash() == header.hash() {
return Err(ErrorKind::Unfit("duplicate block".into()).into());
return Err(Error::Unfit("duplicate block".into()));
}
if header.total_difficulty() <= head.total_difficulty {
if self.block_exists(header.hash())? {
return Err(ErrorKind::Unfit("duplicate block".into()).into());
return Err(Error::Unfit("duplicate block".into()));
}
}
Ok(())
@ -416,7 +416,7 @@ impl Chain {
},
);
Err(ErrorKind::Orphan.into())
Err(Error::Orphan)
}
/// Attempt to add a new block to the chain.
@ -681,7 +681,7 @@ impl Chain {
if tx.lock_height() <= height {
Ok(())
} else {
Err(ErrorKind::TxLockHeight.into())
Err(Error::TxLockHeight)
}
}
@ -1091,7 +1091,7 @@ impl Chain {
let fork_point = self.fork_point()?;
if !self.check_txhashset_needed(&fork_point)? {
warn!("txhashset_write: txhashset received but it's not needed! ignored.");
return Err(ErrorKind::InvalidTxHashSet("not needed".to_owned()).into());
return Err(Error::InvalidTxHashSet("not needed".to_owned()));
}
let header = match self.get_block_header(&h) {
@ -1369,10 +1369,9 @@ impl Chain {
let rangeproofs =
txhashset.rangeproofs_by_pmmr_index(start_index, max_count, max_pmmr_index);
if outputs.0 != rangeproofs.0 || outputs.1.len() != rangeproofs.1.len() {
return Err(ErrorKind::TxHashSetErr(String::from(
return Err(Error::TxHashSetErr(String::from(
"Output and rangeproof sets don't match",
))
.into());
)));
}
let mut output_vec: Vec<Output> = vec![];
for (ref x, &y) in outputs.1.iter().zip(rangeproofs.1.iter()) {
@ -1411,56 +1410,56 @@ impl Chain {
pub fn head(&self) -> Result<Tip, Error> {
self.store
.head()
.map_err(|e| ErrorKind::StoreErr(e, "chain head".to_owned()).into())
.map_err(|e| Error::StoreErr(e, "chain head".to_owned()))
}
/// Tail of the block chain in this node after compact (cross-block cut-through)
pub fn tail(&self) -> Result<Tip, Error> {
self.store
.tail()
.map_err(|e| ErrorKind::StoreErr(e, "chain tail".to_owned()).into())
.map_err(|e| Error::StoreErr(e, "chain tail".to_owned()))
}
/// Tip (head) of the header chain.
pub fn header_head(&self) -> Result<Tip, Error> {
self.store
.header_head()
.map_err(|e| ErrorKind::StoreErr(e, "header head".to_owned()).into())
.map_err(|e| Error::StoreErr(e, "header head".to_owned()))
}
/// Block header for the chain head
pub fn head_header(&self) -> Result<BlockHeader, Error> {
self.store
.head_header()
.map_err(|e| ErrorKind::StoreErr(e, "chain head header".to_owned()).into())
.map_err(|e| Error::StoreErr(e, "chain head header".to_owned()))
}
/// Gets a block by hash
pub fn get_block(&self, h: &Hash) -> Result<Block, Error> {
self.store
.get_block(h)
.map_err(|e| ErrorKind::StoreErr(e, "chain get block".to_owned()).into())
.map_err(|e| Error::StoreErr(e, "chain get block".to_owned()))
}
/// Gets a block header by hash
pub fn get_block_header(&self, h: &Hash) -> Result<BlockHeader, Error> {
self.store
.get_block_header(h)
.map_err(|e| ErrorKind::StoreErr(e, "chain get header".to_owned()).into())
.map_err(|e| Error::StoreErr(e, "chain get header".to_owned()))
}
/// Get previous block header.
pub fn get_previous_header(&self, header: &BlockHeader) -> Result<BlockHeader, Error> {
self.store
.get_previous_header(header)
.map_err(|e| ErrorKind::StoreErr(e, "chain get previous header".to_owned()).into())
.map_err(|e| Error::StoreErr(e, "chain get previous header".to_owned()))
}
/// Get block_sums by header hash.
pub fn get_block_sums(&self, h: &Hash) -> Result<BlockSums, Error> {
self.store
.get_block_sums(h)
.map_err(|e| ErrorKind::StoreErr(e, "chain get block_sums".to_owned()).into())
.map_err(|e| Error::StoreErr(e, "chain get block_sums".to_owned()))
}
/// Gets the block header at the provided height.
@ -1482,7 +1481,7 @@ impl Chain {
let txhashset = self.txhashset.read();
let (_, pos) = match txhashset.get_unspent(commit)? {
Some(o) => o,
None => return Err(ErrorKind::OutputNotFound.into()),
None => return Err(Error::OutputNotFound),
};
let hash = header_pmmr.get_header_hash_by_height(pos.height)?;
Ok(self.get_block_header(&hash)?)
@ -1584,13 +1583,13 @@ impl Chain {
fn is_on_current_chain<T: Into<Tip>>(&self, x: T, head: Tip) -> Result<(), Error> {
let x: Tip = x.into();
if x.height > head.height {
return Err(ErrorKind::Other("not on current chain".to_string()).into());
return Err(Error::Other("not on current chain".to_string()));
}
if x.hash() == self.get_header_hash_by_height(x.height)? {
Ok(())
} else {
Err(ErrorKind::Other("not on current chain".to_string()).into())
Err(Error::Other("not on current chain".to_string()))
}
}
@ -1624,7 +1623,7 @@ impl Chain {
pub fn block_exists(&self, h: Hash) -> Result<bool, Error> {
self.store
.block_exists(&h)
.map_err(|e| ErrorKind::StoreErr(e, "chain block exists".to_owned()).into())
.map_err(|e| Error::StoreErr(e, "chain block exists".to_owned()))
}
}
@ -1789,7 +1788,7 @@ fn setup_head(
info!("init: saved genesis: {:?}", genesis.hash());
}
Err(e) => return Err(ErrorKind::StoreErr(e, "chain init load head".to_owned()).into()),
Err(e) => return Err(Error::StoreErr(e, "chain init load head".to_owned())),
};
batch.commit()?;
Ok(())

View file

@ -19,293 +19,204 @@ use crate::core::ser;
use crate::keychain;
use crate::util::secp;
use crate::util::secp::pedersen::Commitment;
use failure::{Backtrace, Context, Fail};
use grin_store as store;
use std::fmt::{self, Display};
use std::io;
/// Error definition
#[derive(Debug, Fail)]
pub struct Error {
inner: Context<ErrorKind>,
}
/// Chain error definitions
#[derive(Clone, Eq, PartialEq, Debug, Fail)]
pub enum ErrorKind {
#[derive(Clone, Eq, PartialEq, Debug, thiserror::Error)]
pub enum Error {
/// The block doesn't fit anywhere in our chain
#[fail(display = "Block is unfit: {}", _0)]
#[error("Block is unfit: {0}")]
Unfit(String),
/// Special case of orphan blocks
#[fail(display = "Orphan")]
#[error("Orphan")]
Orphan,
/// Difficulty is too low either compared to ours or the block PoW hash
#[fail(display = "Difficulty is too low compared to ours or the block PoW hash")]
#[error("Difficulty is too low compared to ours or the block PoW hash")]
DifficultyTooLow,
/// Addition of difficulties on all previous block is wrong
#[fail(display = "Addition of difficulties on all previous blocks is wrong")]
#[error("Addition of difficulties on all previous blocks is wrong")]
WrongTotalDifficulty,
/// Block header edge_bits is lower than our min
#[fail(display = "Cuckoo Size too small")]
#[error("Cuckoo Size too small")]
LowEdgebits,
/// Scaling factor between primary and secondary PoW is invalid
#[fail(display = "Wrong scaling factor")]
#[error("Wrong scaling factor")]
InvalidScaling,
/// The proof of work is invalid
#[fail(display = "Invalid PoW")]
#[error("Invalid PoW")]
InvalidPow,
/// Peer abusively sending us an old block we already have
#[fail(display = "Old Block")]
#[error("Old Block")]
OldBlock,
/// The block doesn't sum correctly or a tx signature is invalid
#[fail(display = "Invalid Block Proof")]
InvalidBlockProof(block::Error),
#[error("Invalid Block Proof")]
InvalidBlockProof {
#[from]
/// Conversion
source: block::Error,
},
/// Block time is too old
#[fail(display = "Invalid Block Time")]
#[error("Invalid Block Time")]
InvalidBlockTime,
/// Block height is invalid (not previous + 1)
#[fail(display = "Invalid Block Height")]
#[error("Invalid Block Height")]
InvalidBlockHeight,
/// One of the root hashes in the block is invalid
#[fail(display = "Invalid Root")]
#[error("Invalid Root")]
InvalidRoot,
/// One of the MMR sizes in the block header is invalid
#[fail(display = "Invalid MMR Size")]
#[error("Invalid MMR Size")]
InvalidMMRSize,
/// Error from underlying keychain impl
#[fail(display = "Keychain Error")]
Keychain(keychain::Error),
#[error("Keychain Error")]
Keychain {
#[from]
/// Conversion
source: keychain::Error,
},
/// Error from underlying secp lib
#[fail(display = "Secp Lib Error")]
Secp(secp::Error),
#[error("Secp Lib Error")]
Secp {
#[from]
/// Conversion
source: secp::Error,
},
/// One of the inputs in the block has already been spent
#[fail(display = "Already Spent: {:?}", _0)]
#[error("Already Spent: {0:?}")]
AlreadySpent(Commitment),
/// An output with that commitment already exists (should be unique)
#[fail(display = "Duplicate Commitment: {:?}", _0)]
#[error("Duplicate Commitment: {0:?}")]
DuplicateCommitment(Commitment),
/// Attempt to spend a coinbase output before it sufficiently matures.
#[fail(display = "Attempt to spend immature coinbase")]
#[error("Attempt to spend immature coinbase")]
ImmatureCoinbase,
/// Error validating a Merkle proof (coinbase output)
#[fail(display = "Error validating merkle proof")]
#[error("Error validating merkle proof")]
MerkleProof,
/// Output not found
#[fail(display = "Output not found")]
#[error("Output not found")]
OutputNotFound,
/// Rangeproof not found
#[fail(display = "Rangeproof not found")]
#[error("Rangeproof not found")]
RangeproofNotFound,
/// Tx kernel not found
#[fail(display = "Tx kernel not found")]
#[error("Tx kernel not found")]
TxKernelNotFound,
/// output spent
#[fail(display = "Output is spent")]
#[error("Output is spent")]
OutputSpent,
/// Invalid block version, either a mistake or outdated software
#[fail(display = "Invalid Block Version: {:?}", _0)]
#[error("Invalid Block Version: {0:?}")]
InvalidBlockVersion(block::HeaderVersion),
/// We've been provided a bad txhashset
#[fail(display = "Invalid TxHashSet: {}", _0)]
#[error("Invalid TxHashSet: {0}")]
InvalidTxHashSet(String),
/// Internal issue when trying to save or load data from store
#[fail(display = "Store Error: {}, reason: {}", _1, _0)]
#[error("Store Error: {1}, reason: {0}")]
StoreErr(store::Error, String),
/// Internal issue when trying to save or load data from append only files
#[fail(display = "File Read Error: {}", _0)]
#[error("File Read Error: {0}")]
FileReadErr(String),
/// Error serializing or deserializing a type
#[fail(display = "Serialization Error")]
SerErr(ser::Error),
#[error("Serialization Error")]
SerErr {
#[from]
/// Conversion
source: ser::Error,
},
/// Error with the txhashset
#[fail(display = "TxHashSetErr: {}", _0)]
#[error("TxHashSetErr: {0}")]
TxHashSetErr(String),
/// Tx not valid based on lock_height.
#[fail(display = "Transaction Lock Height")]
#[error("Transaction Lock Height")]
TxLockHeight,
/// Tx is not valid due to NRD relative_height restriction.
#[fail(display = "NRD Relative Height")]
#[error("NRD Relative Height")]
NRDRelativeHeight,
/// No chain exists and genesis block is required
#[fail(display = "Genesis Block Required")]
#[error("Genesis Block Required")]
GenesisBlockRequired,
/// Error from underlying tx handling
#[fail(display = "Transaction Validation Error: {:?}", _0)]
Transaction(transaction::Error),
#[error("Transaction Validation Error: {source:?}")]
Transaction {
/// Conversion
#[from]
source: transaction::Error,
},
/// Error from underlying block handling
#[fail(display = "Block Validation Error: {:?}", _0)]
#[error("Block Validation Error: {0:?}")]
Block(block::Error),
/// Attempt to retrieve a header at a height greater than
/// the max allowed by u64 limits
#[fail(display = "Invalid Header Height: {}", _0)]
#[error("Invalid Header Height: {0:?}")]
InvalidHeaderHeight(u64),
/// Anything else
#[fail(display = "Other Error: {}", _0)]
#[error("Other Error: {0}")]
Other(String),
/// Error from summing and verifying kernel sums via committed trait.
#[fail(display = "Committed Trait: Error summing and verifying kernel sums")]
Committed(committed::Error),
#[error("Committed Trait: Error summing and verifying kernel sums")]
Committed {
#[from]
/// Conversion
source: committed::Error,
},
/// We cannot process data once the Grin server has been stopped.
#[fail(display = "Stopped (Grin Shutting Down)")]
#[error("Stopped (Grin Shutting Down)")]
Stopped,
/// Internal Roaring Bitmap error
#[fail(display = "Roaring Bitmap error")]
#[error("Roaring Bitmap error")]
Bitmap,
/// Error during chain sync
#[fail(display = "Sync error")]
#[error("Sync error")]
SyncError(String),
/// PIBD segment related error
#[fail(display = "Segment error")]
SegmentError(segment::SegmentError),
#[error("Segment error")]
SegmentError {
#[from]
/// Conversion
source: segment::SegmentError,
},
/// We've decided to halt the PIBD process due to lack of supporting peers or
/// otherwise failing to progress for a certain amount of time
#[fail(display = "Aborting PIBD error")]
#[error("Aborting PIBD error")]
AbortingPIBDError,
/// The segmenter is associated to a different block header
#[fail(display = "Segmenter header mismatch")]
#[error("Segmenter header mismatch")]
SegmenterHeaderMismatch,
/// Segment height not within allowed range
#[fail(display = "Invalid segment height")]
#[error("Invalid segment height")]
InvalidSegmentHeight,
/// Other issue with segment
#[fail(display = "Invalid segment: {}", _0)]
#[error("Invalid segment: {0}")]
InvalidSegment(String),
}
impl Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let cause = match self.cause() {
Some(c) => format!("{}", c),
None => String::from("Unknown"),
};
let backtrace = match self.backtrace() {
Some(b) => format!("{}", b),
None => String::from("Unknown"),
};
let output = format!(
"{} \n Cause: {} \n Backtrace: {}",
self.inner, cause, backtrace
);
Display::fmt(&output, f)
}
}
impl Error {
/// get kind
pub fn kind(&self) -> ErrorKind {
self.inner.get_context().clone()
}
/// get cause
pub fn cause(&self) -> Option<&dyn Fail> {
self.inner.cause()
}
/// get backtrace
pub fn backtrace(&self) -> Option<&Backtrace> {
self.inner.backtrace()
}
/// Whether the error is due to a block that was intrinsically wrong
pub fn is_bad_data(&self) -> bool {
// shorter to match on all the "not the block's fault" errors
match self.kind() {
ErrorKind::Unfit(_)
| ErrorKind::Orphan
| ErrorKind::StoreErr(_, _)
| ErrorKind::SerErr(_)
| ErrorKind::TxHashSetErr(_)
| ErrorKind::GenesisBlockRequired
| ErrorKind::Other(_) => false,
match self {
Error::Unfit(_)
| Error::Orphan
| Error::StoreErr(_, _)
| Error::SerErr { .. }
| Error::TxHashSetErr(_)
| Error::GenesisBlockRequired
| Error::Other(_) => false,
_ => true,
}
}
}
impl From<ErrorKind> for Error {
fn from(kind: ErrorKind) -> Error {
Error {
inner: Context::new(kind),
}
}
}
impl From<Context<ErrorKind>> for Error {
fn from(inner: Context<ErrorKind>) -> Error {
Error { inner: inner }
}
}
impl From<block::Error> for Error {
fn from(error: block::Error) -> Error {
let ec = error.clone();
Error {
inner: error.context(ErrorKind::InvalidBlockProof(ec)),
}
}
}
impl From<store::Error> for Error {
fn from(error: store::Error) -> Error {
let ec = error.clone();
Error {
//inner: error.context();Context::new(ErrorKind::StoreErr(error.clone(),
// format!("{:?}", error))),
inner: error.context(ErrorKind::StoreErr(ec.clone(), format!("{:?}", ec))),
}
}
}
impl From<keychain::Error> for Error {
fn from(error: keychain::Error) -> Error {
Error {
inner: Context::new(ErrorKind::Keychain(error)),
}
}
}
impl From<transaction::Error> for Error {
fn from(error: transaction::Error) -> Error {
Error {
inner: Context::new(ErrorKind::Transaction(error)),
}
}
}
impl From<committed::Error> for Error {
fn from(error: committed::Error) -> Error {
Error {
inner: Context::new(ErrorKind::Committed(error)),
}
Error::StoreErr(error.clone(), format!("{:?}", error))
}
}
impl From<io::Error> for Error {
fn from(e: io::Error) -> Error {
Error {
inner: Context::new(ErrorKind::TxHashSetErr(e.to_string())),
}
}
}
impl From<ser::Error> for Error {
fn from(error: ser::Error) -> Error {
Error {
inner: Context::new(ErrorKind::SerErr(error)),
}
}
}
impl From<segment::SegmentError> for Error {
fn from(error: segment::SegmentError) -> Error {
Error {
inner: Context::new(ErrorKind::SegmentError(error)),
}
}
}
impl From<secp::Error> for Error {
fn from(e: secp::Error) -> Error {
Error {
inner: Context::new(ErrorKind::Secp(e)),
}
Error::TxHashSetErr(e.to_string())
}
}

View file

@ -47,7 +47,7 @@ pub mod types;
// Re-export the base interface
pub use crate::chain::{Chain, MAX_ORPHAN_SIZE};
pub use crate::error::{Error, ErrorKind};
pub use crate::error::Error;
pub use crate::store::ChainStore;
pub use crate::types::{
BlockStatus, ChainAdapter, Options, SyncState, SyncStatus, Tip, TxHashsetDownloadStats,

View file

@ -22,7 +22,7 @@ use crate::core::core::{
};
use crate::core::global;
use crate::core::pow;
use crate::error::{Error, ErrorKind};
use crate::error::Error;
use crate::store;
use crate::txhashset;
use crate::types::{CommitPos, Options, Tip};
@ -63,14 +63,14 @@ fn validate_pow_only(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result
return Ok(());
}
if !header.pow.is_primary() && !header.pow.is_secondary() {
return Err(ErrorKind::LowEdgebits.into());
return Err(Error::LowEdgebits);
}
if (ctx.pow_verifier)(header).is_err() {
error!(
"pipe: error validating header with cuckoo edge_bits {}",
header.pow.edge_bits(),
);
return Err(ErrorKind::InvalidPow.into());
return Err(Error::InvalidPow);
}
Ok(())
}
@ -289,7 +289,7 @@ pub fn process_block_header(header: &BlockHeader, ctx: &mut BlockContext<'_>) ->
fn check_known_head(header: &BlockHeader, head: &Tip) -> Result<(), Error> {
let bh = header.hash();
if bh == head.last_block_h || bh == head.prev_block_h {
return Err(ErrorKind::Unfit("already known in head".to_string()).into());
return Err(Error::Unfit("already known in head".to_string()));
}
Ok(())
}
@ -306,16 +306,16 @@ fn check_known_store(
// TODO - we flag this as an "abusive peer" but only in the case
// where we have the full block in our store.
// So this is not a particularly exhaustive check.
Err(ErrorKind::OldBlock.into())
Err(Error::OldBlock)
} else {
Err(ErrorKind::Unfit("already known in store".to_string()).into())
Err(Error::Unfit("already known in store".to_string()))
}
}
Ok(false) => {
// Not yet processed this block, we can proceed.
Ok(())
}
Err(e) => Err(ErrorKind::StoreErr(e, "pipe get this block".to_owned()).into()),
Err(e) => Err(Error::StoreErr(e, "pipe get this block".to_owned())),
}
}
@ -352,7 +352,9 @@ pub fn validate_header_denylist(header: &BlockHeader, denylist: &[Hash]) -> Resu
);
if denylist.contains(&header.hash()) {
return Err(ErrorKind::Block(block::Error::Other("header hash denied".into())).into());
return Err(Error::Block(block::Error::Other(
"header hash denied".into(),
)));
} else {
return Ok(());
}
@ -370,18 +372,18 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<(
// This header height must increase the height from the previous header by exactly 1.
if header.height != prev.height + 1 {
return Err(ErrorKind::InvalidBlockHeight.into());
return Err(Error::InvalidBlockHeight);
}
// This header must have a valid header version for its height.
if !consensus::valid_header_version(header.height, header.version) {
return Err(ErrorKind::InvalidBlockVersion(header.version).into());
return Err(Error::InvalidBlockVersion(header.version));
}
if header.timestamp <= prev.timestamp {
// prevent time warp attacks and some timestamp manipulations by forcing strict
// time progression
return Err(ErrorKind::InvalidBlockTime.into());
return Err(Error::InvalidBlockTime);
}
// We can determine output and kernel counts for this block based on mmr sizes from previous header.
@ -395,13 +397,13 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<(
// Each block must contain at least 1 kernel and 1 output for the block reward.
if num_outputs == 0 || num_kernels == 0 {
return Err(ErrorKind::InvalidMMRSize.into());
return Err(Error::InvalidMMRSize);
}
// Block header is invalid (and block is invalid) if this lower bound is too heavy for a full block.
let weight = TransactionBody::weight_by_iok(0, num_outputs, num_kernels);
if weight > global::max_block_weight() {
return Err(ErrorKind::Block(block::Error::TooHeavy).into());
return Err(Error::Block(block::Error::TooHeavy));
}
// verify the proof of work and related parameters
@ -416,13 +418,13 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<(
validate_pow_only(header, ctx)?;
if header.total_difficulty() <= prev.total_difficulty() {
return Err(ErrorKind::DifficultyTooLow.into());
return Err(Error::DifficultyTooLow);
}
let target_difficulty = header.total_difficulty() - prev.total_difficulty();
if header.pow.to_difficulty(header.height) < target_difficulty {
return Err(ErrorKind::DifficultyTooLow.into());
return Err(Error::DifficultyTooLow);
}
// explicit check to ensure total_difficulty has increased by exactly
@ -437,7 +439,7 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<(
target_difficulty.to_num(),
next_header_info.difficulty.to_num()
);
return Err(ErrorKind::WrongTotalDifficulty.into());
return Err(Error::WrongTotalDifficulty);
}
// check the secondary PoW scaling factor if applicable
if header.version < HeaderVersion(5)
@ -447,7 +449,7 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<(
"validate_header: header secondary scaling {} != {}",
header.pow.secondary_scaling, next_header_info.secondary_scaling
);
return Err(ErrorKind::InvalidScaling.into());
return Err(Error::InvalidScaling);
}
}
@ -456,9 +458,7 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<(
fn validate_block(block: &Block, ctx: &mut BlockContext<'_>) -> Result<(), Error> {
let prev = ctx.batch.get_previous_header(&block.header)?;
block
.validate(&prev.total_kernel_offset)
.map_err(ErrorKind::InvalidBlockProof)?;
block.validate(&prev.total_kernel_offset)?;
Ok(())
}
@ -530,7 +530,7 @@ fn update_body_tail(bh: &BlockHeader, batch: &store::Batch<'_>) -> Result<(), Er
let tip = Tip::from_header(bh);
batch
.save_body_tail(&tip)
.map_err(|e| ErrorKind::StoreErr(e, "pipe save body tail".to_owned()))?;
.map_err(|e| Error::StoreErr(e, "pipe save body tail".to_owned()))?;
debug!("body tail {} @ {}", bh.hash(), bh.height);
Ok(())
}
@ -539,14 +539,14 @@ fn update_body_tail(bh: &BlockHeader, batch: &store::Batch<'_>) -> Result<(), Er
fn add_block_header(bh: &BlockHeader, batch: &store::Batch<'_>) -> Result<(), Error> {
batch
.save_block_header(bh)
.map_err(|e| ErrorKind::StoreErr(e, "pipe save header".to_owned()))?;
.map_err(|e| Error::StoreErr(e, "pipe save header".to_owned()))?;
Ok(())
}
fn update_header_head(head: &Tip, batch: &store::Batch<'_>) -> Result<(), Error> {
batch
.save_header_head(&head)
.map_err(|e| ErrorKind::StoreErr(e, "pipe save header head".to_owned()))?;
.map_err(|e| Error::StoreErr(e, "pipe save header head".to_owned()))?;
debug!(
"header head updated to {} at {}",
@ -559,7 +559,7 @@ fn update_header_head(head: &Tip, batch: &store::Batch<'_>) -> Result<(), Error>
fn update_head(head: &Tip, batch: &store::Batch<'_>) -> Result<(), Error> {
batch
.save_body_head(&head)
.map_err(|e| ErrorKind::StoreErr(e, "pipe save body".to_owned()))?;
.map_err(|e| Error::StoreErr(e, "pipe save body".to_owned()))?;
debug!("head updated to {} at {}", head.last_block_h, head.height);
@ -595,7 +595,7 @@ pub fn rewind_and_apply_header_fork(
for h in fork_hashes {
let header = batch
.get_block_header(&h)
.map_err(|e| ErrorKind::StoreErr(e, "getting forked headers".to_string()))?;
.map_err(|e| Error::StoreErr(e, "getting forked headers".to_string()))?;
// Re-validate every header being re-applied.
// This makes it possible to check all header hashes against the ctx specific "denylist".
@ -646,7 +646,7 @@ pub fn rewind_and_apply_fork(
for h in fork_hashes {
let fb = batch
.get_block(&h)
.map_err(|e| ErrorKind::StoreErr(e, "getting forked blocks".to_string()))?;
.map_err(|e| Error::StoreErr(e, "getting forked blocks".to_string()))?;
// Re-verify coinbase maturity along this fork.
verify_coinbase_maturity(&fb, ext, batch)?;

View file

@ -23,7 +23,7 @@ use crate::core::core::hash::{DefaultHashable, Hash};
use crate::core::core::pmmr::segment::{Segment, SegmentIdentifier, SegmentProof};
use crate::core::core::pmmr::{self, Backend, ReadablePMMR, ReadonlyPMMR, VecBackend, PMMR};
use crate::core::ser::{self, PMMRable, Readable, Reader, Writeable, Writer};
use crate::error::{Error, ErrorKind};
use crate::error::Error;
use enum_primitive::FromPrimitive;
/// The "bitmap accumulator" allows us to commit to a specific bitmap by splitting it into
@ -156,7 +156,7 @@ impl BitmapAccumulator {
let mut pmmr = PMMR::at(&mut self.backend, last_pos);
let rewind_pos = pmmr::insertion_to_pmmr_index(chunk_idx);
pmmr.rewind(rewind_pos, &Bitmap::create())
.map_err(ErrorKind::Other)?;
.map_err(Error::Other)?;
Ok(())
}
@ -178,7 +178,7 @@ impl BitmapAccumulator {
let last_pos = self.backend.size();
PMMR::at(&mut self.backend, last_pos)
.push(&chunk)
.map_err(|e| ErrorKind::Other(e).into())
.map_err(Error::Other)
}
/// The root hash of the bitmap accumulator MMR.

View file

@ -16,7 +16,7 @@
use crate::core::core::pmmr::{ReadablePMMR, ReadonlyPMMR, RewindablePMMR};
use crate::core::core::{BlockHeader, TxKernel};
use crate::error::{Error, ErrorKind};
use crate::error::Error;
use grin_store::pmmr::PMMRBackend;
/// Rewindable (but readonly) view of the kernel set (based on kernel MMR).
@ -40,7 +40,7 @@ impl<'a> RewindableKernelView<'a> {
pub fn rewind(&mut self, header: &BlockHeader) -> Result<(), Error> {
self.pmmr
.rewind(header.kernel_mmr_size)
.map_err(&ErrorKind::TxHashSetErr)?;
.map_err(&Error::TxHashSetErr)?;
// Update our header to reflect the one we rewound to.
self.header = header.clone();
@ -57,9 +57,9 @@ impl<'a> RewindableKernelView<'a> {
let root = self
.readonly_pmmr()
.root()
.map_err(|_| ErrorKind::InvalidRoot)?;
.map_err(|_| Error::InvalidRoot)?;
if root != self.header.kernel_root {
return Err(ErrorKind::InvalidTxHashSet(format!(
return Err(Error::InvalidTxHashSet(format!(
"Kernel root at {} does not match",
self.header.height
))

View file

@ -19,7 +19,7 @@ use std::{sync::Arc, time::Instant};
use crate::core::core::hash::Hash;
use crate::core::core::pmmr::ReadablePMMR;
use crate::core::core::{BlockHeader, OutputIdentifier, Segment, SegmentIdentifier, TxKernel};
use crate::error::{Error, ErrorKind};
use crate::error::Error;
use crate::txhashset::{BitmapAccumulator, BitmapChunk, TxHashSet};
use crate::util::secp::pedersen::RangeProof;
use crate::util::RwLock;
@ -74,14 +74,14 @@ impl Segmenter {
fn output_root(&self) -> Result<Hash, Error> {
let txhashset = self.txhashset.read();
let pmmr = txhashset.output_pmmr_at(&self.header);
let root = pmmr.root().map_err(&ErrorKind::TxHashSetErr)?;
let root = pmmr.root().map_err(&Error::TxHashSetErr)?;
Ok(root)
}
/// The root of the bitmap snapshot PMMR.
fn bitmap_root(&self) -> Result<Hash, Error> {
let pmmr = self.bitmap_snapshot.readonly_pmmr();
let root = pmmr.root().map_err(&ErrorKind::TxHashSetErr)?;
let root = pmmr.root().map_err(&Error::TxHashSetErr)?;
Ok(root)
}

View file

@ -27,7 +27,7 @@ use crate::core::core::{
};
use crate::core::global;
use crate::core::ser::{PMMRable, ProtocolVersion};
use crate::error::{Error, ErrorKind};
use crate::error::Error;
use crate::linked_list::{ListIndex, PruneableListIndex, RewindableListIndex};
use crate::store::{self, Batch, ChainStore};
use crate::txhashset::bitmap_accumulator::{BitmapAccumulator, BitmapChunk};
@ -140,7 +140,7 @@ impl PMMRHandle<BlockHeader> {
head.hash(),
head.height
);
return Err(ErrorKind::Other("header PMMR inconsistent".to_string()).into());
return Err(Error::Other("header PMMR inconsistent".to_string()));
}
// use next header pos to find our size.
@ -165,14 +165,14 @@ impl PMMRHandle<BlockHeader> {
/// Get the header hash at the specified height based on the current header MMR state.
pub fn get_header_hash_by_height(&self, height: u64) -> Result<Hash, Error> {
if height >= self.size {
return Err(ErrorKind::InvalidHeaderHeight(height).into());
return Err(Error::InvalidHeaderHeight(height));
}
let pos = pmmr::insertion_to_pmmr_index(height);
let header_pmmr = ReadonlyPMMR::at(&self.backend, self.size);
if let Some(entry) = header_pmmr.get_data(pos) {
Ok(entry.hash())
} else {
Err(ErrorKind::Other("get header hash by height".to_string()).into())
Err(Error::Other("get header hash by height".to_string()))
}
}
@ -180,14 +180,14 @@ impl PMMRHandle<BlockHeader> {
/// Find the last leaf pos based on MMR size and return its header hash.
pub fn head_hash(&self) -> Result<Hash, Error> {
if self.size == 0 {
return Err(ErrorKind::Other("MMR empty, no head".to_string()).into());
return Err(Error::Other("MMR empty, no head".to_string()));
}
let header_pmmr = ReadonlyPMMR::at(&self.backend, self.size);
let leaf_pos = pmmr::bintree_rightmost(self.size - 1);
if let Some(entry) = header_pmmr.get_data(leaf_pos) {
Ok(entry.hash())
} else {
Err(ErrorKind::Other("failed to find head hash".to_string()).into())
Err(Error::Other("failed to find head hash".to_string()))
}
}
@ -314,7 +314,9 @@ impl TxHashSet {
commit_index,
})
} else {
Err(ErrorKind::TxHashSetErr("failed to open kernel PMMR".to_string()).into())
Err(Error::TxHashSetErr(
"failed to open kernel PMMR".to_string(),
))
}
}
@ -358,7 +360,7 @@ impl TxHashSet {
}
}
Ok(None) => Ok(None),
Err(e) => Err(ErrorKind::StoreErr(e, "txhashset unspent check".to_string()).into()),
Err(e) => Err(Error::StoreErr(e, "txhashset unspent check".to_string())),
}
}
@ -502,7 +504,7 @@ impl TxHashSet {
let pos0 = self.commit_index.get_output_pos(&commit)?;
PMMR::at(&mut self.output_pmmr_h.backend, self.output_pmmr_h.size)
.merkle_proof(pos0)
.map_err(|_| ErrorKind::MerkleProof.into())
.map_err(|_| Error::MerkleProof)
}
/// Compact the MMR data files and flush the rm logs
@ -1031,7 +1033,7 @@ impl<'a> HeaderExtension<'a> {
if let Some(hash) = self.get_header_hash_by_height(height) {
Ok(batch.get_block_header(&hash)?)
} else {
Err(ErrorKind::Other("get header by height".to_string()).into())
Err(Error::Other("get header by height".to_string()))
}
}
@ -1059,7 +1061,7 @@ impl<'a> HeaderExtension<'a> {
/// This may be either the header MMR or the sync MMR depending on the
/// extension.
pub fn apply_header(&mut self, header: &BlockHeader) -> Result<(), Error> {
self.pmmr.push(header).map_err(&ErrorKind::TxHashSetErr)?;
self.pmmr.push(header).map_err(&Error::TxHashSetErr)?;
self.head = Tip::from_header(header);
Ok(())
}
@ -1078,7 +1080,7 @@ impl<'a> HeaderExtension<'a> {
let header_pos = 1 + pmmr::insertion_to_pmmr_index(header.height);
self.pmmr
.rewind(header_pos, &Bitmap::create())
.map_err(&ErrorKind::TxHashSetErr)?;
.map_err(&Error::TxHashSetErr)?;
// Update our head to reflect the header we rewound to.
self.head = Tip::from_header(header);
@ -1093,7 +1095,7 @@ impl<'a> HeaderExtension<'a> {
/// The root of the header MMR for convenience.
pub fn root(&self) -> Result<Hash, Error> {
Ok(self.pmmr.root().map_err(|_| ErrorKind::InvalidRoot)?)
Ok(self.pmmr.root().map_err(|_| Error::InvalidRoot)?)
}
/// Validate the prev_root of the header against the root of the current header MMR.
@ -1104,7 +1106,7 @@ impl<'a> HeaderExtension<'a> {
return Ok(());
}
if self.root()? != header.prev_root {
Err(ErrorKind::InvalidRoot.into())
Err(Error::InvalidRoot)
} else {
Ok(())
}
@ -1313,11 +1315,11 @@ impl<'a> Extension<'a> {
Ok(true) => {
self.rproof_pmmr
.prune(pos.pos - 1)
.map_err(ErrorKind::TxHashSetErr)?;
.map_err(Error::TxHashSetErr)?;
Ok(())
}
Ok(false) => Err(ErrorKind::AlreadySpent(commit).into()),
Err(e) => Err(ErrorKind::TxHashSetErr(e).into()),
Ok(false) => Err(Error::AlreadySpent(commit)),
Err(e) => Err(Error::TxHashSetErr(e)),
}
}
@ -1327,7 +1329,7 @@ impl<'a> Extension<'a> {
if let Ok(pos0) = batch.get_output_pos(&commit) {
if let Some(out_mmr) = self.output_pmmr.get_data(pos0) {
if out_mmr.commitment() == commit {
return Err(ErrorKind::DuplicateCommitment(commit).into());
return Err(Error::DuplicateCommitment(commit));
}
}
}
@ -1335,27 +1337,27 @@ impl<'a> Extension<'a> {
let output_pos = self
.output_pmmr
.push(&out.identifier())
.map_err(&ErrorKind::TxHashSetErr)?;
.map_err(&Error::TxHashSetErr)?;
// push the rangeproof to the MMR.
let rproof_pos = self
.rproof_pmmr
.push(&out.proof())
.map_err(&ErrorKind::TxHashSetErr)?;
.map_err(&Error::TxHashSetErr)?;
// The output and rproof MMRs should be exactly the same size
// and we should have inserted to both in exactly the same pos.
{
if self.output_pmmr.unpruned_size() != self.rproof_pmmr.unpruned_size() {
return Err(
ErrorKind::Other("output vs rproof MMRs different sizes".to_string()).into(),
);
return Err(Error::Other(
"output vs rproof MMRs different sizes".to_string(),
));
}
if output_pos != rproof_pos {
return Err(
ErrorKind::Other("output vs rproof MMRs different pos".to_string()).into(),
);
return Err(Error::Other(
"output vs rproof MMRs different pos".to_string(),
));
}
}
Ok(1 + output_pos)
@ -1419,18 +1421,18 @@ impl<'a> Extension<'a> {
// Roll back the genesis output
self.output_pmmr
.rewind(0, &Bitmap::create())
.map_err(&ErrorKind::TxHashSetErr)?;
.map_err(&Error::TxHashSetErr)?;
}
self.output_pmmr
.push_pruned_subtree(hashes[idx], pos0)
.map_err(&ErrorKind::TxHashSetErr)?;
.map_err(&Error::TxHashSetErr)?;
}
}
OrderedHashLeafNode::Leaf(idx, pos0) => {
if pos0 == self.output_pmmr.size {
self.output_pmmr
.push(&leaf_data[idx])
.map_err(&ErrorKind::TxHashSetErr)?;
.map_err(&Error::TxHashSetErr)?;
}
let pmmr_index = pmmr::pmmr_leaf_to_insertion_index(pos0);
match pmmr_index {
@ -1463,18 +1465,18 @@ impl<'a> Extension<'a> {
// Roll back the genesis output
self.rproof_pmmr
.rewind(0, &Bitmap::create())
.map_err(&ErrorKind::TxHashSetErr)?;
.map_err(&Error::TxHashSetErr)?;
}
self.rproof_pmmr
.push_pruned_subtree(hashes[idx], pos0)
.map_err(&ErrorKind::TxHashSetErr)?;
.map_err(&Error::TxHashSetErr)?;
}
}
OrderedHashLeafNode::Leaf(idx, pos0) => {
if pos0 == self.rproof_pmmr.size {
self.rproof_pmmr
.push(&leaf_data[idx])
.map_err(&ErrorKind::TxHashSetErr)?;
.map_err(&Error::TxHashSetErr)?;
}
let pmmr_index = pmmr::pmmr_leaf_to_insertion_index(pos0);
match pmmr_index {
@ -1517,7 +1519,7 @@ impl<'a> Extension<'a> {
for insert in self.sort_pmmr_hashes_and_leaves(vec![], leaf_pos, Some(0)) {
match insert {
OrderedHashLeafNode::Hash(_, _) => {
return Err(ErrorKind::InvalidSegment(
return Err(Error::InvalidSegment(
"Kernel PMMR is non-prunable, should not have hash data".to_string(),
)
.into());
@ -1526,7 +1528,7 @@ impl<'a> Extension<'a> {
if pos0 == self.kernel_pmmr.size {
self.kernel_pmmr
.push(&leaf_data[idx])
.map_err(&ErrorKind::TxHashSetErr)?;
.map_err(&Error::TxHashSetErr)?;
}
}
}
@ -1539,7 +1541,7 @@ impl<'a> Extension<'a> {
let pos = self
.kernel_pmmr
.push(kernel)
.map_err(&ErrorKind::TxHashSetErr)?;
.map_err(&Error::TxHashSetErr)?;
Ok(1 + pos)
}
@ -1560,7 +1562,7 @@ impl<'a> Extension<'a> {
let merkle_proof = self
.output_pmmr
.merkle_proof(pos0)
.map_err(&ErrorKind::TxHashSetErr)?;
.map_err(&Error::TxHashSetErr)?;
Ok(merkle_proof)
}
@ -1572,12 +1574,8 @@ impl<'a> Extension<'a> {
/// across).
pub fn snapshot(&mut self, batch: &Batch<'_>) -> Result<(), Error> {
let header = batch.get_block_header(&self.head.last_block_h)?;
self.output_pmmr
.snapshot(&header)
.map_err(ErrorKind::Other)?;
self.rproof_pmmr
.snapshot(&header)
.map_err(ErrorKind::Other)?;
self.output_pmmr.snapshot(&header).map_err(Error::Other)?;
self.rproof_pmmr.snapshot(&header).map_err(Error::Other)?;
Ok(())
}
@ -1712,13 +1710,13 @@ impl<'a> Extension<'a> {
let bitmap: Bitmap = spent_pos.iter().map(|x| *x as u32).collect();
self.output_pmmr
.rewind(output_pos, &bitmap)
.map_err(&ErrorKind::TxHashSetErr)?;
.map_err(&Error::TxHashSetErr)?;
self.rproof_pmmr
.rewind(output_pos, &bitmap)
.map_err(&ErrorKind::TxHashSetErr)?;
.map_err(&Error::TxHashSetErr)?;
self.kernel_pmmr
.rewind(kernel_pos, &Bitmap::create())
.map_err(&ErrorKind::TxHashSetErr)?;
.map_err(&Error::TxHashSetErr)?;
Ok(())
}
@ -1727,20 +1725,11 @@ impl<'a> Extension<'a> {
pub fn roots(&self) -> Result<TxHashSetRoots, Error> {
Ok(TxHashSetRoots {
output_roots: OutputRoots {
pmmr_root: self
.output_pmmr
.root()
.map_err(|_| ErrorKind::InvalidRoot)?,
pmmr_root: self.output_pmmr.root().map_err(|_| Error::InvalidRoot)?,
bitmap_root: self.bitmap_accumulator.root(),
},
rproof_root: self
.rproof_pmmr
.root()
.map_err(|_| ErrorKind::InvalidRoot)?,
kernel_root: self
.kernel_pmmr
.root()
.map_err(|_| ErrorKind::InvalidRoot)?,
rproof_root: self.rproof_pmmr.root().map_err(|_| Error::InvalidRoot)?,
kernel_root: self.kernel_pmmr.root().map_err(|_| Error::InvalidRoot)?,
})
}
@ -1763,7 +1752,7 @@ impl<'a> Extension<'a> {
header.kernel_mmr_size,
) != self.sizes()
{
Err(ErrorKind::InvalidMMRSize.into())
Err(Error::InvalidMMRSize)
} else {
Ok(())
}
@ -1774,13 +1763,13 @@ impl<'a> Extension<'a> {
// validate all hashes and sums within the trees
if let Err(e) = self.output_pmmr.validate() {
return Err(ErrorKind::InvalidTxHashSet(e).into());
return Err(Error::InvalidTxHashSet(e));
}
if let Err(e) = self.rproof_pmmr.validate() {
return Err(ErrorKind::InvalidTxHashSet(e).into());
return Err(Error::InvalidTxHashSet(e));
}
if let Err(e) = self.kernel_pmmr.validate() {
return Err(ErrorKind::InvalidTxHashSet(e).into());
return Err(Error::InvalidTxHashSet(e));
}
debug!(
@ -1856,7 +1845,7 @@ impl<'a> Extension<'a> {
)?;
if let Some(ref s) = stop_state {
if s.is_stopped() {
return Err(ErrorKind::Stopped.into());
return Err(Error::Stopped.into());
}
}
@ -1864,7 +1853,7 @@ impl<'a> Extension<'a> {
self.verify_kernel_signatures(status, stop_state.clone())?;
if let Some(ref s) = stop_state {
if s.is_stopped() {
return Err(ErrorKind::Stopped.into());
return Err(Error::Stopped.into());
}
}
}
@ -1925,7 +1914,7 @@ impl<'a> Extension<'a> {
let kernel = self
.kernel_pmmr
.get_data(n)
.ok_or_else(|| ErrorKind::TxKernelNotFound)?;
.ok_or_else(|| Error::TxKernelNotFound)?;
tx_kernels.push(kernel);
}
@ -1992,8 +1981,8 @@ impl<'a> Extension<'a> {
// Output and corresponding rangeproof *must* exist.
// It is invalid for either to be missing and we fail immediately in this case.
match (output, proof) {
(None, _) => return Err(ErrorKind::OutputNotFound.into()),
(_, None) => return Err(ErrorKind::RangeproofNotFound.into()),
(None, _) => return Err(Error::OutputNotFound),
(_, None) => return Err(Error::RangeproofNotFound),
(Some(output), Some(proof)) => {
commits.push(output.commit);
proofs.push(proof);
@ -2179,7 +2168,7 @@ pub fn txhashset_replace(from: PathBuf, to: PathBuf) -> Result<(), Error> {
// rename the 'from' folder as the 'to' folder
if let Err(e) = fs::rename(from.join(TXHASHSET_SUBDIR), to.join(TXHASHSET_SUBDIR)) {
error!("hashset_replace fail on {}. err: {}", TXHASHSET_SUBDIR, e);
Err(ErrorKind::TxHashSetErr("txhashset replacing fail".to_string()).into())
Err(Error::TxHashSetErr("txhashset replacing fail".to_string()))
} else {
Ok(())
}
@ -2237,7 +2226,7 @@ fn apply_kernel_rules(kernel: &TxKernel, pos: CommitPos, batch: &Batch<'_>) -> R
pos.height, prev, relative_height
);
if diff < relative_height.into() {
return Err(ErrorKind::NRDRelativeHeight.into());
return Err(Error::NRDRelativeHeight);
}
}
debug!(

View file

@ -18,7 +18,7 @@ use crate::core::core::hash::{Hash, Hashed};
use crate::core::core::pmmr::{self, ReadablePMMR, ReadonlyPMMR};
use crate::core::core::{Block, BlockHeader, Inputs, Output, OutputIdentifier, Transaction};
use crate::core::global;
use crate::error::{Error, ErrorKind};
use crate::error::Error;
use crate::store::Batch;
use crate::types::CommitPos;
use crate::util::secp::pedersen::{Commitment, RangeProof};
@ -104,7 +104,7 @@ impl<'a> UTXOView<'a> {
Ok((out, pos))
} else {
error!("input mismatch: {:?}, {:?}, {:?}", out, pos, input);
Err(ErrorKind::Other("input mismatch".into()).into())
Err(Error::Other("input mismatch".into()))
}
})
})
@ -129,14 +129,13 @@ impl<'a> UTXOView<'a> {
return Ok((out, pos1));
} else {
error!("input mismatch: {:?}, {:?}, {:?}", out, pos1, input);
return Err(ErrorKind::Other(
return Err(Error::Other(
"input mismatch (output_pos index mismatch?)".into(),
)
.into());
));
}
}
}
Err(ErrorKind::AlreadySpent(input).into())
Err(Error::AlreadySpent(input))
}
// Output is valid if it would not result in a duplicate commitment in the output MMR.
@ -144,7 +143,7 @@ impl<'a> UTXOView<'a> {
if let Ok(pos0) = batch.get_output_pos(&output.commitment()) {
if let Some(out_mmr) = self.output_pmmr.get_data(pos0) {
if out_mmr.commitment() == output.commitment() {
return Err(ErrorKind::DuplicateCommitment(output.commitment()).into());
return Err(Error::DuplicateCommitment(output.commitment()));
}
}
}
@ -156,9 +155,9 @@ impl<'a> UTXOView<'a> {
match self.output_pmmr.get_data(pos0) {
Some(output_id) => match self.rproof_pmmr.get_data(pos0) {
Some(rproof) => Ok(output_id.into_output(rproof)),
None => Err(ErrorKind::RangeproofNotFound.into()),
None => Err(Error::RangeproofNotFound),
},
None => Err(ErrorKind::OutputNotFound.into()),
None => Err(Error::OutputNotFound),
}
}
@ -194,7 +193,7 @@ impl<'a> UTXOView<'a> {
// If we have not yet reached 1440 blocks then
// we can fail immediately as coinbase cannot be mature.
if height < global::coinbase_maturity() {
return Err(ErrorKind::ImmatureCoinbase.into());
return Err(Error::ImmatureCoinbase);
}
// Find the "cutoff" pos in the output MMR based on the
@ -206,7 +205,7 @@ impl<'a> UTXOView<'a> {
// If any output pos exceed the cutoff_pos
// we know they have not yet sufficiently matured.
if pos > cutoff_pos {
return Err(ErrorKind::ImmatureCoinbase.into());
return Err(Error::ImmatureCoinbase);
}
}
@ -231,7 +230,7 @@ impl<'a> UTXOView<'a> {
let header = batch.get_block_header(&hash)?;
Ok(header)
} else {
Err(ErrorKind::Other("get header by height".to_string()).into())
Err(Error::Other("get header by height".to_string()))
}
}
}

View file

@ -21,7 +21,7 @@ use crate::core::core::hash::{Hash, Hashed, ZERO_HASH};
use crate::core::core::{pmmr, Block, BlockHeader, HeaderVersion, SegmentTypeIdentifier};
use crate::core::pow::Difficulty;
use crate::core::ser::{self, PMMRIndexHashable, Readable, Reader, Writeable, Writer};
use crate::error::{Error, ErrorKind};
use crate::error::Error;
use crate::util::{RwLock, RwLockWriteGuard};
bitflags! {
@ -400,7 +400,7 @@ impl TxHashSetRoots {
|| header.range_proof_root != self.rproof_root
|| header.kernel_root != self.kernel_root
{
Err(ErrorKind::InvalidRoot.into())
Err(Error::InvalidRoot)
} else {
Ok(())
}

View file

@ -954,10 +954,12 @@ fn test_overflow_cached_rangeproof() {
let res = chain.process_block(next, chain::Options::SKIP_POW);
assert_eq!(
res.unwrap_err().kind(),
chain::ErrorKind::InvalidBlockProof(block::Error::Transaction(
transaction::Error::Secp(util::secp::Error::InvalidRangeProof)
))
res.unwrap_err(),
chain::Error::InvalidBlockProof {
source: block::Error::Transaction(transaction::Error::Secp(
util::secp::Error::InvalidRangeProof
))
}
);
}
clean_output_dir(".grin_overflow");

View file

@ -136,8 +136,8 @@ fn process_block_cut_through() -> Result<(), chain::Error> {
// Transaction will not validate against the chain (utxo).
assert_eq!(
chain.validate_tx(&tx).map_err(|e| e.kind()),
Err(chain::ErrorKind::DuplicateCommitment(commit)),
chain.validate_tx(&tx),
Err(chain::Error::DuplicateCommitment(commit)),
);
// Build a block with this single invalid transaction.
@ -166,12 +166,12 @@ fn process_block_cut_through() -> Result<(), chain::Error> {
let batch = store.batch()?;
let mut ctx = chain.new_ctx(Options::NONE, batch, &mut header_pmmr, &mut txhashset)?;
let res = pipe::process_block(&block, &mut ctx).map_err(|e| e.kind());
let res = pipe::process_block(&block, &mut ctx);
assert_eq!(
res,
Err(chain::ErrorKind::InvalidBlockProof(
block::Error::Transaction(transaction::Error::CutThrough)
))
Err(chain::Error::InvalidBlockProof {
source: block::Error::Transaction(transaction::Error::CutThrough)
})
);
}

View file

@ -14,7 +14,7 @@
mod chain_test_helper;
use self::chain_test_helper::{clean_output_dir, init_chain, mine_chain};
use chain::ErrorKind;
use chain::Error;
use chain::Tip;
use grin_chain as chain;
use grin_core::core::hash::Hashed;
@ -42,8 +42,8 @@ fn check_known() {
let chain = init_chain(chain_dir, genesis.clone());
let res = chain.process_block(latest.clone(), chain::Options::NONE);
assert_eq!(
res.unwrap_err().kind(),
ErrorKind::Unfit("duplicate block".to_string()).into()
res.unwrap_err(),
Error::Unfit("duplicate block".to_string())
);
}
@ -52,8 +52,8 @@ fn check_known() {
let chain = init_chain(chain_dir, genesis.clone());
let res = chain.process_block(genesis.clone(), chain::Options::NONE);
assert_eq!(
res.unwrap_err().kind(),
ErrorKind::Unfit("duplicate block".to_string()).into()
res.unwrap_err(),
Error::Unfit("duplicate block".to_string())
);
}

View file

@ -13,7 +13,7 @@
// limitations under the License.
use self::chain::types::NoopAdapter;
use self::chain::ErrorKind;
use self::chain::Error;
use self::core::core::KernelFeatures;
use self::core::global::{self, ChainTypes};
use self::core::libtx::{self, build, ProofBuilder};
@ -121,8 +121,8 @@ fn test_coinbase_maturity() {
// is not valid at the current block height given the current chain state.
match chain.verify_coinbase_maturity(&coinbase_txn.inputs()) {
Ok(_) => {}
Err(e) => match e.kind() {
ErrorKind::ImmatureCoinbase => {}
Err(e) => match e {
Error::ImmatureCoinbase => {}
_ => panic!("Expected transaction error with immature coinbase."),
},
}
@ -207,8 +207,8 @@ fn test_coinbase_maturity() {
// is not valid at the current block height given the current chain state.
match chain.verify_coinbase_maturity(&coinbase_txn.inputs()) {
Ok(_) => {}
Err(e) => match e.kind() {
ErrorKind::ImmatureCoinbase => {}
Err(e) => match e {
Error::ImmatureCoinbase => {}
_ => panic!("Expected transaction error with immature coinbase."),
},
}

View file

@ -19,7 +19,7 @@ use grin_keychain as keychain;
mod chain_test_helper;
use self::chain_test_helper::{clean_output_dir, mine_chain};
use crate::chain::{Chain, ErrorKind, Options};
use crate::chain::{Chain, Error, Options};
use crate::core::{
consensus,
core::{block, Block},
@ -71,12 +71,10 @@ fn test_header_weight_validation() {
// Note: We will validate this even if just processing the header.
header.output_mmr_size = 1_000;
let res = chain
.process_block_header(&header, Options::NONE)
.map_err(|e| e.kind());
let res = chain.process_block_header(&header, Options::NONE);
// Weight validation is done via transaction body and results in a slightly counter-intuitive tx error.
assert_eq!(res, Err(ErrorKind::Block(block::Error::TooHeavy)));
assert_eq!(res, Err(Error::Block(block::Error::TooHeavy)));
clean_output_dir(chain_dir);
}

View file

@ -14,8 +14,6 @@ blake2 = { package = "blake2-rfc", version = "0.2"}
byteorder = "1"
croaring = "0.4.6"
enum_primitive = "0.1"
failure = "0.1"
failure_derive = "0.1"
lazy_static = "1"
lru-cache = "0.1"
num = "0.2"
@ -24,6 +22,7 @@ rand = "0.6"
serde = "1"
serde_derive = "1"
siphasher = "0.3"
thiserror = "1"
log = "0.4"
chrono = { version = "0.4.11", features = ["serde"] }
zeroize = { version = "1.1", features =["zeroize_derive"] }

1418
core/fuzz/Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -3,15 +3,12 @@ name = "grin_core-fuzz"
version = "0.0.3"
authors = ["Grin Developers <mimblewimble@lists.launchpad.net>"]
publish = false
[package.metadata]
cargo-fuzz = true
edition = "2018"
[dependencies]
libfuzzer-sys = "0.4.0"
grin_core = { path = ".."}
grin_keychain = { path = "../../keychain"}
[dependencies.libfuzzer-sys]
git = "https://github.com/rust-fuzz/libfuzzer-sys.git"
# Prevent this from interfering with workspaces
[workspace]

View file

@ -1,12 +1,13 @@
#![no_main]
use libfuzzer_sys::fuzz_target;
extern crate grin_core;
#[macro_use]
extern crate libfuzzer_sys;
use grin_core::core::UntrustedBlock;
use grin_core::ser;
use grin_core::ser::{self, DeserializationMode};
fuzz_target!(|data: &[u8]| {
let mut d = data.clone();
let _t: Result<UntrustedBlock, ser::Error> = ser::deserialize(&mut d, ser::ProtocolVersion(1));
let _t: Result<UntrustedBlock, ser::Error> =
ser::deserialize(&mut d, ser::ProtocolVersion(1), DeserializationMode::Full);
});

View file

@ -1,12 +1,13 @@
#![no_main]
use libfuzzer_sys::fuzz_target;
extern crate grin_core;
#[macro_use]
extern crate libfuzzer_sys;
use grin_core::core::UntrustedBlock;
use grin_core::ser;
use grin_core::ser::{self, DeserializationMode};
fuzz_target!(|data: &[u8]| {
let mut d = data.clone();
let _t: Result<UntrustedBlock, ser::Error> = ser::deserialize(&mut d, ser::ProtocolVersion(2));
let _t: Result<UntrustedBlock, ser::Error> =
ser::deserialize(&mut d, ser::ProtocolVersion(2), DeserializationMode::Full);
});

View file

@ -1,13 +1,13 @@
#![no_main]
use libfuzzer_sys::fuzz_target;
extern crate grin_core;
#[macro_use]
extern crate libfuzzer_sys;
use grin_core::core::UntrustedCompactBlock;
use grin_core::ser;
use grin_core::ser::{self, DeserializationMode};
fuzz_target!(|data: &[u8]| {
let mut d = data.clone();
let _t: Result<UntrustedCompactBlock, ser::Error> =
ser::deserialize(&mut d, ser::ProtocolVersion(1));
ser::deserialize(&mut d, ser::ProtocolVersion(1), DeserializationMode::Full);
});

View file

@ -1,13 +1,13 @@
#![no_main]
use libfuzzer_sys::fuzz_target;
extern crate grin_core;
#[macro_use]
extern crate libfuzzer_sys;
use grin_core::core::UntrustedCompactBlock;
use grin_core::ser;
use grin_core::ser::{self, DeserializationMode};
fuzz_target!(|data: &[u8]| {
let mut d = data.clone();
let _t: Result<UntrustedCompactBlock, ser::Error> =
ser::deserialize(&mut d, ser::ProtocolVersion(2));
ser::deserialize(&mut d, ser::ProtocolVersion(2), DeserializationMode::Full);
});

View file

@ -1,12 +1,13 @@
#![no_main]
use libfuzzer_sys::fuzz_target;
extern crate grin_core;
#[macro_use]
extern crate libfuzzer_sys;
use grin_core::core::Transaction;
use grin_core::ser;
use grin_core::ser::{self, DeserializationMode};
fuzz_target!(|data: &[u8]| {
let mut d = data.clone();
let _t: Result<Transaction, ser::Error> = ser::deserialize(&mut d, ser::ProtocolVersion(1));
let _t: Result<Transaction, ser::Error> =
ser::deserialize(&mut d, ser::ProtocolVersion(1), DeserializationMode::Full);
});

View file

@ -1,12 +1,13 @@
#![no_main]
use libfuzzer_sys::fuzz_target;
extern crate grin_core;
#[macro_use]
extern crate libfuzzer_sys;
use grin_core::core::Transaction;
use grin_core::ser;
use grin_core::ser::{self, DeserializationMode};
fuzz_target!(|data: &[u8]| {
let mut d = data.clone();
let _t: Result<Transaction, ser::Error> = ser::deserialize(&mut d, ser::ProtocolVersion(2));
let _t: Result<Transaction, ser::Error> =
ser::deserialize(&mut d, ser::ProtocolVersion(2), DeserializationMode::Full);
});

View file

@ -36,10 +36,10 @@ pub use self::pmmr::segment::*;
pub use self::transaction::*;
/// Common errors
#[derive(Fail, Debug)]
#[derive(thiserror::Error, Debug)]
pub enum Error {
/// Human readable representation of amount is invalid
#[fail(display = "Amount string was invalid")]
#[error("Amount string was invalid")]
InvalidAmountString,
}

View file

@ -37,7 +37,7 @@ use util::from_hex;
use util::{secp, static_secp_instance};
/// Errors thrown by Block validation
#[derive(Debug, Clone, Eq, PartialEq, Fail)]
#[derive(Debug, Clone, Eq, PartialEq, thiserror::Error)]
pub enum Error {
/// The sum of output minus input commitments does not
/// match the sum of kernel commitments

View file

@ -14,26 +14,25 @@
//! The Committed trait and associated errors.
use failure::Fail;
use keychain::BlindingFactor;
use util::secp::key::SecretKey;
use util::secp::pedersen::Commitment;
use util::{secp, secp_static, static_secp_instance};
/// Errors from summing and verifying kernel excesses via committed trait.
#[derive(Debug, Clone, PartialEq, Eq, Fail, Serialize, Deserialize)]
#[derive(Debug, Clone, PartialEq, Eq, thiserror::Error, Serialize, Deserialize)]
pub enum Error {
/// Keychain related error.
#[fail(display = "Keychain error {}", _0)]
#[error("Keychain error {0}")]
Keychain(keychain::Error),
/// Secp related error.
#[fail(display = "Secp error {}", _0)]
#[error("Secp error {0}")]
Secp(secp::Error),
/// Kernel sums do not equal output sums.
#[fail(display = "Kernel sum mismatch")]
#[error("Kernel sum mismatch")]
KernelSumMismatch,
/// Committed overage (fee or reward) is invalid
#[fail(display = "Invalid value")]
#[error("Invalid value")]
InvalidValue,
}

View file

@ -19,7 +19,7 @@ use crate::core::pmmr::{self, Backend, ReadablePMMR, ReadonlyPMMR};
use crate::ser::{Error, PMMRIndexHashable, PMMRable, Readable, Reader, Writeable, Writer};
use croaring::Bitmap;
use std::cmp::min;
use std::fmt::{self, Debug};
use std::fmt::Debug;
#[derive(Clone, Debug, Eq, PartialEq)]
/// Possible segment types, according to this desegmenter
@ -54,30 +54,23 @@ impl SegmentTypeIdentifier {
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
#[derive(Clone, Debug, PartialEq, Eq, thiserror::Error)]
/// Error related to segment creation or validation
pub enum SegmentError {
/// An expected leaf was missing
#[error("Missing leaf at pos {0}")]
MissingLeaf(u64),
/// An expected hash was missing
#[error("Missing hash at pos {0}")]
MissingHash(u64),
/// The segment does not exist
#[error("Segment does not exist")]
NonExistent,
/// Mismatch between expected and actual root hash
#[error("Root hash mismatch")]
Mismatch,
}
impl fmt::Display for SegmentError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
SegmentError::MissingLeaf(idx) => write!(f, "Missing leaf at pos {}", idx),
SegmentError::MissingHash(idx) => write!(f, "Missing hash at pos {}", idx),
SegmentError::NonExistent => write!(f, "Segment does not exist"),
SegmentError::Mismatch => write!(f, "Root hash mismatch"),
}
}
}
/// Tuple that defines a segment of a given PMMR
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct SegmentIdentifier {

View file

@ -30,8 +30,6 @@ extern crate serde_derive;
#[macro_use]
extern crate log;
#[macro_use]
extern crate failure_derive;
#[macro_use]
pub mod macros;
pub mod consensus;

View file

@ -16,7 +16,7 @@
//! This module interfaces into the underlying
//! [Rust Aggsig library](https://github.com/mimblewimble/rust-secp256k1-zkp/blob/master/src/aggsig.rs)
use crate::libtx::error::{Error, ErrorKind};
use crate::libtx::error::Error;
use keychain::{BlindingFactor, Identifier, Keychain, SwitchCommitmentType};
use util::secp::key::{PublicKey, SecretKey};
use util::secp::pedersen::Commitment;
@ -192,7 +192,7 @@ pub fn verify_partial_sig(
pubkey_sum,
true,
) {
return Err(ErrorKind::Signature("Signature validation error".to_string()).into());
return Err(Error::Signature("Signature validation error".to_string()));
}
Ok(())
}
@ -324,7 +324,7 @@ pub fn verify_single_from_commit(
) -> Result<(), Error> {
let pubkey = commit.to_pubkey(secp)?;
if !verify_single(secp, sig, msg, None, &pubkey, Some(&pubkey), false) {
return Err(ErrorKind::Signature("Signature validation error".to_string()).into());
return Err(Error::Signature("Signature validation error".to_string()));
}
Ok(())
}
@ -392,7 +392,7 @@ pub fn verify_completed_sig(
msg: &secp::Message,
) -> Result<(), Error> {
if !verify_single(secp, sig, msg, None, pubkey, pubkey_sum, true) {
return Err(ErrorKind::Signature("Signature validation error".to_string()).into());
return Err(Error::Signature("Signature validation error".to_string()));
}
Ok(())
}

View file

@ -14,96 +14,40 @@
//! libtx specific errors
use crate::core::transaction;
use failure::{Backtrace, Context, Fail};
use std::fmt::{self, Display};
use util::secp;
/// Lib tx error definition
#[derive(Debug)]
pub struct Error {
inner: Context<ErrorKind>,
}
#[derive(Clone, Debug, Eq, Fail, PartialEq, Serialize, Deserialize)]
#[derive(Clone, Debug, Eq, thiserror::Error, PartialEq, Serialize, Deserialize)]
/// Libtx error types
pub enum ErrorKind {
pub enum Error {
/// SECP error
#[fail(display = "Secp Error")]
Secp(secp::Error),
#[error("Secp Error")]
Secp {
/// SECP error
#[from]
source: secp::Error,
},
/// Keychain error
#[fail(display = "Keychain Error")]
Keychain(keychain::Error),
#[error("Keychain Error")]
Keychain {
/// Keychain error
#[from]
source: keychain::Error,
},
/// Transaction error
#[fail(display = "Transaction Error")]
Transaction(transaction::Error),
#[error("Transaction Error")]
Transaction {
/// Transaction error
#[from]
source: transaction::Error,
},
/// Signature error
#[fail(display = "Signature Error")]
#[error("Signature Error")]
Signature(String),
/// Rangeproof error
#[fail(display = "Rangeproof Error")]
#[error("Rangeproof Error")]
RangeProof(String),
/// Other error
#[fail(display = "Other Error")]
#[error("Other Error")]
Other(String),
}
impl Fail for Error {
fn cause(&self) -> Option<&dyn Fail> {
self.inner.cause()
}
fn backtrace(&self) -> Option<&Backtrace> {
self.inner.backtrace()
}
}
impl Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
Display::fmt(&self.inner, f)
}
}
impl Error {
/// Return errorkind
pub fn kind(&self) -> ErrorKind {
self.inner.get_context().clone()
}
}
impl From<ErrorKind> for Error {
fn from(kind: ErrorKind) -> Error {
Error {
inner: Context::new(kind),
}
}
}
impl From<Context<ErrorKind>> for Error {
fn from(inner: Context<ErrorKind>) -> Error {
Error { inner }
}
}
impl From<secp::Error> for Error {
fn from(error: secp::Error) -> Error {
Error {
inner: Context::new(ErrorKind::Secp(error)),
}
}
}
impl From<keychain::Error> for Error {
fn from(error: keychain::Error) -> Error {
Error {
inner: Context::new(ErrorKind::Keychain(error)),
}
}
}
impl From<transaction::Error> for Error {
fn from(error: transaction::Error) -> Error {
Error {
inner: Context::new(ErrorKind::Transaction(error)),
}
}
}

View file

@ -32,7 +32,7 @@ use crate::core::Transaction;
use crate::global::get_accept_fee_base;
pub use self::proof::ProofBuilder;
pub use crate::libtx::error::{Error, ErrorKind};
pub use crate::libtx::error::Error;
/// Transaction fee calculation given numbers of inputs, outputs, and kernels
pub fn tx_fee(input_len: usize, output_len: usize, kernel_len: usize) -> u64 {

View file

@ -14,7 +14,7 @@
//! Rangeproof library functions
use crate::libtx::error::{Error, ErrorKind};
use crate::libtx::error::Error;
use blake2::blake2b::blake2b;
use keychain::extkey_bip32::BIP32GrinHasher;
use keychain::{Identifier, Keychain, SwitchCommitmentType, ViewKey};
@ -81,7 +81,7 @@ where
{
let nonce = b
.rewind_nonce(secp, &commit)
.map_err(|e| ErrorKind::RangeProof(e.to_string()))?;
.map_err(|e| Error::RangeProof(e.to_string()))?;
let info = secp.rewind_bullet_proof(commit, nonce, extra_data, proof);
if info.is_err() {
return Ok(None);
@ -91,7 +91,7 @@ where
let amount = info.value;
let check = b
.check_output(secp, &commit, amount, info.message)
.map_err(|e| ErrorKind::RangeProof(e.to_string()))?;
.map_err(|e| Error::RangeProof(e.to_string()))?;
Ok(check.map(|(id, switch)| (amount, id, switch)))
}
@ -164,7 +164,7 @@ where
};
let res = blake2b(32, &commit.0, hash);
SecretKey::from_slice(self.keychain.secp(), res.as_bytes())
.map_err(|e| ErrorKind::RangeProof(format!("Unable to create nonce: {:?}", e)).into())
.map_err(|e| Error::RangeProof(format!("Unable to create nonce: {:?}", e)))
}
}
@ -277,7 +277,7 @@ where
fn nonce(&self, commit: &Commitment) -> Result<SecretKey, Error> {
let res = blake2b(32, &commit.0, &self.root_hash);
SecretKey::from_slice(self.keychain.secp(), res.as_bytes())
.map_err(|e| ErrorKind::RangeProof(format!("Unable to create nonce: {:?}", e)).into())
.map_err(|e| Error::RangeProof(format!("Unable to create nonce: {:?}", e)))
}
}
@ -360,7 +360,7 @@ impl ProofBuild for ViewKey {
fn rewind_nonce(&self, secp: &Secp256k1, commit: &Commitment) -> Result<SecretKey, Error> {
let res = blake2b(32, &commit.0, &self.rewind_hash);
SecretKey::from_slice(secp, res.as_bytes())
.map_err(|e| ErrorKind::RangeProof(format!("Unable to create nonce: {:?}", e)).into())
.map_err(|e| Error::RangeProof(format!("Unable to create nonce: {:?}", e)))
}
fn private_nonce(&self, _secp: &Secp256k1, _commit: &Commitment) -> Result<SecretKey, Error> {

View file

@ -25,7 +25,7 @@
use crate::global;
use crate::pow::common::CuckooParams;
use crate::pow::error::{Error, ErrorKind};
use crate::pow::error::Error;
use crate::pow::siphash::siphash_block;
use crate::pow::{PoWContext, Proof};
@ -39,7 +39,7 @@ pub fn new_cuckaroo_ctx(edge_bits: u8, proof_size: usize) -> Result<Box<dyn PoWC
/// Error returned for cuckaroo request beyond HardFork4
pub fn no_cuckaroo_ctx() -> Result<Box<dyn PoWContext>, Error> {
Err(ErrorKind::Verification("no cuckaroo past HardFork4".to_owned()).into())
Err(Error::Verification("no cuckaroo past HardFork4".to_owned()))
}
/// Cuckaroo cycle context. Only includes the verifier for now.
@ -64,7 +64,7 @@ impl PoWContext for CuckarooContext {
fn verify(&self, proof: &Proof) -> Result<(), Error> {
let size = proof.proof_size();
if size != global::proofsize() {
return Err(ErrorKind::Verification("wrong cycle length".to_owned()).into());
return Err(Error::Verification("wrong cycle length".to_owned()).into());
}
let nonces = &proof.nonces;
let mut uvs = vec![0u64; 2 * size];
@ -78,10 +78,10 @@ impl PoWContext for CuckarooContext {
for n in 0..size {
if nonces[n] > self.params.edge_mask {
return Err(ErrorKind::Verification("edge too big".to_owned()).into());
return Err(Error::Verification("edge too big".to_owned()));
}
if n > 0 && nonces[n] <= nonces[n - 1] {
return Err(ErrorKind::Verification("edges not ascending".to_owned()).into());
return Err(Error::Verification("edges not ascending".to_owned()));
}
// 21 is standard siphash rotation constant
let edge: u64 = siphash_block(&self.params.siphash_keys, nonces[n], 21, false);
@ -102,7 +102,7 @@ impl PoWContext for CuckarooContext {
xor1 ^= v;
}
if xor0 | xor1 != 0 {
return Err(ErrorKind::Verification("endpoints don't match up".to_owned()).into());
return Err(Error::Verification("endpoints don't match up".to_owned()));
}
// make prev lists circular
for n in 0..size {
@ -130,13 +130,13 @@ impl PoWContext for CuckarooContext {
if uvs[k] == uvs[i] {
// find other edge endpoint matching one at i
if j != i {
return Err(ErrorKind::Verification("branch in cycle".to_owned()).into());
return Err(Error::Verification("branch in cycle".to_owned()));
}
j = k;
}
}
if j == i {
return Err(ErrorKind::Verification("cycle dead ends".to_owned()).into());
return Err(Error::Verification("cycle dead ends".to_owned()));
}
i = j ^ 1;
n += 1;
@ -147,7 +147,7 @@ impl PoWContext for CuckarooContext {
if n == size {
Ok(())
} else {
Err(ErrorKind::Verification("cycle too short".to_owned()).into())
Err(Error::Verification("cycle too short".to_owned()))
}
}
}

View file

@ -24,7 +24,7 @@
use crate::global;
use crate::pow::common::CuckooParams;
use crate::pow::error::{Error, ErrorKind};
use crate::pow::error::Error;
use crate::pow::siphash::siphash_block;
use crate::pow::{PoWContext, Proof};
@ -58,7 +58,7 @@ impl PoWContext for CuckaroodContext {
fn verify(&self, proof: &Proof) -> Result<(), Error> {
let size = proof.proof_size();
if size != global::proofsize() {
return Err(ErrorKind::Verification("wrong cycle length".to_owned()).into());
return Err(Error::Verification("wrong cycle length".to_owned()));
}
let nonces = &proof.nonces;
let mut uvs = vec![0u64; 2 * size];
@ -74,13 +74,13 @@ impl PoWContext for CuckaroodContext {
for n in 0..size {
let dir = (nonces[n] & 1) as usize;
if ndir[dir] >= size / 2 {
return Err(ErrorKind::Verification("edges not balanced".to_owned()).into());
return Err(Error::Verification("edges not balanced".to_owned()));
}
if nonces[n] > self.params.edge_mask {
return Err(ErrorKind::Verification("edge too big".to_owned()).into());
return Err(Error::Verification("edge too big".to_owned()));
}
if n > 0 && nonces[n] <= nonces[n - 1] {
return Err(ErrorKind::Verification("edges not ascending".to_owned()).into());
return Err(Error::Verification("edges not ascending".to_owned()));
}
// cuckarood uses a non-standard siphash rotation constant 25 as anti-ASIC tweak
let edge: u64 = siphash_block(&self.params.siphash_keys, nonces[n], 25, false);
@ -103,7 +103,7 @@ impl PoWContext for CuckaroodContext {
ndir[dir] += 1;
}
if xor0 | xor1 != 0 {
return Err(ErrorKind::Verification("endpoints don't match up".to_owned()).into());
return Err(Error::Verification("endpoints don't match up".to_owned()));
}
let mut n = 0;
let mut i = 0;
@ -120,14 +120,14 @@ impl PoWContext for CuckaroodContext {
if uvs[k] == uvs[i] {
// find reverse edge endpoint identical to one at i
if j != i {
return Err(ErrorKind::Verification("branch in cycle".to_owned()).into());
return Err(Error::Verification("branch in cycle".to_owned()));
}
j = k;
}
k = prev[k];
}
if j == i {
return Err(ErrorKind::Verification("cycle dead ends".to_owned()).into());
return Err(Error::Verification("cycle dead ends".to_owned()));
}
i = j ^ 1;
n += 1;
@ -138,7 +138,7 @@ impl PoWContext for CuckaroodContext {
if n == size {
Ok(())
} else {
Err(ErrorKind::Verification("cycle too short".to_owned()).into())
Err(Error::Verification("cycle too short".to_owned()))
}
}
}

View file

@ -23,7 +23,7 @@
use crate::global;
use crate::pow::common::CuckooParams;
use crate::pow::error::{Error, ErrorKind};
use crate::pow::error::Error;
use crate::pow::siphash::siphash_block;
use crate::pow::{PoWContext, Proof};
@ -57,7 +57,7 @@ impl PoWContext for CuckaroomContext {
fn verify(&self, proof: &Proof) -> Result<(), Error> {
let size = proof.proof_size();
if size != global::proofsize() {
return Err(ErrorKind::Verification("wrong cycle length".to_owned()).into());
return Err(Error::Verification("wrong cycle length".to_owned()));
}
let nonces = &proof.nonces;
let mut from = vec![0u64; size];
@ -71,10 +71,10 @@ impl PoWContext for CuckaroomContext {
for n in 0..size {
if nonces[n] > self.params.edge_mask {
return Err(ErrorKind::Verification("edge too big".to_owned()).into());
return Err(Error::Verification("edge too big".to_owned()));
}
if n > 0 && nonces[n] <= nonces[n - 1] {
return Err(ErrorKind::Verification("edges not ascending".to_owned()).into());
return Err(Error::Verification("edges not ascending".to_owned()));
}
// 21 is standard siphash rotation constant
let edge: u64 = siphash_block(&self.params.siphash_keys, nonces[n], 21, true);
@ -89,7 +89,7 @@ impl PoWContext for CuckaroomContext {
xor_to ^= to[n];
}
if xor_from != xor_to {
return Err(ErrorKind::Verification("endpoints don't match up".to_owned()).into());
return Err(Error::Verification("endpoints don't match up".to_owned()));
}
let mut visited = vec![false; size];
let mut n = 0;
@ -97,13 +97,13 @@ impl PoWContext for CuckaroomContext {
loop {
// follow cycle
if visited[i] {
return Err(ErrorKind::Verification("branch in cycle".to_owned()).into());
return Err(Error::Verification("branch in cycle".to_owned()));
}
visited[i] = true;
let mut k = head[(to[i] & mask) as usize];
loop {
if k == size {
return Err(ErrorKind::Verification("cycle dead ends".to_owned()).into());
return Err(Error::Verification("cycle dead ends".to_owned()));
}
if from[k] == to[i] {
break;
@ -120,7 +120,7 @@ impl PoWContext for CuckaroomContext {
if n == size {
Ok(())
} else {
Err(ErrorKind::Verification("cycle too short".to_owned()).into())
Err(Error::Verification("cycle too short".to_owned()))
}
}
}

View file

@ -24,7 +24,7 @@
use crate::global;
use crate::pow::common::CuckooParams;
use crate::pow::error::{Error, ErrorKind};
use crate::pow::error::Error;
use crate::pow::siphash::siphash_block;
use crate::pow::{PoWContext, Proof};
@ -58,7 +58,7 @@ impl PoWContext for CuckaroozContext {
fn verify(&self, proof: &Proof) -> Result<(), Error> {
let size = proof.proof_size();
if size != global::proofsize() {
return Err(ErrorKind::Verification("wrong cycle length".to_owned()).into());
return Err(Error::Verification("wrong cycle length".to_owned()));
}
let nonces = &proof.nonces;
let mut uvs = vec![0u64; 2 * size];
@ -70,10 +70,10 @@ impl PoWContext for CuckaroozContext {
for n in 0..size {
if nonces[n] > self.params.edge_mask {
return Err(ErrorKind::Verification("edge too big".to_owned()).into());
return Err(Error::Verification("edge too big".to_owned()));
}
if n > 0 && nonces[n] <= nonces[n - 1] {
return Err(ErrorKind::Verification("edges not ascending".to_owned()).into());
return Err(Error::Verification("edges not ascending".to_owned()));
}
// 21 is standard siphash rotation constant
let edge: u64 = siphash_block(&self.params.siphash_keys, nonces[n], 21, true);
@ -93,7 +93,7 @@ impl PoWContext for CuckaroozContext {
xoruv ^= uvs[2 * n] ^ uvs[2 * n + 1];
}
if xoruv != 0 {
return Err(ErrorKind::Verification("endpoints don't match up".to_owned()).into());
return Err(Error::Verification("endpoints don't match up".to_owned()));
}
// make prev lists circular
for n in 0..(2 * size) {
@ -117,13 +117,13 @@ impl PoWContext for CuckaroozContext {
if uvs[k] == uvs[i] {
// find other edge endpoint matching one at i
if j != i {
return Err(ErrorKind::Verification("branch in cycle".to_owned()).into());
return Err(Error::Verification("branch in cycle".to_owned()));
}
j = k;
}
}
if j == i {
return Err(ErrorKind::Verification("cycle dead ends".to_owned()).into());
return Err(Error::Verification("cycle dead ends".to_owned()));
}
i = j ^ 1;
n += 1;
@ -134,7 +134,7 @@ impl PoWContext for CuckaroozContext {
if n == self.params.proof_size {
Ok(())
} else {
Err(ErrorKind::Verification("cycle too short".to_owned()).into())
Err(Error::Verification("cycle too short".to_owned()))
}
}
}

View file

@ -14,7 +14,7 @@
//! Implementation of Cuckatoo Cycle designed by John Tromp.
use crate::global;
use crate::pow::common::{CuckooParams, Link};
use crate::pow::error::{Error, ErrorKind};
use crate::pow::error::Error;
use crate::pow::{PoWContext, Proof};
use byteorder::{BigEndian, WriteBytesExt};
use croaring::Bitmap;
@ -46,7 +46,7 @@ impl Graph {
/// Create a new graph with given parameters
pub fn new(max_edges: u64, max_sols: u32, proof_size: usize) -> Result<Graph, Error> {
if max_edges >= u64::max_value() / 2 {
return Err(ErrorKind::Verification("graph is to big to build".to_string()).into());
return Err(Error::Verification("graph is to big to build".to_string()));
}
let max_nodes = 2 * max_edges;
Ok(Graph {
@ -79,7 +79,7 @@ impl Graph {
/// Add an edge to the graph
pub fn add_edge(&mut self, u: u64, mut v: u64) -> Result<(), Error> {
if u >= self.max_nodes || v >= self.max_nodes {
return Err(ErrorKind::EdgeAddition.into());
return Err(Error::EdgeAddition);
}
v = v + self.max_nodes;
let adj_u = self.adj_list[(u ^ 1) as usize];
@ -92,7 +92,7 @@ impl Graph {
let ulink = self.links.len() as u64;
let vlink = (self.links.len() + 1) as u64;
if vlink == self.nil {
return Err(ErrorKind::EdgeAddition.into());
return Err(Error::EdgeAddition);
}
self.links.push(Link {
next: self.adj_list[u as usize],
@ -246,7 +246,7 @@ impl CuckatooContext {
self.verify_impl(&s)?;
}
if self.graph.solutions.is_empty() {
Err(ErrorKind::NoSolution.into())
Err(Error::NoSolution)
} else {
Ok(self.graph.solutions.clone())
}
@ -257,7 +257,7 @@ impl CuckatooContext {
pub fn verify_impl(&self, proof: &Proof) -> Result<(), Error> {
let size = proof.proof_size();
if size != global::proofsize() {
return Err(ErrorKind::Verification("wrong cycle length".to_owned()).into());
return Err(Error::Verification("wrong cycle length".to_owned()));
}
let nonces = &proof.nonces;
let mut uvs = vec![0u64; 2 * size];
@ -271,10 +271,10 @@ impl CuckatooContext {
for n in 0..size {
if nonces[n] > self.params.edge_mask {
return Err(ErrorKind::Verification("edge too big".to_owned()).into());
return Err(Error::Verification("edge too big".to_owned()));
}
if n > 0 && nonces[n] <= nonces[n - 1] {
return Err(ErrorKind::Verification("edges not ascending".to_owned()).into());
return Err(Error::Verification("edges not ascending".to_owned()));
}
let u = self.params.sipnode(nonces[n], 0)?;
let v = self.params.sipnode(nonces[n], 1)?;
@ -293,7 +293,7 @@ impl CuckatooContext {
xor1 ^= v;
}
if xor0 | xor1 != 0 {
return Err(ErrorKind::Verification("endpoints don't match up".to_owned()).into());
return Err(Error::Verification("endpoints don't match up".to_owned()));
}
// make prev lists circular
for n in 0..size {
@ -321,13 +321,13 @@ impl CuckatooContext {
if uvs[k] >> 1 == uvs[i] >> 1 {
// find other edge endpoint matching one at i
if j != i {
return Err(ErrorKind::Verification("branch in cycle".to_owned()).into());
return Err(Error::Verification("branch in cycle".to_owned()));
}
j = k;
}
}
if j == i || uvs[j] == uvs[i] {
return Err(ErrorKind::Verification("cycle dead ends".to_owned()).into());
return Err(Error::Verification("cycle dead ends".to_owned()));
}
i = j ^ 1;
n += 1;
@ -338,7 +338,7 @@ impl CuckatooContext {
if n == size {
Ok(())
} else {
Err(ErrorKind::Verification("cycle too short".to_owned()).into())
Err(Error::Verification("cycle too short".to_owned()))
}
}
}

View file

@ -13,83 +13,34 @@
// limitations under the License.
//! Cuckatoo specific errors
use failure::{Backtrace, Context, Fail};
use std::fmt::{self, Display};
use std::io;
/// Cuckatoo solver or validation error
#[derive(Debug)]
pub struct Error {
inner: Context<ErrorKind>,
}
#[derive(Clone, Debug, Eq, Fail, PartialEq)]
#[derive(Debug, thiserror::Error)]
/// Cuckatoo solver or validation error types
pub enum ErrorKind {
pub enum Error {
/// Verification error
#[fail(display = "Verification Error: {}", _0)]
#[error("Verification Error: {0}")]
Verification(String),
/// IO Error
#[fail(display = "IO Error")]
IOError,
#[error("IO Error")]
IOError {
/// Io Error Convert
#[from]
source: std::io::Error,
},
/// Unexpected Edge Error
#[fail(display = "Edge Addition Error")]
#[error("Edge Addition Error")]
EdgeAddition,
/// Path Error
#[fail(display = "Path Error")]
#[error("Path Error")]
Path,
/// Invalid cycle
#[fail(display = "Invalid Cycle length: {}", _0)]
#[error("Invalid Cycle length: {0}")]
InvalidCycle(usize),
/// No Cycle
#[fail(display = "No Cycle")]
#[error("No Cycle")]
NoCycle,
/// No Solution
#[fail(display = "No Solution")]
#[error("No Solution")]
NoSolution,
}
impl Fail for Error {
fn cause(&self) -> Option<&dyn Fail> {
self.inner.cause()
}
fn backtrace(&self) -> Option<&Backtrace> {
self.inner.backtrace()
}
}
impl Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
Display::fmt(&self.inner, f)
}
}
impl Error {
/// Return errorkind
pub fn kind(&self) -> ErrorKind {
self.inner.get_context().clone()
}
}
impl From<ErrorKind> for Error {
fn from(kind: ErrorKind) -> Error {
Error {
inner: Context::new(kind),
}
}
}
impl From<Context<ErrorKind>> for Error {
fn from(inner: Context<ErrorKind>) -> Error {
Error { inner }
}
}
impl From<io::Error> for Error {
fn from(_error: io::Error) -> Error {
Error {
inner: Context::new(ErrorKind::IOError),
}
}
}

View file

@ -501,11 +501,7 @@ impl ChainAdapter for Peers {
hash, peer_info.addr,
);
self.ban_peer(peer_info.addr, ReasonForBan::BadBlock)
.map_err(|e| {
let err: chain::Error =
chain::ErrorKind::Other(format!("ban peer error :{:?}", e)).into();
err
})?;
.map_err(|e| chain::Error::Other(format!("ban peer error: {:?}", e)))?;
Ok(false)
} else {
Ok(true)
@ -526,11 +522,7 @@ impl ChainAdapter for Peers {
hash, peer_info.addr
);
self.ban_peer(peer_info.addr, ReasonForBan::BadCompactBlock)
.map_err(|e| {
let err: chain::Error =
chain::ErrorKind::Other(format!("ban peer error :{:?}", e)).into();
err
})?;
.map_err(|e| chain::Error::Other(format!("ban peer error: {:?}", e)))?;
Ok(false)
} else {
Ok(true)
@ -546,11 +538,7 @@ impl ChainAdapter for Peers {
// if the peer sent us a block header that's intrinsically bad
// they are either mistaken or malevolent, both of which require a ban
self.ban_peer(peer_info.addr, ReasonForBan::BadBlockHeader)
.map_err(|e| {
let err: chain::Error =
chain::ErrorKind::Other(format!("ban peer error :{:?}", e)).into();
err
})?;
.map_err(|e| chain::Error::Other(format!("ban peer error: {:?}", e)))?;
Ok(false)
} else {
Ok(true)
@ -566,11 +554,7 @@ impl ChainAdapter for Peers {
// if the peer sent us a block header that's intrinsically bad
// they are either mistaken or malevolent, both of which require a ban
self.ban_peer(peer_info.addr, ReasonForBan::BadBlockHeader)
.map_err(|e| {
let err: chain::Error =
chain::ErrorKind::Other(format!("ban peer error :{:?}", e)).into();
err
})?;
.map_err(|e| chain::Error::Other(format!("ban peer error: {:?}", e)))?;
Ok(false)
} else {
Ok(true)
@ -609,11 +593,7 @@ impl ChainAdapter for Peers {
peer_info.addr
);
self.ban_peer(peer_info.addr, ReasonForBan::BadTxHashSet)
.map_err(|e| {
let err: chain::Error =
chain::ErrorKind::Other(format!("ban peer error :{:?}", e)).into();
err
})?;
.map_err(|e| chain::Error::Other(format!("ban peer error: {:?}", e)))?;
Ok(true)
} else {
Ok(false)

View file

@ -14,10 +14,9 @@ blake2-rfc = "0.2"
rand = "0.6"
serde = "1"
serde_derive = "1"
thiserror = "1"
log = "0.4"
chrono = "0.4.11"
failure = "0.1"
failure_derive = "0.1"
grin_core = { path = "../core", version = "5.2.0-alpha.1" }
grin_keychain = { path = "../keychain", version = "5.2.0-alpha.1" }

450
pool/fuzz/Cargo.lock generated
View file

@ -1,5 +1,7 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "addr2line"
version = "0.13.0"
@ -35,9 +37,9 @@ dependencies = [
[[package]]
name = "arbitrary"
version = "0.4.5"
version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7cb544f1057eaaff4b34f8c4dcf56fc3cd04debd291998405d135017a7c3c0f4"
checksum = "5a7924531f38b1970ff630f03eb20a2fde69db5c590c93b0f3482e95dcc5fd60"
[[package]]
name = "arc-swap"
@ -45,12 +47,6 @@ version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4d25d88fd6b8041580a654f9d0c581a047baee2b3efee13275f2fc392fc75034"
[[package]]
name = "arrayref"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544"
[[package]]
name = "arrayvec"
version = "0.3.25"
@ -100,23 +96,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46254cf2fdcdf1badb5934448c1bcbe046a56537b3987d96c51a7afc5d03f293"
dependencies = [
"addr2line",
"cfg-if",
"cfg-if 0.1.10",
"libc",
"miniz_oxide",
"object",
"rustc-demangle",
]
[[package]]
name = "base64"
version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "489d6c0ed21b11d038c31b6ceccca973e65d73ba3bd8ecb9a2babf5546164643"
dependencies = [
"byteorder",
"safemem",
]
[[package]]
name = "base64"
version = "0.12.3"
@ -124,14 +110,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff"
[[package]]
name = "bindgen"
version = "0.52.0"
name = "base64ct"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1c85344eb535a31b62f0af37be84441ba9e7f0f4111eb0530f43d15e513fe57"
checksum = "3bdca834647821e0b13d9539a8634eb62d3501b6b6c2cec1722786ee6671b851"
[[package]]
name = "bindgen"
version = "0.56.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2da379dbebc0b76ef63ca68d8fc6e71c0f13e59432e0987e508c1820e6ab5239"
dependencies = [
"bitflags 1.2.1",
"cexpr",
"cfg-if",
"clang-sys",
"clap",
"env_logger",
@ -139,8 +130,8 @@ dependencies = [
"lazycell",
"log",
"peeking_take_while",
"proc-macro2 1.0.18",
"quote 1.0.7",
"proc-macro2",
"quote",
"regex",
"rustc-hash",
"shlex",
@ -177,26 +168,25 @@ dependencies = [
[[package]]
name = "block-buffer"
version = "0.3.3"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a076c298b9ecdb530ed9d967e74a6027d6a7478924520acddcddc24c1c8ab3ab"
checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4"
dependencies = [
"arrayref",
"byte-tools",
"generic-array",
]
[[package]]
name = "byte-tools"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "560c32574a12a89ecd91f5e742165893f86e3ab98d21f8ea548658eb9eef5f40"
[[package]]
name = "byteorder"
version = "1.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de"
[[package]]
name = "bytes"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e4cec68f03f32e44924783795810fa50a7035d8c8ebe78580ad7e6c703fba38"
[[package]]
name = "cc"
version = "1.0.58"
@ -205,9 +195,9 @@ checksum = "f9a06fb2e53271d7c279ec1efea6ab691c35a2ae67ec0d91d7acec0caf13b518"
[[package]]
name = "cexpr"
version = "0.3.6"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fce5b5fb86b0c57c20c834c1b412fd09c77c8a59b9473f86272709e78874cd1d"
checksum = "f4aedb84272dbe89af497cf81375129abda4fc0a9e7c5d317498c15cc30c0d27"
dependencies = [
"nom",
]
@ -218,6 +208,12 @@ version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "chrono"
version = "0.4.13"
@ -232,9 +228,9 @@ dependencies = [
[[package]]
name = "clang-sys"
version = "0.28.1"
version = "1.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81de550971c976f176130da4b2978d3b524eaa0fd9ac31f3ceb5ae1231fb4853"
checksum = "5a050e2153c5be08febd6734e29298e844fdb0fa21aeddd63b4eb7baa106c69b"
dependencies = [
"glob",
"libc",
@ -271,31 +267,40 @@ version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc"
[[package]]
name = "cpufeatures"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59a6001667ab124aebae2a495118e11d30984c3a653e99d86d58971708cf5e4b"
dependencies = [
"libc",
]
[[package]]
name = "crc32fast"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba125de2af0df55319f41944744ad91c71113bf74a4646efff39afe1f6842db1"
dependencies = [
"cfg-if",
"cfg-if 0.1.10",
]
[[package]]
name = "croaring-mw"
version = "0.4.5"
name = "croaring"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bcdee571ce4bf3e49c382de29c38bd33b9fa871e1358c7749b9dcc5dc2776221"
checksum = "a00d14ad7d8cc067d7a5c93e8563791bfec3f7182361db955530db11d94ed63c"
dependencies = [
"byteorder",
"croaring-sys-mw",
"croaring-sys",
"libc",
]
[[package]]
name = "croaring-sys-mw"
version = "0.4.5"
name = "croaring-sys"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea52c177269fa54c526b054dac8e623721de18143ebfd2ea84ffc023d6c271ee"
checksum = "c5d6a46501bb403a61e43bc7cd19977b4f9c54efd703949b00259cc61afb5a86"
dependencies = [
"bindgen",
"cc",
@ -304,19 +309,19 @@ dependencies = [
[[package]]
name = "crypto-mac"
version = "0.6.2"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7afa06d05a046c7a47c3a849907ec303504608c927f4e85f7bfff22b7180d971"
checksum = "b1d1a86f49236c215f271d40892d5fc950490551400b02ef360692c29815c714"
dependencies = [
"constant_time_eq",
"generic-array",
"subtle",
]
[[package]]
name = "digest"
version = "0.7.6"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "03b072242a8cbaf9c145665af9d250c59af3b958f83ed6824e13533cf76d5b90"
checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066"
dependencies = [
"generic-array",
]
@ -338,52 +343,24 @@ dependencies = [
[[package]]
name = "env_logger"
version = "0.7.1"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44533bbbb3bb3c1fa17d9f2e4e38bbbaf8396ba82193c4cb1b6445d711445d36"
checksum = "a19187fea3ac7e84da7dacf48de0c45d63c6a76f9490dae389aead16c243fce3"
dependencies = [
"atty",
"humantime",
"humantime 2.1.0",
"log",
"regex",
"termcolor",
]
[[package]]
name = "failure"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d32e9bd16cc02eae7db7ef620b392808b89f6a5e16bb3497d159c6b92a0f4f86"
dependencies = [
"backtrace",
"failure_derive",
]
[[package]]
name = "failure_derive"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4"
dependencies = [
"proc-macro2 1.0.18",
"quote 1.0.7",
"syn 1.0.34",
"synstructure 0.12.4",
]
[[package]]
name = "fake-simd"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed"
[[package]]
name = "flate2"
version = "1.0.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68c90b0fc46cf89d227cc78b40e494ff81287a92dd07631e5af0d06fe3cf885e"
dependencies = [
"cfg-if",
"cfg-if 0.1.10",
"crc32fast",
"libc",
"miniz_oxide",
@ -409,11 +386,12 @@ checksum = "8f5f3913fa0bfe7ee1fd8248b6b9f42a5af4b9d65ec2dd2c3c26132b950ecfc2"
[[package]]
name = "generic-array"
version = "0.9.0"
version = "0.14.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef25c5683767570c2bbd7deba372926a55eaae9982d7726ee2a1050239d45b9d"
checksum = "fd48d33ec7f05fbfa152300fdad764757cbded343c1aa1cff2fbaf4134851803"
dependencies = [
"typenum",
"version_check",
]
[[package]]
@ -422,7 +400,7 @@ version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7abc8dd8451921606d809ba32e95b6111925cd2906060d2dcc29c070220503eb"
dependencies = [
"cfg-if",
"cfg-if 0.1.10",
"libc",
"wasi",
]
@ -441,16 +419,14 @@ checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
[[package]]
name = "grin_chain"
version = "4.2.0-alpha.1"
version = "5.2.0-alpha.1"
dependencies = [
"bit-vec",
"bitflags 1.2.1",
"byteorder",
"chrono",
"croaring-mw",
"croaring",
"enum_primitive",
"failure",
"failure_derive",
"grin_core",
"grin_keychain",
"grin_store",
@ -460,19 +436,19 @@ dependencies = [
"lru-cache",
"serde",
"serde_derive",
"thiserror",
]
[[package]]
name = "grin_core"
version = "4.2.0-alpha.1"
version = "5.2.0-alpha.1"
dependencies = [
"blake2-rfc",
"byteorder",
"bytes",
"chrono",
"croaring-mw",
"croaring",
"enum_primitive",
"failure",
"failure_derive",
"grin_keychain",
"grin_util",
"lazy_static",
@ -484,12 +460,13 @@ dependencies = [
"serde",
"serde_derive",
"siphasher",
"zeroize 1.1.0",
"thiserror",
"zeroize",
]
[[package]]
name = "grin_keychain"
version = "4.2.0-alpha.1"
version = "5.2.0-alpha.1"
dependencies = [
"blake2-rfc",
"byteorder",
@ -505,17 +482,15 @@ dependencies = [
"serde_derive",
"serde_json",
"sha2",
"zeroize 1.1.0",
"zeroize",
]
[[package]]
name = "grin_pool"
version = "4.2.0-alpha.1"
version = "5.2.0-alpha.1"
dependencies = [
"blake2-rfc",
"chrono",
"failure",
"failure_derive",
"grin_core",
"grin_keychain",
"grin_util",
@ -523,6 +498,7 @@ dependencies = [
"rand 0.6.5",
"serde",
"serde_derive",
"thiserror",
]
[[package]]
@ -540,9 +516,9 @@ dependencies = [
[[package]]
name = "grin_secp256k1zkp"
version = "0.7.9"
version = "0.7.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4c2e7431d1999f02112c2383c9d33e7a6212947abfba92c87ab7283ba667a8b"
checksum = "3af3c4c4829b3e2e7ee1d9a542833e4244912fbb887fabe44682558159b068a7"
dependencies = [
"arrayvec 0.3.25",
"cc",
@ -551,17 +527,15 @@ dependencies = [
"rustc-serialize",
"serde",
"serde_json",
"zeroize 0.9.3",
"zeroize",
]
[[package]]
name = "grin_store"
version = "4.2.0-alpha.1"
version = "5.2.0-alpha.1"
dependencies = [
"byteorder",
"croaring-mw",
"failure",
"failure_derive",
"croaring",
"grin_core",
"grin_util",
"libc",
@ -571,14 +545,15 @@ dependencies = [
"serde",
"serde_derive",
"tempfile",
"thiserror",
]
[[package]]
name = "grin_util"
version = "4.2.0-alpha.1"
version = "5.2.0-alpha.1"
dependencies = [
"backtrace",
"base64 0.12.3",
"base64",
"byteorder",
"grin_secp256k1zkp",
"lazy_static",
@ -589,7 +564,7 @@ dependencies = [
"serde",
"serde_derive",
"walkdir",
"zeroize 1.1.0",
"zeroize",
"zip",
]
@ -604,9 +579,9 @@ dependencies = [
[[package]]
name = "hmac"
version = "0.6.3"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "733e1b3ac906631ca01ebb577e9bb0f5e37a454032b9036b5eaea4013ed6f99a"
checksum = "2a2a2320eb7ec0ebe8da8f744d7812d9fc4cb4d09344ac01898dbcb6a20ae69b"
dependencies = [
"crypto-mac",
"digest",
@ -621,6 +596,12 @@ dependencies = [
"quick-error",
]
[[package]]
name = "humantime"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
[[package]]
name = "itoa"
version = "0.4.6"
@ -647,12 +628,13 @@ checksum = "a9f8082297d534141b30c8d39e9b1773713ab50fdbe4ff30f750d063b3bfd701"
[[package]]
name = "libfuzzer-sys"
version = "0.3.2"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8d718794b8e23533b9069bd2c4597d69e41cc7ab1c02700a502971aca0cdcf24"
checksum = "336244aaeab6a12df46480dc585802aa743a72d66b11937844c61bbca84c991d"
dependencies = [
"arbitrary",
"cc",
"once_cell",
]
[[package]]
@ -667,11 +649,11 @@ dependencies = [
[[package]]
name = "libloading"
version = "0.5.2"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2b111a074963af1d37a139918ac6d49ad1d0d5e47f72fd55388619691a7d753"
checksum = "efbc0f03f9a775e9f6aed295c6a1ba2253c5757a9e03d55c6caa46a681abcddd"
dependencies = [
"cc",
"cfg-if 1.0.0",
"winapi",
]
@ -708,7 +690,7 @@ version = "0.4.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "14b6052be84e6b71ab17edffc2eeabf5c2c3ae1fdb464aae35ac50c67a44e1f7"
dependencies = [
"cfg-if",
"cfg-if 0.1.10",
"serde",
]
@ -728,7 +710,7 @@ dependencies = [
"chrono",
"flate2",
"fnv",
"humantime",
"humantime 1.3.0",
"libc",
"log",
"log-mdc",
@ -785,9 +767,9 @@ checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb"
[[package]]
name = "nom"
version = "4.2.3"
version = "5.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2ad2a91a8e869eeb30b9cb3119ae87773a8f4ae617f41b1eb9c154b2905f7bd6"
checksum = "ffb4262d26ed83a1c0a33a38fe2bb15797329c85770da05e6b828ddb782627af"
dependencies = [
"memchr",
"version_check",
@ -891,6 +873,18 @@ version = "0.2.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4eae0151b9dacf24fcc170d9995e511669a082856a91f958a2fe380bfab3fb22"
[[package]]
name = "once_cell"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "18a6dbe30758c9f83eb00cbea4ac95966305f5a7772f3f42ebfc7fc7eddbd8e1"
[[package]]
name = "opaque-debug"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
[[package]]
name = "ordered-float"
version = "1.1.0"
@ -916,7 +910,7 @@ version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d58c7c768d4ba344e3e8d72518ac13e259d7c7ade24167003b8488e10b6740a3"
dependencies = [
"cfg-if",
"cfg-if 0.1.10",
"cloudabi",
"libc",
"redox_syscall",
@ -925,18 +919,26 @@ dependencies = [
]
[[package]]
name = "pbkdf2"
name = "password-hash"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c09cddfbfc98de7f76931acf44460972edb4023eb14d0c6d4018800e552d8e0"
checksum = "77e0b28ace46c5a396546bcf443bf422b57049617433d8854227352a4a9b24e7"
dependencies = [
"base64 0.9.3",
"byteorder",
"constant_time_eq",
"base64ct",
"rand_core 0.6.3",
"subtle",
]
[[package]]
name = "pbkdf2"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d95f5254224e617595d2cc3cc73ff0a5eaf2637519e25f03388154e9378b6ffa"
dependencies = [
"base64ct",
"crypto-mac",
"generic-array",
"hmac",
"rand 0.5.6",
"password-hash",
"sha2",
]
@ -946,12 +948,6 @@ version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
[[package]]
name = "podio"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b18befed8bc2b61abc79a457295e7e838417326da1586050b919414073977f19"
[[package]]
name = "ppv-lite86"
version = "0.2.8"
@ -960,20 +956,11 @@ checksum = "237a5ed80e274dbc66f86bd59c1e25edc039660be53194b5fe0a482e0f2612ea"
[[package]]
name = "proc-macro2"
version = "0.4.30"
version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759"
checksum = "dd96a1e8ed2596c337f8eae5f24924ec83f5ad5ab21ea8e455d3566c69fbcaf7"
dependencies = [
"unicode-xid 0.1.0",
]
[[package]]
name = "proc-macro2"
version = "1.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "beae6331a816b1f65d04c45b078fd8e6c93e8071771f41b8163255bbd8d7c8fa"
dependencies = [
"unicode-xid 0.2.1",
"unicode-ident",
]
[[package]]
@ -982,22 +969,13 @@ version = "1.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
[[package]]
name = "quote"
version = "0.6.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1"
dependencies = [
"proc-macro2 0.4.30",
]
[[package]]
name = "quote"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37"
dependencies = [
"proc-macro2 1.0.18",
"proc-macro2",
]
[[package]]
@ -1089,6 +1067,12 @@ dependencies = [
"getrandom",
]
[[package]]
name = "rand_core"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7"
[[package]]
name = "rand_hc"
version = "0.1.0"
@ -1204,13 +1188,13 @@ dependencies = [
[[package]]
name = "ripemd160"
version = "0.7.0"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "482aa56cc68aaeccdaaff1cc5a72c247da8bbad3beb174ca5741f274c22883fb"
checksum = "2eca4ecc81b7f313189bf73ce724400a07da2a6dac19588b03c8bd76a2dcc251"
dependencies = [
"block-buffer",
"byte-tools",
"digest",
"opaque-debug",
]
[[package]]
@ -1237,12 +1221,6 @@ version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e"
[[package]]
name = "safemem"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072"
[[package]]
name = "same-file"
version = "1.0.6"
@ -1283,9 +1261,9 @@ version = "1.0.114"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a0be94b04690fbaed37cddffc5c134bf537c8e3329d53e982fe04c374978f8e"
dependencies = [
"proc-macro2 1.0.18",
"quote 1.0.7",
"syn 1.0.34",
"proc-macro2",
"quote",
"syn",
]
[[package]]
@ -1313,14 +1291,15 @@ dependencies = [
[[package]]
name = "sha2"
version = "0.7.1"
version = "0.9.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9eb6be24e4c23a84d7184280d2722f7f2731fcdd4a9d886efbfe4413e4847ea0"
checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800"
dependencies = [
"block-buffer",
"byte-tools",
"cfg-if 1.0.0",
"cpufeatures",
"digest",
"fake-simd",
"opaque-debug",
]
[[package]]
@ -1347,6 +1326,12 @@ version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
[[package]]
name = "subtle"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"
[[package]]
name = "supercow"
version = "0.1.0"
@ -1355,36 +1340,13 @@ checksum = "171758edb47aa306a78dfa4ab9aeb5167405bd4e3dc2b64e88f6a84bbe98bd63"
[[package]]
name = "syn"
version = "0.15.44"
version = "1.0.98"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5"
checksum = "c50aef8a904de4c23c788f104b7dddc7d6f79c647c7c8ce4cc8f73eb0ca773dd"
dependencies = [
"proc-macro2 0.4.30",
"quote 0.6.13",
"unicode-xid 0.1.0",
]
[[package]]
name = "syn"
version = "1.0.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "936cae2873c940d92e697597c5eee105fb570cd5689c695806f672883653349b"
dependencies = [
"proc-macro2 1.0.18",
"quote 1.0.7",
"unicode-xid 0.2.1",
]
[[package]]
name = "synstructure"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02353edf96d6e4dc81aea2d8490a7e9db177bf8acb0e951c24940bf866cb313f"
dependencies = [
"proc-macro2 0.4.30",
"quote 0.6.13",
"syn 0.15.44",
"unicode-xid 0.1.0",
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
@ -1393,10 +1355,10 @@ version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b834f2d66f734cb897113e34aaff2f1ab4719ca946f9a7358dba8f8064148701"
dependencies = [
"proc-macro2 1.0.18",
"quote 1.0.7",
"syn 1.0.34",
"unicode-xid 0.2.1",
"proc-macro2",
"quote",
"syn",
"unicode-xid",
]
[[package]]
@ -1405,7 +1367,7 @@ version = "3.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9"
dependencies = [
"cfg-if",
"cfg-if 0.1.10",
"libc",
"rand 0.7.3",
"redox_syscall",
@ -1431,6 +1393,26 @@ dependencies = [
"unicode-width",
]
[[package]]
name = "thiserror"
version = "1.0.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd829fe32373d27f76265620b5309d0340cb8550f523c1dda251d6298069069a"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0396bc89e626244658bef819e22d0cc459e795a5ebe878e6ec336d1674a8d79a"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "thread-id"
version = "3.3.0"
@ -1482,18 +1464,18 @@ version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33"
[[package]]
name = "unicode-ident"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5bd2fe26506023ed7b5e1e315add59d6f584c621d037f9368fea9cfb988f368c"
[[package]]
name = "unicode-width"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3"
[[package]]
name = "unicode-xid"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
[[package]]
name = "unicode-xid"
version = "0.2.1"
@ -1517,9 +1499,9 @@ checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
[[package]]
name = "version_check"
version = "0.1.5"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "walkdir"
@ -1587,34 +1569,13 @@ dependencies = [
"linked-hash-map",
]
[[package]]
name = "zeroize"
version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "45af6a010d13e4cf5b54c94ba5a2b2eba5596b9e46bf5875612d332a1f2b3f86"
dependencies = [
"zeroize_derive 0.9.3",
]
[[package]]
name = "zeroize"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3cbac2ed2ba24cc90f5e06485ac8c7c1e5449fe8911aef4d8877218af021a5b8"
dependencies = [
"zeroize_derive 1.0.0",
]
[[package]]
name = "zeroize_derive"
version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "080616bd0e31f36095288bb0acdf1f78ef02c2fa15527d7e993f2a6c7591643e"
dependencies = [
"proc-macro2 0.4.30",
"quote 0.6.13",
"syn 0.15.44",
"synstructure 0.10.2",
"zeroize_derive",
]
[[package]]
@ -1623,18 +1584,19 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de251eec69fc7c1bc3923403d18ececb929380e016afe103da75f396704f8ca2"
dependencies = [
"proc-macro2 1.0.18",
"quote 1.0.7",
"syn 1.0.34",
"synstructure 0.12.4",
"proc-macro2",
"quote",
"syn",
"synstructure",
]
[[package]]
name = "zip"
version = "0.5.6"
version = "0.5.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58287c28d78507f5f91f2a4cf1e8310e2c76fd4c6932f93ac60fd1ceb402db7d"
checksum = "93ab48844d61251bb3835145c521d88aa4031d7139e8485990f60ca911fa0815"
dependencies = [
"byteorder",
"crc32fast",
"podio",
"thiserror",
]

View file

@ -5,12 +5,9 @@ authors = ["Automatically generated"]
publish = false
edition = "2018"
[package.metadata]
cargo-fuzz = true
[dependencies]
chrono = "0.4.11"
libfuzzer-sys = "0.3"
libfuzzer-sys = "0.4.0"
grin_chain = { path = "../../chain" }
grin_core = { path = "../../core" }
grin_keychain = { path = "../../keychain" }

View file

@ -91,8 +91,8 @@ impl BlockChain for ChainAdapter {
fn validate_tx(&self, tx: &Transaction) -> Result<(), pool::PoolError> {
self.chain.validate_tx(tx).map_err(|e| match e.kind() {
chain::ErrorKind::Transaction(txe) => txe.into(),
chain::ErrorKind::NRDRelativeHeight => PoolError::NRDKernelRelativeHeight,
chain::Error::Transaction(txe) => txe.into(),
chain::Error::NRDRelativeHeight => PoolError::NRDKernelRelativeHeight,
_ => PoolError::Other("failed to validate tx".into()),
})
}

View file

@ -23,7 +23,6 @@ use self::core::core::transaction::{self, Transaction};
use self::core::core::{BlockHeader, BlockSums, Inputs, OutputIdentifier};
use self::core::global::DEFAULT_ACCEPT_FEE_BASE;
use chrono::prelude::*;
use failure::Fail;
use grin_core as core;
use grin_keychain as keychain;
@ -205,53 +204,53 @@ impl TxSource {
}
/// Possible errors when interacting with the transaction pool.
#[derive(Debug, Fail, PartialEq)]
#[derive(Debug, thiserror::Error, PartialEq)]
pub enum PoolError {
/// An invalid pool entry caused by underlying tx validation error
#[fail(display = "Invalid Tx {}", _0)]
#[error("Invalid Tx {0}")]
InvalidTx(transaction::Error),
/// An invalid pool entry caused by underlying block validation error
#[fail(display = "Invalid Block {}", _0)]
#[error("Invalid Block {0}")]
InvalidBlock(block::Error),
/// Underlying keychain error.
#[fail(display = "Keychain error {}", _0)]
#[error("Keychain error {0}")]
Keychain(keychain::Error),
/// Underlying "committed" error.
#[fail(display = "Committed error {}", _0)]
#[error("Committed error {0}")]
Committed(committed::Error),
/// Attempt to add a transaction to the pool with lock_height
/// greater than height of current block
#[fail(display = "Immature transaction")]
#[error("Immature transaction")]
ImmatureTransaction,
/// Attempt to spend a coinbase output before it has sufficiently matured.
#[fail(display = "Immature coinbase")]
#[error("Immature coinbase")]
ImmatureCoinbase,
/// Problem propagating a stem tx to the next Dandelion relay node.
#[fail(display = "Dandelion error")]
#[error("Dandelion error")]
DandelionError,
/// Transaction pool is over capacity, can't accept more transactions
#[fail(display = "Over capacity")]
#[error("Over capacity")]
OverCapacity,
/// Transaction fee is too low given its weight
#[fail(display = "Low fee transaction {}", _0)]
#[error("Low fee transaction {0}")]
LowFeeTransaction(u64),
/// Attempt to add a duplicate output to the pool.
#[fail(display = "Duplicate commitment")]
#[error("Duplicate commitment")]
DuplicateCommitment,
/// Attempt to add a duplicate tx to the pool.
#[fail(display = "Duplicate tx")]
#[error("Duplicate tx")]
DuplicateTx,
/// NRD kernels will not be accepted by the txpool/stempool pre-HF3.
#[fail(display = "NRD kernel pre-HF3")]
#[error("NRD kernel pre-HF3")]
NRDKernelPreHF3,
/// NRD kernels are not valid if disabled locally via "feature flag".
#[fail(display = "NRD kernel not enabled")]
#[error("NRD kernel not enabled")]
NRDKernelNotEnabled,
/// NRD kernels are not valid if relative_height rule not met.
#[fail(display = "NRD kernel relative height")]
#[error("NRD kernel relative height")]
NRDKernelRelativeHeight,
/// Other kinds of error (not yet pulled out into meaningful errors).
#[fail(display = "General pool error {}", _0)]
#[error("General pool error {0}")]
Other(String),
}

View file

@ -125,9 +125,9 @@ impl BlockChain for ChainAdapter {
}
fn validate_tx(&self, tx: &Transaction) -> Result<(), pool::PoolError> {
self.chain.validate_tx(tx).map_err(|e| match e.kind() {
chain::ErrorKind::Transaction(txe) => txe.into(),
chain::ErrorKind::NRDRelativeHeight => PoolError::NRDKernelRelativeHeight,
self.chain.validate_tx(tx).map_err(|e| match e {
chain::Error::Transaction { source: txe } => txe.into(),
chain::Error::NRDRelativeHeight => PoolError::NRDKernelRelativeHeight,
_ => PoolError::Other("failed to validate tx".into()),
})
}

View file

@ -205,7 +205,7 @@ where
.chain()
.process_block_header(&cb.header, chain::Options::NONE)
{
debug!("Invalid compact block header {}: {:?}", cb_hash, e.kind());
debug!("Invalid compact block header {}: {:?}", cb_hash, e);
return Ok(!e.is_bad_data());
}
@ -286,11 +286,7 @@ where
let res = self.chain().process_block_header(&bh, chain::Options::NONE);
if let Err(e) = res {
debug!(
"Block header {} refused by chain: {:?}",
bh.hash(),
e.kind()
);
debug!("Block header {} refused by chain: {:?}", bh.hash(), e);
if e.is_bad_data() {
return Ok(false);
} else {
@ -480,9 +476,9 @@ where
if is_bad_data {
self.chain().clean_txhashset_sandbox();
error!("Failed to save txhashset archive: bad data");
self.sync_state.set_sync_error(
chain::ErrorKind::TxHashSetErr("bad txhashset data".to_string()).into(),
);
self.sync_state.set_sync_error(chain::Error::TxHashSetErr(
"bad txhashset data".to_string(),
));
} else {
info!("Received valid txhashset data for {}.", h);
}
@ -511,11 +507,11 @@ where
id: SegmentIdentifier,
) -> Result<Segment<TxKernel>, chain::Error> {
if !KERNEL_SEGMENT_HEIGHT_RANGE.contains(&id.height) {
return Err(chain::ErrorKind::InvalidSegmentHeight.into());
return Err(chain::Error::InvalidSegmentHeight);
}
let segmenter = self.chain().segmenter()?;
if segmenter.header().hash() != hash {
return Err(chain::ErrorKind::SegmenterHeaderMismatch.into());
return Err(chain::Error::SegmenterHeaderMismatch);
}
segmenter.kernel_segment(id)
}
@ -526,11 +522,11 @@ where
id: SegmentIdentifier,
) -> Result<(Segment<BitmapChunk>, Hash), chain::Error> {
if !BITMAP_SEGMENT_HEIGHT_RANGE.contains(&id.height) {
return Err(chain::ErrorKind::InvalidSegmentHeight.into());
return Err(chain::Error::InvalidSegmentHeight);
}
let segmenter = self.chain().segmenter()?;
if segmenter.header().hash() != hash {
return Err(chain::ErrorKind::SegmenterHeaderMismatch.into());
return Err(chain::Error::SegmenterHeaderMismatch);
}
segmenter.bitmap_segment(id)
}
@ -541,11 +537,11 @@ where
id: SegmentIdentifier,
) -> Result<(Segment<OutputIdentifier>, Hash), chain::Error> {
if !OUTPUT_SEGMENT_HEIGHT_RANGE.contains(&id.height) {
return Err(chain::ErrorKind::InvalidSegmentHeight.into());
return Err(chain::Error::InvalidSegmentHeight);
}
let segmenter = self.chain().segmenter()?;
if segmenter.header().hash() != hash {
return Err(chain::ErrorKind::SegmenterHeaderMismatch.into());
return Err(chain::Error::SegmenterHeaderMismatch);
}
segmenter.output_segment(id)
}
@ -556,11 +552,11 @@ where
id: SegmentIdentifier,
) -> Result<Segment<RangeProof>, chain::Error> {
if !RANGEPROOF_SEGMENT_HEIGHT_RANGE.contains(&id.height) {
return Err(chain::ErrorKind::InvalidSegmentHeight.into());
return Err(chain::Error::InvalidSegmentHeight);
}
let segmenter = self.chain().segmenter()?;
if segmenter.header().hash() != hash {
return Err(chain::ErrorKind::SegmenterHeaderMismatch.into());
return Err(chain::Error::SegmenterHeaderMismatch);
}
segmenter.rangeproof_segment(id)
}
@ -790,8 +786,8 @@ where
Ok(false)
}
Err(e) => {
match e.kind() {
chain::ErrorKind::Orphan => {
match e {
chain::Error::Orphan => {
if let Ok(previous) = previous {
// make sure we did not miss the parent block
if !self.chain().is_orphan(&previous.hash())
@ -804,11 +800,7 @@ where
Ok(true)
}
_ => {
debug!(
"process_block: block {} refused by chain: {}",
bhash,
e.kind()
);
debug!("process_block: block {} refused by chain: {}", bhash, e);
Ok(true)
}
}

View file

@ -225,10 +225,11 @@ impl StateSync {
if let SyncStatus::TxHashsetDownload { .. } = self.sync_state.status() {
if download_timeout {
error!("state_sync: TxHashsetDownload status timeout in 10 minutes!");
self.sync_state.set_sync_error(
chain::ErrorKind::SyncError(format!("{:?}", p2p::Error::Timeout))
.into(),
);
self.sync_state
.set_sync_error(chain::Error::SyncError(format!(
"{:?}",
p2p::Error::Timeout
)));
}
}
@ -240,7 +241,7 @@ impl StateSync {
}
Err(e) => self
.sync_state
.set_sync_error(chain::ErrorKind::SyncError(format!("{:?}", e)).into()),
.set_sync_error(chain::Error::SyncError(format!("{:?}", e))),
}
self.sync_state
@ -330,7 +331,7 @@ impl StateSync {
self.sync_state
.update_pibd_progress(true, true, 0, 1, &archive_header);
self.sync_state
.set_sync_error(chain::ErrorKind::AbortingPIBDError.into());
.set_sync_error(chain::Error::AbortingPIBDError);
self.set_pibd_aborted();
return false;
}

View file

@ -79,8 +79,8 @@ pub fn get_block(
while let Err(e) = result {
let mut new_key_id = key_id.to_owned();
match e {
self::Error::Chain(c) => match c.kind() {
chain::ErrorKind::DuplicateCommitment(_) => {
self::Error::Chain(c) => match c {
chain::Error::DuplicateCommitment(_) => {
debug!(
"Duplicate commit for potential coinbase detected. Trying next derivation."
);
@ -182,20 +182,18 @@ fn build_block(
match chain.set_txhashset_roots(&mut b) {
Ok(_) => Ok((b, block_fees)),
Err(e) => {
match e.kind() {
match e {
// If this is a duplicate commitment then likely trying to use
// a key that hass already been derived but not in the wallet
// for some reason, allow caller to retry.
chain::ErrorKind::DuplicateCommitment(e) => Err(Error::Chain(
chain::ErrorKind::DuplicateCommitment(e).into(),
)),
chain::Error::DuplicateCommitment(e) => {
Err(Error::Chain(chain::Error::DuplicateCommitment(e)))
}
// Some other issue, possibly duplicate kernel
_ => {
error!("Error setting txhashset root to build a block: {:?}", e);
Err(Error::Chain(
chain::ErrorKind::Other(format!("{:?}", e)).into(),
))
Err(Error::Chain(chain::Error::Other(format!("{:?}", e))))
}
}
}

View file

@ -424,7 +424,7 @@ impl Handler {
if let Err(e) = res {
// Return error status
error!(
"(Server ID: {}) Failed to validate solution at height {}, hash {}, edge_bits {}, nonce {}, job_id {}, {}: {}",
"(Server ID: {}) Failed to validate solution at height {}, hash {}, edge_bits {}, nonce {}, job_id {}, {}",
self.id,
params.height,
b.hash(),
@ -432,7 +432,6 @@ impl Handler {
params.nonce,
params.job_id,
e,
e.backtrace().unwrap(),
);
self.workers
.update_stats(worker_id, |worker_stats| worker_stats.num_rejected += 1);

View file

@ -13,13 +13,12 @@ edition = "2018"
byteorder = "1"
croaring = "0.4.6"
libc = "0.2"
failure = "0.1"
failure_derive = "0.1"
lmdb-zero = "0.4.4"
memmap = "0.7"
tempfile = "3.1"
serde = "1"
serde_derive = "1"
thiserror = "1"
log = "0.4"
grin_core = { path = "../core", version = "5.2.0-alpha.1" }

View file

@ -23,8 +23,6 @@
#[macro_use]
extern crate log;
#[macro_use]
extern crate failure_derive;
#[macro_use]
extern crate grin_core as core;
extern crate grin_util as util;

View file

@ -35,22 +35,22 @@ const RESIZE_PERCENT: f32 = 0.9;
const RESIZE_MIN_TARGET_PERCENT: f32 = 0.65;
/// Main error type for this lmdb
#[derive(Clone, Eq, PartialEq, Debug, Fail)]
#[derive(Clone, Eq, PartialEq, Debug, thiserror::Error)]
pub enum Error {
/// Couldn't find what we were looking for
#[fail(display = "DB Not Found Error: {}", _0)]
#[error("DB Not Found Error: {0}")]
NotFoundErr(String),
/// Wraps an error originating from LMDB
#[fail(display = "LMDB error: {} ", _0)]
#[error("LMDB error: {0}")]
LmdbErr(lmdb::error::Error),
/// Wraps a serialization error for Writeable or Readable
#[fail(display = "Serialization Error: {}", _0)]
#[error("Serialization Error: {0}")]
SerErr(ser::Error),
/// File handling error
#[fail(display = "File handling Error: {}", _0)]
#[error("File handling Error: {0}")]
FileErr(String),
/// Other error
#[fail(display = "Other Error: {}", _0)]
#[error("Other Error: {0}")]
OtherErr(String),
}