2018-03-05 22:33:44 +03:00
|
|
|
// Copyright 2018 The Grin Developers
|
2017-10-25 20:57:48 +03:00
|
|
|
//
|
|
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
// you may not use this file except in compliance with the License.
|
|
|
|
// You may obtain a copy of the License at
|
|
|
|
//
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
//
|
|
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
// See the License for the specific language governing permissions and
|
|
|
|
// limitations under the License.
|
|
|
|
|
2017-11-01 02:32:33 +03:00
|
|
|
use std::io::Read;
|
2018-03-04 03:19:54 +03:00
|
|
|
use std::sync::{Arc, RwLock, Weak};
|
2017-11-01 02:32:33 +03:00
|
|
|
use std::thread;
|
2017-10-25 20:57:48 +03:00
|
|
|
|
|
|
|
use iron::prelude::*;
|
|
|
|
use iron::Handler;
|
|
|
|
use iron::status;
|
|
|
|
use urlencoded::UrlEncodedQuery;
|
2017-11-01 02:32:33 +03:00
|
|
|
use serde::Serialize;
|
2017-10-25 20:57:48 +03:00
|
|
|
use serde_json;
|
|
|
|
|
|
|
|
use chain;
|
2018-03-04 03:19:54 +03:00
|
|
|
use core::core::{OutputFeatures, OutputIdentifier, Transaction};
|
2018-01-17 06:03:40 +03:00
|
|
|
use core::core::hash::{Hash, Hashed};
|
2017-11-01 02:32:33 +03:00
|
|
|
use core::ser;
|
|
|
|
use pool;
|
2017-11-02 19:49:33 +03:00
|
|
|
use p2p;
|
2017-12-05 21:55:32 +03:00
|
|
|
use regex::Regex;
|
2017-10-25 20:57:48 +03:00
|
|
|
use rest::*;
|
2017-11-01 02:20:55 +03:00
|
|
|
use util::secp::pedersen::Commitment;
|
2017-11-01 02:32:33 +03:00
|
|
|
use types::*;
|
2017-10-25 20:57:48 +03:00
|
|
|
use util;
|
|
|
|
use util::LOGGER;
|
|
|
|
|
2018-02-13 03:38:52 +03:00
|
|
|
// All handlers use `Weak` references instead of `Arc` to avoid cycles that
// can never be destroyed. This helper reduces the boilerplate of dealing
// with `Weak`.
//
// Panics if the underlying `Arc` has already been dropped, which can only
// happen if a handler is invoked after server shutdown.
fn w<T>(weak: &Weak<T>) -> Arc<T> {
	weak.upgrade()
		.expect("weak reference upgraded after the owning Arc was dropped")
}
|
|
|
|
|
2017-11-19 23:24:14 +03:00
|
|
|
// RESTful index of available api endpoints
|
|
|
|
// GET /v1/
|
|
|
|
struct IndexHandler {
|
|
|
|
list: Vec<String>,
|
|
|
|
}
|
|
|
|
|
2018-01-04 06:25:14 +03:00
|
|
|
impl IndexHandler {}
|
2017-11-19 23:24:14 +03:00
|
|
|
|
|
|
|
impl Handler for IndexHandler {
|
|
|
|
fn handle(&self, _req: &mut Request) -> IronResult<Response> {
|
|
|
|
json_response_pretty(&self.list)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-11-01 02:32:33 +03:00
|
|
|
// Supports retrieval of multiple outputs in a single request -
// GET /v1/chain/outputs/byids?id=xxx,yyy,zzz
// GET /v1/chain/outputs/byids?id=xxx&id=yyy&id=zzz
// GET /v1/chain/outputs/byheight?start_height=101&end_height=200
struct OutputHandler {
	// Weak reference to the chain; upgraded via `w()` on each request so the
	// handler does not keep the chain alive after shutdown.
	chain: Weak<chain::Chain>,
}
|
|
|
|
|
2018-03-05 22:33:44 +03:00
|
|
|
impl OutputHandler {
|
|
|
|
fn get_output(&self, id: &str) -> Result<Output, Error> {
|
2018-01-04 06:25:14 +03:00
|
|
|
let c = util::from_hex(String::from(id))
|
|
|
|
.map_err(|_| Error::Argument(format!("Not a valid commitment: {}", id)))?;
|
2017-10-25 20:57:48 +03:00
|
|
|
let commit = Commitment::from_vec(c);
|
|
|
|
|
2018-01-17 06:03:40 +03:00
|
|
|
// We need the features here to be able to generate the necessary hash
|
|
|
|
// to compare against the hash in the output MMR.
|
2018-03-04 03:19:54 +03:00
|
|
|
// For now we can just try both (but this probably needs to be part of the api
|
|
|
|
// params)
|
2018-01-17 06:03:40 +03:00
|
|
|
let outputs = [
|
2018-02-05 22:43:54 +03:00
|
|
|
OutputIdentifier::new(OutputFeatures::DEFAULT_OUTPUT, &commit),
|
2018-03-04 03:19:54 +03:00
|
|
|
OutputIdentifier::new(OutputFeatures::COINBASE_OUTPUT, &commit),
|
2018-01-17 06:03:40 +03:00
|
|
|
];
|
|
|
|
|
|
|
|
for x in outputs.iter() {
|
2018-02-13 03:38:52 +03:00
|
|
|
if let Ok(_) = w(&self.chain).is_unspent(&x) {
|
2018-03-05 22:33:44 +03:00
|
|
|
return Ok(Output::new(&commit));
|
2018-01-17 06:03:40 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
Err(Error::NotFound)
|
2017-10-25 20:57:48 +03:00
|
|
|
}
|
|
|
|
|
2018-03-05 22:33:44 +03:00
|
|
|
fn outputs_by_ids(&self, req: &mut Request) -> Vec<Output> {
|
2017-10-25 20:57:48 +03:00
|
|
|
let mut commitments: Vec<&str> = vec![];
|
|
|
|
if let Ok(params) = req.get_ref::<UrlEncodedQuery>() {
|
|
|
|
if let Some(ids) = params.get("id") {
|
|
|
|
for id in ids {
|
|
|
|
for id in id.split(",") {
|
|
|
|
commitments.push(id.clone());
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2018-01-17 06:03:40 +03:00
|
|
|
|
2018-03-05 22:33:44 +03:00
|
|
|
debug!(LOGGER, "outputs_by_ids: {:?}", commitments);
|
2018-01-17 06:03:40 +03:00
|
|
|
|
2018-03-05 22:33:44 +03:00
|
|
|
let mut outputs: Vec<Output> = vec![];
|
2018-01-17 06:03:40 +03:00
|
|
|
for x in commitments {
|
2018-03-05 22:33:44 +03:00
|
|
|
if let Ok(output) = self.get_output(x) {
|
|
|
|
outputs.push(output);
|
2017-10-25 20:57:48 +03:00
|
|
|
}
|
|
|
|
}
|
2018-03-05 22:33:44 +03:00
|
|
|
outputs
|
2017-11-20 03:50:09 +03:00
|
|
|
}
|
|
|
|
|
2018-01-17 06:03:40 +03:00
|
|
|
fn outputs_at_height(
|
|
|
|
&self,
|
|
|
|
block_height: u64,
|
|
|
|
commitments: Vec<Commitment>,
|
|
|
|
include_proof: bool,
|
|
|
|
) -> BlockOutputs {
|
2018-03-04 03:19:54 +03:00
|
|
|
let header = w(&self.chain).get_header_by_height(block_height).unwrap();
|
2018-03-13 21:22:34 +03:00
|
|
|
|
|
|
|
// TODO - possible to compact away blocks we care about
|
|
|
|
// in the period between accepting the block and refreshing the wallet
|
|
|
|
if let Ok(block) = w(&self.chain).get_block(&header.hash()) {
|
|
|
|
let outputs = block
|
|
|
|
.outputs
|
|
|
|
.iter()
|
|
|
|
.filter(|output| commitments.is_empty() || commitments.contains(&output.commit))
|
|
|
|
.map(|output| {
|
|
|
|
OutputPrintable::from_output(output, w(&self.chain), &header, include_proof)
|
|
|
|
})
|
|
|
|
.collect();
|
|
|
|
|
|
|
|
BlockOutputs {
|
|
|
|
header: BlockHeaderInfo::from_header(&header),
|
|
|
|
outputs: outputs,
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
debug!(
|
|
|
|
LOGGER,
|
|
|
|
"could not find block {:?} at height {}, maybe compacted?",
|
|
|
|
&header.hash(),
|
|
|
|
block_height,
|
|
|
|
);
|
|
|
|
|
|
|
|
BlockOutputs {
|
|
|
|
header: BlockHeaderInfo::from_header(&header),
|
|
|
|
outputs: vec![],
|
|
|
|
}
|
2017-11-20 03:50:09 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-01-17 06:03:40 +03:00
|
|
|
// returns outputs for a specified range of blocks
|
|
|
|
fn outputs_block_batch(&self, req: &mut Request) -> Vec<BlockOutputs> {
|
|
|
|
let mut commitments: Vec<Commitment> = vec![];
|
2017-11-20 03:50:09 +03:00
|
|
|
let mut start_height = 1;
|
|
|
|
let mut end_height = 1;
|
2018-01-17 06:03:40 +03:00
|
|
|
let mut include_rp = false;
|
|
|
|
|
2017-11-20 03:50:09 +03:00
|
|
|
if let Ok(params) = req.get_ref::<UrlEncodedQuery>() {
|
2018-01-17 06:03:40 +03:00
|
|
|
if let Some(ids) = params.get("id") {
|
|
|
|
for id in ids {
|
|
|
|
for id in id.split(",") {
|
|
|
|
if let Ok(x) = util::from_hex(String::from(id)) {
|
|
|
|
commitments.push(Commitment::from_vec(x));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2017-11-20 03:50:09 +03:00
|
|
|
if let Some(heights) = params.get("start_height") {
|
|
|
|
for height in heights {
|
|
|
|
start_height = height.parse().unwrap();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if let Some(heights) = params.get("end_height") {
|
|
|
|
for height in heights {
|
|
|
|
end_height = height.parse().unwrap();
|
|
|
|
}
|
|
|
|
}
|
2018-01-17 06:03:40 +03:00
|
|
|
if let Some(_) = params.get("include_rp") {
|
|
|
|
include_rp = true;
|
|
|
|
}
|
2017-11-20 03:50:09 +03:00
|
|
|
}
|
2018-01-17 06:03:40 +03:00
|
|
|
|
|
|
|
debug!(
|
|
|
|
LOGGER,
|
|
|
|
"outputs_block_batch: {}-{}, {:?}, {:?}",
|
|
|
|
start_height,
|
|
|
|
end_height,
|
|
|
|
commitments,
|
|
|
|
include_rp,
|
|
|
|
);
|
|
|
|
|
2017-11-20 03:50:09 +03:00
|
|
|
let mut return_vec = vec![];
|
|
|
|
for i in start_height..end_height + 1 {
|
2018-01-17 06:03:40 +03:00
|
|
|
let res = self.outputs_at_height(i, commitments.clone(), include_rp);
|
|
|
|
if res.outputs.len() > 0 {
|
|
|
|
return_vec.push(res);
|
|
|
|
}
|
2017-11-20 03:50:09 +03:00
|
|
|
}
|
2018-01-17 06:03:40 +03:00
|
|
|
|
2017-11-20 03:50:09 +03:00
|
|
|
return_vec
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-03-05 22:33:44 +03:00
|
|
|
impl Handler for OutputHandler {
|
2017-11-20 03:50:09 +03:00
|
|
|
fn handle(&self, req: &mut Request) -> IronResult<Response> {
|
|
|
|
let url = req.url.clone();
|
|
|
|
let mut path_elems = url.path();
|
|
|
|
if *path_elems.last().unwrap() == "" {
|
|
|
|
path_elems.pop();
|
|
|
|
}
|
|
|
|
match *path_elems.last().unwrap() {
|
2018-03-05 22:33:44 +03:00
|
|
|
"byids" => json_response(&self.outputs_by_ids(req)),
|
2018-01-17 06:03:40 +03:00
|
|
|
"byheight" => json_response(&self.outputs_block_batch(req)),
|
2017-11-20 03:50:09 +03:00
|
|
|
_ => Ok(Response::with((status::BadRequest, ""))),
|
|
|
|
}
|
2017-10-25 20:57:48 +03:00
|
|
|
}
|
|
|
|
}
|
2017-10-28 00:57:04 +03:00
|
|
|
|
2017-11-01 02:32:33 +03:00
|
|
|
// Sum tree handler. Retrieve the roots:
// GET /v1/txhashset/roots
//
// Last inserted nodes:
// GET /v1/txhashset/lastoutputs (gets last 10)
// GET /v1/txhashset/lastoutputs?n=5
// GET /v1/txhashset/lastrangeproofs
// GET /v1/txhashset/lastkernels
struct TxHashSetHandler {
	// Weak reference to the chain; upgraded via `w()` on each request.
	chain: Weak<chain::Chain>,
}
|
|
|
|
|
2018-03-05 22:33:44 +03:00
|
|
|
impl TxHashSetHandler {
|
2017-11-01 02:32:33 +03:00
|
|
|
// gets roots
|
2018-03-05 22:33:44 +03:00
|
|
|
fn get_roots(&self) -> TxHashSet {
|
|
|
|
TxHashSet::from_head(w(&self.chain))
|
2017-10-28 00:57:04 +03:00
|
|
|
}
|
|
|
|
|
2018-03-05 22:33:44 +03:00
|
|
|
// gets last n outputs inserted in to the tree
|
|
|
|
fn get_last_n_output(&self, distance: u64) -> Vec<TxHashSetNode> {
|
|
|
|
TxHashSetNode::get_last_n_output(w(&self.chain), distance)
|
2017-10-28 00:57:04 +03:00
|
|
|
}
|
|
|
|
|
2018-03-05 22:33:44 +03:00
|
|
|
// gets last n outputs inserted in to the tree
|
|
|
|
fn get_last_n_rangeproof(&self, distance: u64) -> Vec<TxHashSetNode> {
|
|
|
|
TxHashSetNode::get_last_n_rangeproof(w(&self.chain), distance)
|
2017-10-28 00:57:04 +03:00
|
|
|
}
|
|
|
|
|
2018-03-05 22:33:44 +03:00
|
|
|
// gets last n outputs inserted in to the tree
|
|
|
|
fn get_last_n_kernel(&self, distance: u64) -> Vec<TxHashSetNode> {
|
|
|
|
TxHashSetNode::get_last_n_kernel(w(&self.chain), distance)
|
2017-10-28 00:57:04 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-03-05 22:33:44 +03:00
|
|
|
impl Handler for TxHashSetHandler {
|
2017-10-28 00:57:04 +03:00
|
|
|
fn handle(&self, req: &mut Request) -> IronResult<Response> {
|
|
|
|
let url = req.url.clone();
|
|
|
|
let mut path_elems = url.path();
|
|
|
|
if *path_elems.last().unwrap() == "" {
|
|
|
|
path_elems.pop();
|
|
|
|
}
|
2017-11-01 02:32:33 +03:00
|
|
|
// TODO: probably need to set a reasonable max limit here
|
|
|
|
let mut last_n = 10;
|
2017-10-28 00:57:04 +03:00
|
|
|
if let Ok(params) = req.get_ref::<UrlEncodedQuery>() {
|
|
|
|
if let Some(nums) = params.get("n") {
|
|
|
|
for num in nums {
|
|
|
|
if let Ok(n) = str::parse(num) {
|
2017-11-01 02:32:33 +03:00
|
|
|
last_n = n;
|
2017-10-28 00:57:04 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2017-11-01 02:32:33 +03:00
|
|
|
match *path_elems.last().unwrap() {
|
2017-11-07 19:48:37 +03:00
|
|
|
"roots" => json_response_pretty(&self.get_roots()),
|
2018-03-05 22:33:44 +03:00
|
|
|
"lastoutputs" => json_response_pretty(&self.get_last_n_output(last_n)),
|
2017-11-07 19:48:37 +03:00
|
|
|
"lastrangeproofs" => json_response_pretty(&self.get_last_n_rangeproof(last_n)),
|
|
|
|
"lastkernels" => json_response_pretty(&self.get_last_n_kernel(last_n)),
|
2017-11-01 02:32:33 +03:00
|
|
|
_ => Ok(Response::with((status::BadRequest, ""))),
|
2017-10-28 00:57:04 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-11-02 19:49:33 +03:00
|
|
|
/// Dumps every known peer record (connected or not).
/// GET /v1/peers/all
pub struct PeersAllHandler {
	/// Weak reference to the peer set; upgraded via `w()` on each request.
	pub peers: Weak<p2p::Peers>,
}
|
|
|
|
|
|
|
|
impl Handler for PeersAllHandler {
|
|
|
|
fn handle(&self, _req: &mut Request) -> IronResult<Response> {
|
2018-02-13 03:38:52 +03:00
|
|
|
let peers = &w(&self.peers).all_peers();
|
2017-11-07 19:48:37 +03:00
|
|
|
json_response_pretty(&peers)
|
2017-11-02 19:49:33 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Lists the peers the node is currently connected to.
/// GET /v1/peers/connected
pub struct PeersConnectedHandler {
	/// Weak reference to the peer set; upgraded via `w()` on each request.
	pub peers: Weak<p2p::Peers>,
}
|
|
|
|
|
|
|
|
impl Handler for PeersConnectedHandler {
|
|
|
|
fn handle(&self, _req: &mut Request) -> IronResult<Response> {
|
|
|
|
let mut peers = vec![];
|
2018-02-13 03:38:52 +03:00
|
|
|
for p in &w(&self.peers).connected_peers() {
|
2017-11-21 17:24:29 +03:00
|
|
|
let p = p.read().unwrap();
|
2017-11-02 19:49:33 +03:00
|
|
|
let peer_info = p.info.clone();
|
|
|
|
peers.push(peer_info);
|
|
|
|
}
|
|
|
|
json_response(&peers)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-01-03 04:03:44 +03:00
|
|
|
/// Peer operations
/// POST /v1/peers/10.12.12.13/ban
/// POST /v1/peers/10.12.12.13/unban
pub struct PeerPostHandler {
	/// Weak reference to the peer set; upgraded via `w()` on each request.
	pub peers: Weak<p2p::Peers>,
}
|
|
|
|
|
2018-01-03 04:03:44 +03:00
|
|
|
impl Handler for PeerPostHandler {
|
2017-12-14 00:52:21 +03:00
|
|
|
fn handle(&self, req: &mut Request) -> IronResult<Response> {
|
|
|
|
let url = req.url.clone();
|
|
|
|
let mut path_elems = url.path();
|
|
|
|
if *path_elems.last().unwrap() == "" {
|
|
|
|
path_elems.pop();
|
|
|
|
}
|
|
|
|
match *path_elems.last().unwrap() {
|
2018-01-04 06:25:14 +03:00
|
|
|
"ban" => {
|
|
|
|
path_elems.pop();
|
|
|
|
if let Ok(addr) = path_elems.last().unwrap().parse() {
|
2018-02-13 03:38:52 +03:00
|
|
|
w(&self.peers).ban_peer(&addr);
|
2018-01-04 06:25:14 +03:00
|
|
|
Ok(Response::with((status::Ok, "")))
|
|
|
|
} else {
|
|
|
|
Ok(Response::with((status::BadRequest, "")))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
"unban" => {
|
|
|
|
path_elems.pop();
|
|
|
|
if let Ok(addr) = path_elems.last().unwrap().parse() {
|
2018-02-13 03:38:52 +03:00
|
|
|
w(&self.peers).unban_peer(&addr);
|
2018-01-04 06:25:14 +03:00
|
|
|
Ok(Response::with((status::Ok, "")))
|
|
|
|
} else {
|
|
|
|
Ok(Response::with((status::BadRequest, "")))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
_ => Ok(Response::with((status::BadRequest, ""))),
|
|
|
|
}
|
2017-12-14 00:52:21 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-01-03 04:03:44 +03:00
|
|
|
/// Get details about a given peer
/// GET /v1/peers/10.12.12.13
pub struct PeerGetHandler {
	/// Weak reference to the peer set; upgraded via `w()` on each request.
	pub peers: Weak<p2p::Peers>,
}
|
|
|
|
|
|
|
|
impl Handler for PeerGetHandler {
|
|
|
|
fn handle(&self, req: &mut Request) -> IronResult<Response> {
|
|
|
|
let url = req.url.clone();
|
|
|
|
let mut path_elems = url.path();
|
|
|
|
if *path_elems.last().unwrap() == "" {
|
|
|
|
path_elems.pop();
|
|
|
|
}
|
|
|
|
if let Ok(addr) = path_elems.last().unwrap().parse() {
|
2018-02-13 03:38:52 +03:00
|
|
|
match w(&self.peers).get_peer(addr) {
|
2018-01-03 04:03:44 +03:00
|
|
|
Ok(peer) => json_response(&peer),
|
|
|
|
Err(_) => Ok(Response::with((status::BadRequest, ""))),
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
Ok(Response::with((status::BadRequest, "")))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-03-06 20:58:33 +03:00
|
|
|
/// Status handler. Gets a summary of the server status
/// GET /v1/status
pub struct StatusHandler {
	/// Weak reference to the chain, used for the current head.
	pub chain: Weak<chain::Chain>,
	/// Weak reference to the peer set, used for the peer count.
	pub peers: Weak<p2p::Peers>,
}
|
|
|
|
|
|
|
|
impl StatusHandler {
|
|
|
|
fn get_status(&self) -> Status {
|
2018-02-13 03:38:52 +03:00
|
|
|
Status::from_tip_and_peers(w(&self.chain).head().unwrap(), w(&self.peers).peer_count())
|
2018-01-11 08:25:48 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl Handler for StatusHandler {
|
|
|
|
fn handle(&self, _req: &mut Request) -> IronResult<Response> {
|
|
|
|
json_response(&self.get_status())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-03-06 20:58:33 +03:00
|
|
|
/// Chain handler. Get the head details.
/// GET /v1/chain
pub struct ChainHandler {
	/// Weak reference to the chain; upgraded via `w()` on each request.
	pub chain: Weak<chain::Chain>,
}
|
|
|
|
|
|
|
|
impl ChainHandler {
|
|
|
|
fn get_tip(&self) -> Tip {
|
2018-02-13 03:38:52 +03:00
|
|
|
Tip::from_tip(w(&self.chain).head().unwrap())
|
2017-10-28 00:57:04 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl Handler for ChainHandler {
|
|
|
|
fn handle(&self, _req: &mut Request) -> IronResult<Response> {
|
2017-11-01 02:32:33 +03:00
|
|
|
json_response(&self.get_tip())
|
2017-10-28 00:57:04 +03:00
|
|
|
}
|
|
|
|
}
|
2017-11-01 02:32:33 +03:00
|
|
|
|
2018-03-06 20:58:33 +03:00
|
|
|
/// Chain compaction handler. Trigger a compaction of the chain state to regain
/// storage space.
/// GET /v1/chain/compact
pub struct ChainCompactHandler {
	/// Weak reference to the chain; upgraded via `w()` on each request.
	pub chain: Weak<chain::Chain>,
}
|
|
|
|
|
|
|
|
impl Handler for ChainCompactHandler {
|
|
|
|
fn handle(&self, _req: &mut Request) -> IronResult<Response> {
|
|
|
|
w(&self.chain).compact().unwrap();
|
|
|
|
Ok(Response::with((status::Ok, "{}")))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-01-20 01:43:02 +03:00
|
|
|
/// Gets block details given either a hash or height.
/// GET /v1/blocks/<hash>
/// GET /v1/blocks/<height>
///
/// Optionally return results as "compact blocks" by passing "?compact" query param
/// GET /v1/blocks/<hash>?compact
///
pub struct BlockHandler {
	/// Weak reference to the chain; upgraded via `w()` on each request.
	pub chain: Weak<chain::Chain>,
}
|
|
|
|
|
|
|
|
impl BlockHandler {
|
|
|
|
fn get_block(&self, h: &Hash) -> Result<BlockPrintable, Error> {
|
2018-02-13 03:38:52 +03:00
|
|
|
let block = w(&self.chain).get_block(h).map_err(|_| Error::NotFound)?;
|
2018-03-04 03:19:54 +03:00
|
|
|
Ok(BlockPrintable::from_block(&block, w(&self.chain), false))
|
2017-12-05 21:55:32 +03:00
|
|
|
}
|
|
|
|
|
2018-01-20 01:43:02 +03:00
|
|
|
fn get_compact_block(&self, h: &Hash) -> Result<CompactBlockPrintable, Error> {
|
2018-02-13 03:38:52 +03:00
|
|
|
let block = w(&self.chain).get_block(h).map_err(|_| Error::NotFound)?;
|
2018-01-20 06:29:48 +03:00
|
|
|
Ok(CompactBlockPrintable::from_compact_block(
|
|
|
|
&block.as_compact_block(),
|
2018-02-13 03:38:52 +03:00
|
|
|
w(&self.chain),
|
2018-01-20 06:29:48 +03:00
|
|
|
))
|
2018-01-20 01:43:02 +03:00
|
|
|
}
|
|
|
|
|
2017-12-07 12:27:55 +03:00
|
|
|
// Try to decode the string as a height or a hash.
|
2017-12-05 21:55:32 +03:00
|
|
|
fn parse_input(&self, input: String) -> Result<Hash, Error> {
|
|
|
|
if let Ok(height) = input.parse() {
|
2018-02-13 03:38:52 +03:00
|
|
|
match w(&self.chain).get_header_by_height(height) {
|
2017-12-05 21:55:32 +03:00
|
|
|
Ok(header) => return Ok(header.hash()),
|
|
|
|
Err(_) => return Err(Error::NotFound),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
lazy_static! {
|
|
|
|
static ref RE: Regex = Regex::new(r"[0-9a-fA-F]{64}").unwrap();
|
|
|
|
}
|
|
|
|
if !RE.is_match(&input) {
|
2018-01-04 06:25:14 +03:00
|
|
|
return Err(Error::Argument(String::from("Not a valid hash or height.")));
|
2017-12-05 21:55:32 +03:00
|
|
|
}
|
|
|
|
let vec = util::from_hex(input).unwrap();
|
|
|
|
Ok(Hash::from_vec(vec))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl Handler for BlockHandler {
|
|
|
|
fn handle(&self, req: &mut Request) -> IronResult<Response> {
|
|
|
|
let url = req.url.clone();
|
|
|
|
let mut path_elems = url.path();
|
|
|
|
if *path_elems.last().unwrap() == "" {
|
|
|
|
path_elems.pop();
|
|
|
|
}
|
|
|
|
let el = *path_elems.last().unwrap();
|
|
|
|
let h = try!(self.parse_input(el.to_string()));
|
2018-01-20 01:43:02 +03:00
|
|
|
|
|
|
|
let mut compact = false;
|
|
|
|
if let Ok(params) = req.get_ref::<UrlEncodedQuery>() {
|
|
|
|
if let Some(_) = params.get("compact") {
|
|
|
|
compact = true;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if compact {
|
|
|
|
let b = try!(self.get_compact_block(&h));
|
|
|
|
json_response(&b)
|
|
|
|
} else {
|
|
|
|
let b = try!(self.get_block(&h));
|
|
|
|
json_response(&b)
|
|
|
|
}
|
2017-12-05 21:55:32 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-11-01 02:32:33 +03:00
|
|
|
// Get basic information about the transaction pool.
struct PoolInfoHandler<T> {
	// Weak reference to the shared transaction pool; upgraded via `w()`
	// and read-locked per request.
	tx_pool: Weak<RwLock<pool::TransactionPool<T>>>,
}
|
|
|
|
|
|
|
|
impl<T> Handler for PoolInfoHandler<T>
|
|
|
|
where
|
|
|
|
T: pool::BlockChain + Send + Sync + 'static,
|
|
|
|
{
|
|
|
|
fn handle(&self, _req: &mut Request) -> IronResult<Response> {
|
2018-02-13 03:38:52 +03:00
|
|
|
let pool_arc = w(&self.tx_pool);
|
|
|
|
let pool = pool_arc.read().unwrap();
|
2017-11-01 02:32:33 +03:00
|
|
|
json_response(&PoolInfo {
|
|
|
|
pool_size: pool.pool_size(),
|
|
|
|
orphans_size: pool.orphans_size(),
|
|
|
|
total_size: pool.total_size(),
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Dummy wrapper for the hex-encoded serialized transaction.
#[derive(Serialize, Deserialize)]
struct TxWrapper {
	/// Hex encoding of the binary-serialized transaction.
	tx_hex: String,
}
|
|
|
|
|
|
|
|
// Push new transactions to our transaction pool, that should broadcast it
// to the network if valid.
struct PoolPushHandler<T> {
	// Weak reference to the shared transaction pool; upgraded via `w()`
	// and write-locked per request.
	tx_pool: Weak<RwLock<pool::TransactionPool<T>>>,
}
|
|
|
|
|
|
|
|
impl<T> Handler for PoolPushHandler<T>
|
|
|
|
where
|
|
|
|
T: pool::BlockChain + Send + Sync + 'static,
|
|
|
|
{
|
|
|
|
fn handle(&self, req: &mut Request) -> IronResult<Response> {
|
2018-01-04 06:25:14 +03:00
|
|
|
let wrapper: TxWrapper = serde_json::from_reader(req.body.by_ref())
|
|
|
|
.map_err(|e| IronError::new(e, status::BadRequest))?;
|
2017-11-01 02:32:33 +03:00
|
|
|
|
2018-01-04 06:25:14 +03:00
|
|
|
let tx_bin = util::from_hex(wrapper.tx_hex)
|
|
|
|
.map_err(|_| Error::Argument(format!("Invalid hex in transaction wrapper.")))?;
|
2017-11-01 02:32:33 +03:00
|
|
|
|
|
|
|
let tx: Transaction = ser::deserialize(&mut &tx_bin[..]).map_err(|_| {
|
2018-01-04 06:25:14 +03:00
|
|
|
Error::Argument("Could not deserialize transaction, invalid format.".to_string())
|
2017-11-01 02:32:33 +03:00
|
|
|
})?;
|
|
|
|
|
|
|
|
let source = pool::TxSource {
|
|
|
|
debug_name: "push-api".to_string(),
|
|
|
|
identifier: "?.?.?.?".to_string(),
|
|
|
|
};
|
|
|
|
info!(
|
|
|
|
LOGGER,
|
|
|
|
"Pushing transaction with {} inputs and {} outputs to pool.",
|
|
|
|
tx.inputs.len(),
|
|
|
|
tx.outputs.len()
|
|
|
|
);
|
2018-01-11 08:25:48 +03:00
|
|
|
|
2018-02-13 03:38:52 +03:00
|
|
|
let pool_arc = w(&self.tx_pool);
|
|
|
|
let res = pool_arc.write().unwrap().add_to_memory_pool(source, tx);
|
2017-11-01 02:32:33 +03:00
|
|
|
|
2018-01-07 02:27:21 +03:00
|
|
|
match res {
|
|
|
|
Ok(()) => Ok(Response::with(status::Ok)),
|
2018-01-17 06:03:40 +03:00
|
|
|
Err(e) => {
|
|
|
|
debug!(LOGGER, "error - {:?}", e);
|
|
|
|
Err(IronError::from(Error::Argument(format!("{:?}", e))))
|
|
|
|
}
|
2018-01-07 02:27:21 +03:00
|
|
|
}
|
2017-11-01 02:32:33 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Utility to serialize a struct into JSON and produce a sensible IronResult
|
|
|
|
// out of it.
|
|
|
|
fn json_response<T>(s: &T) -> IronResult<Response>
|
|
|
|
where
|
|
|
|
T: Serialize,
|
|
|
|
{
|
2017-11-07 19:48:37 +03:00
|
|
|
match serde_json::to_string(s) {
|
2017-11-01 02:32:33 +03:00
|
|
|
Ok(json) => Ok(Response::with((status::Ok, json))),
|
|
|
|
Err(_) => Ok(Response::with((status::InternalServerError, ""))),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-11-07 19:48:37 +03:00
|
|
|
// pretty-printed version of above
|
|
|
|
fn json_response_pretty<T>(s: &T) -> IronResult<Response>
|
|
|
|
where
|
|
|
|
T: Serialize,
|
|
|
|
{
|
|
|
|
match serde_json::to_string_pretty(s) {
|
|
|
|
Ok(json) => Ok(Response::with((status::Ok, json))),
|
|
|
|
Err(_) => Ok(Response::with((status::InternalServerError, ""))),
|
|
|
|
}
|
|
|
|
}
|
2017-11-01 02:32:33 +03:00
|
|
|
/// Start all server HTTP handlers. Register all of them with Iron
/// and runs the corresponding HTTP server.
///
/// Hyper currently has a bug that prevents clean shutdown. In order
/// to avoid having references kept forever by handlers, we only pass
/// weak references. Note that this likely means a crash if the handlers are
/// used after a server shutdown (which should normally never happen,
/// except during tests).
pub fn start_rest_apis<T>(
	addr: String,
	chain: Weak<chain::Chain>,
	tx_pool: Weak<RwLock<pool::TransactionPool<T>>>,
	peers: Weak<p2p::Peers>,
) where
	T: pool::BlockChain + Send + Sync + 'static,
{
	// The join handle is intentionally dropped: the API thread is expected to
	// live for the remainder of the process.
	let _ = thread::Builder::new()
		.name("apis".to_string())
		.spawn(move || {
			// build handlers and register them under the appropriate endpoint
			let output_handler = OutputHandler {
				chain: chain.clone(),
			};
			let block_handler = BlockHandler {
				chain: chain.clone(),
			};
			let chain_tip_handler = ChainHandler {
				chain: chain.clone(),
			};
			let chain_compact_handler = ChainCompactHandler {
				chain: chain.clone(),
			};
			let status_handler = StatusHandler {
				chain: chain.clone(),
				peers: peers.clone(),
			};
			let txhashset_handler = TxHashSetHandler {
				chain: chain.clone(),
			};
			let pool_info_handler = PoolInfoHandler {
				tx_pool: tx_pool.clone(),
			};
			let pool_push_handler = PoolPushHandler {
				tx_pool: tx_pool.clone(),
			};
			let peers_all_handler = PeersAllHandler {
				peers: peers.clone(),
			};
			let peers_connected_handler = PeersConnectedHandler {
				peers: peers.clone(),
			};
			let peer_post_handler = PeerPostHandler {
				peers: peers.clone(),
			};
			let peer_get_handler = PeerGetHandler {
				peers: peers.clone(),
			};

			// Human-readable route index served at GET /v1/ by IndexHandler.
			let route_list = vec![
				"get blocks".to_string(),
				"get chain".to_string(),
				"get chain/compact".to_string(),
				"get chain/outputs".to_string(),
				"get status".to_string(),
				"get txhashset/roots".to_string(),
				"get txhashset/lastoutputs?n=10".to_string(),
				"get txhashset/lastrangeproofs".to_string(),
				"get txhashset/lastkernels".to_string(),
				"get pool".to_string(),
				"post pool/push".to_string(),
				"post peers/a.b.c.d:p/ban".to_string(),
				"post peers/a.b.c.d:p/unban".to_string(),
				"get peers/all".to_string(),
				"get peers/connected".to_string(),
				"get peers/a.b.c.d".to_string(),
			];
			let index_handler = IndexHandler { list: route_list };

			// NOTE(review): the `peer` route id is used twice below (post and
			// get variants); confirm the router! macro tolerates duplicate ids.
			let router = router!(
				index: get "/" => index_handler,
				blocks: get "/blocks/*" => block_handler,
				chain_tip: get "/chain" => chain_tip_handler,
				chain_compact: get "/chain/compact" => chain_compact_handler,
				chain_outputs: get "/chain/outputs/*" => output_handler,
				status: get "/status" => status_handler,
				txhashset_roots: get "/txhashset/*" => txhashset_handler,
				pool_info: get "/pool" => pool_info_handler,
				pool_push: post "/pool/push" => pool_push_handler,
				peers_all: get "/peers/all" => peers_all_handler,
				peers_connected: get "/peers/connected" => peers_connected_handler,
				peer: post "/peers/*" => peer_post_handler,
				peer: get "/peers/*" => peer_get_handler
			);

			let mut apis = ApiServer::new("/v1".to_string());
			apis.register_handler(router);

			info!(LOGGER, "Starting HTTP API server at {}.", addr);
			// A failed bind/start is logged but does not crash the process.
			apis.start(&addr[..]).unwrap_or_else(|e| {
				error!(LOGGER, "Failed to start API HTTP server: {}.", e);
			});
		});
}
|