// Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::cmp;
use std::env;
use std::fs::File;
use std::io::{BufWriter, Write};
use std::net::SocketAddr;
use std::sync::Arc;

use crate::conn::{Message, MessageHandler, Response};
use crate::core::core::{self, hash::Hash, CompactBlock};
use crate::util::{RateCounter, RwLock};
use chrono::prelude::Utc;

use crate::msg::{
	BanReason, GetPeerAddrs, Headers, Locator, PeerAddrs, Ping, Pong, SockAddr, TxHashSetArchive,
	TxHashSetRequest, Type,
};
use crate::types::{Error, NetAdapter};
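
/// Protocol handler for a single peer connection. It bundles the peer's socket
/// address with the `NetAdapter` used to pass anything we receive up to the
/// rest of the node.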
pub struct Protocol {
	adapter: Arc<dyn NetAdapter>,
	addr: SocketAddr,
}

impl Protocol {
	pub fn new(adapter: Arc<dyn NetAdapter>, addr: SocketAddr) -> Protocol {
		Protocol { adapter, addr }
	}
}
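
// `consume` is called for every complete message read from this peer. It
// dispatches on the message type and returns either a `Response` to be written
// back or `None` if no reply is needed. The `received_bytes` counter is only
// touched while streaming a txhashset attachment, and then only "quietly" so a
// large download does not get the peer flagged as abusive.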
impl MessageHandler for Protocol {
	fn consume<'a>(
		&self,
		mut msg: Message<'a>,
		writer: &'a mut dyn Write,
		received_bytes: Arc<RwLock<RateCounter>>,
	) -> Result<Option<Response<'a>>, Error> {
		let adapter = &self.adapter;

		// If we received a msg from a banned peer then log and drop it.
		// If we are getting a lot of these then maybe we are not cleaning
		// banned peers up correctly?
		if adapter.is_banned(self.addr.clone()) {
			debug!(
				"handler: consume: peer {:?} banned, received: {:?}, dropping.",
				self.addr, msg.header.msg_type,
			);
			return Ok(None);
		}

		match msg.header.msg_type {
			Type::Ping => {
				let ping: Ping = msg.body()?;
				adapter.peer_difficulty(self.addr, ping.total_difficulty, ping.height);

				Ok(Some(Response::new(
					Type::Pong,
					Pong {
						total_difficulty: adapter.total_difficulty(),
						height: adapter.total_height(),
					},
					writer,
				)))
			}

			Type::Pong => {
				let pong: Pong = msg.body()?;
				adapter.peer_difficulty(self.addr, pong.total_difficulty, pong.height);
				Ok(None)
			}

			Type::BanReason => {
				let ban_reason: BanReason = msg.body()?;
				error!("handle_payload: BanReason {:?}", ban_reason);
				Ok(None)
			}

			Type::TransactionKernel => {
				let h: Hash = msg.body()?;
				debug!(
					"handle_payload: received tx kernel: {}, msg_len: {}",
					h, msg.header.msg_len
				);
				adapter.tx_kernel_received(h, self.addr);
				Ok(None)
			}

			Type::GetTransaction => {
				let h: Hash = msg.body()?;
				debug!(
					"handle_payload: GetTransaction: {}, msg_len: {}",
					h, msg.header.msg_len,
				);
				let tx = adapter.get_transaction(h);
				if let Some(tx) = tx {
					Ok(Some(Response::new(Type::Transaction, tx, writer)))
				} else {
					Ok(None)
				}
			}

			Type::Transaction => {
				debug!(
					"handle_payload: received tx: msg_len: {}",
					msg.header.msg_len
				);
				let tx: core::Transaction = msg.body()?;
				adapter.transaction_received(tx, false);
				Ok(None)
			}

			Type::StemTransaction => {
				debug!(
					"handle_payload: received stem tx: msg_len: {}",
					msg.header.msg_len
				);
				let tx: core::Transaction = msg.body()?;
				adapter.transaction_received(tx, true);
				Ok(None)
			}

			Type::GetBlock => {
				let h: Hash = msg.body()?;
				trace!(
					"handle_payload: GetBlock: {}, msg_len: {}",
					h,
					msg.header.msg_len,
				);

				let bo = adapter.get_block(h);
				if let Some(b) = bo {
					return Ok(Some(Response::new(Type::Block, b, writer)));
				}
				Ok(None)
			}

			Type::Block => {
				debug!(
					"handle_payload: received block: msg_len: {}",
					msg.header.msg_len
				);
				let b: core::Block = msg.body()?;

				// We can't know at this level whether we requested the block or not;
				// the boolean should be set properly in the higher-level adapter.
				adapter.block_received(b, self.addr, false);
				Ok(None)
			}
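
			// The compact block is built on demand from the full block we have locally.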
			Type::GetCompactBlock => {
				let h: Hash = msg.body()?;
				if let Some(b) = adapter.get_block(h) {
					let cb: CompactBlock = b.into();
					Ok(Some(Response::new(Type::CompactBlock, cb, writer)))
				} else {
					Ok(None)
				}
			}

			Type::CompactBlock => {
				debug!(
					"handle_payload: received compact block: msg_len: {}",
					msg.header.msg_len
				);
				let b: core::CompactBlock = msg.body()?;

				adapter.compact_block_received(b, self.addr);
				Ok(None)
			}

			Type::GetHeaders => {
				// load headers from the locator
				let loc: Locator = msg.body()?;
				let headers = adapter.locate_headers(&loc.hashes);

				// serialize and send all the headers over
				Ok(Some(Response::new(
					Type::Headers,
					Headers { headers },
					writer,
				)))
			}

			// "header first" block propagation - if we have not yet seen this block
			// we can go request it from some of our peers
			Type::Header => {
				let header: core::BlockHeader = msg.body()?;
				adapter.header_received(header, self.addr);
				Ok(None)
			}
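
			// Headers are read off the stream incrementally: first the count, then the
			// headers themselves in batches of 32, each batch handed to the adapter as
			// soon as it has been deserialized.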
			Type::Headers => {
				let mut total_bytes_read = 0;

				// Read the count (u16) so we know how many headers to read.
				let (count, bytes_read): (u16, _) = msg.streaming_read()?;
				total_bytes_read += bytes_read;

				// Read chunks of headers off the stream and pass them off to the adapter.
				let chunk_size = 32;
				for chunk in (0..count).collect::<Vec<_>>().chunks(chunk_size) {
					let mut headers = vec![];
					for _ in chunk {
						let (header, bytes_read) = msg.streaming_read()?;
						headers.push(header);
						total_bytes_read += bytes_read;
					}
					adapter.headers_received(&headers, self.addr);
				}

				// Now check we read the correct total number of bytes off the stream.
				if total_bytes_read != msg.header.msg_len {
					return Err(Error::MsgLen);
				}

				Ok(None)
			}

			Type::GetPeerAddrs => {
				let get_peers: GetPeerAddrs = msg.body()?;
				let peer_addrs = adapter.find_peer_addrs(get_peers.capabilities);
				Ok(Some(Response::new(
					Type::PeerAddrs,
					PeerAddrs {
						peers: peer_addrs.iter().map(|sa| SockAddr(*sa)).collect(),
					},
					writer,
				)))
			}

			Type::PeerAddrs => {
				let peer_addrs: PeerAddrs = msg.body()?;
				adapter.peer_addrs_received(peer_addrs.peers.iter().map(|pa| pa.0).collect());
				Ok(None)
			}
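
			// Used during state sync: the peer asks for the txhashset at a given block
			// and we reply with a TxHashSetArchive message plus the zip file itself as
			// an attachment.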
			Type::TxHashSetRequest => {
				let sm_req: TxHashSetRequest = msg.body()?;
				debug!(
					"handle_payload: txhashset req for {} at {}",
					sm_req.hash, sm_req.height
				);

				let txhashset = self.adapter.txhashset_read(sm_req.hash);

				if let Some(txhashset) = txhashset {
					let file_sz = txhashset.reader.metadata()?.len();
					let mut resp = Response::new(
						Type::TxHashSetArchive,
						&TxHashSetArchive {
							height: sm_req.height as u64,
							hash: sm_req.hash,
							bytes: file_sz,
						},
						writer,
					);
					resp.add_attachment(txhashset.reader);
					Ok(Some(resp))
				} else {
					Ok(None)
				}
			}

			Type::TxHashSetArchive => {
				let sm_arch: TxHashSetArchive = msg.body()?;
				debug!(
					"handle_payload: txhashset archive for {} at {}. size={}",
					sm_arch.hash, sm_arch.height, sm_arch.bytes,
				);
				if !self.adapter.txhashset_receive_ready() {
					error!(
						"handle_payload: txhashset archive received but SyncStatus not on TxHashsetDownload",
					);
					return Err(Error::BadMessage);
				}

				let download_start_time = Utc::now();
				self.adapter
					.txhashset_download_update(download_start_time, 0, sm_arch.bytes);

				let mut tmp = env::temp_dir();
				tmp.push("txhashset.zip");
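
				// Stream the zip attachment into the temp file in chunks of at most
				// 48_000 bytes, reporting download progress to the adapter as we go.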
				let mut save_txhashset_to_file = |file| -> Result<(), Error> {
					let mut tmp_zip = BufWriter::new(File::create(file)?);
					let total_size = sm_arch.bytes as usize;
					let mut downloaded_size: usize = 0;
					let mut request_size = cmp::min(48_000, total_size);
					while request_size > 0 {
						let size = msg.copy_attachment(request_size, &mut tmp_zip)?;
						downloaded_size += size;
						request_size = cmp::min(48_000, total_size - downloaded_size);
						self.adapter.txhashset_download_update(
							download_start_time,
							downloaded_size as u64,
							total_size as u64,
						);

						// Increase received bytes quietly (without affecting the counters).
						// Otherwise we risk banning a peer as "abusive".
						{
							let mut received_bytes = received_bytes.write();
							received_bytes.inc_quiet(size as u64);
						}
					}
					tmp_zip.into_inner().unwrap().sync_all()?;
					Ok(())
				};

				if let Err(e) = save_txhashset_to_file(tmp.clone()) {
					error!(
						"handle_payload: txhashset archive save to file fail. err={:?}",
						e
					);
					return Err(e);
				}

				trace!(
					"handle_payload: txhashset archive save to file {:?} success",
					tmp,
				);

				let tmp_zip = File::open(tmp)?;
				let res = self
					.adapter
					.txhashset_write(sm_arch.hash, tmp_zip, self.addr);

				debug!(
					"handle_payload: txhashset archive for {} at {}, DONE. Data Ok: {}",
					sm_arch.hash, sm_arch.height, res
				);

				Ok(None)
			}

			_ => {
				debug!("unknown message type {:?}", msg.header.msg_type);
				Ok(None)
			}
		}
	}
}