Merged upstream

Commit e4ecc30884 by Ignotus Peverell, 2018-12-11 18:54:44 +00:00
No known key found for this signature in database (GPG key ID: 99CD25F39F8F8211)
218 changed files with 3213 additions and 2606 deletions

1 .gitignore vendored
View file

@ -3,6 +3,7 @@
.grin*
node*
!node_clients
!node_clients.rs
target
Cargo.lock
*.iml

View file

@ -40,6 +40,8 @@ env:
matrix:
include:
- os: linux
env: TEST_SUITE=.
- os: linux
env: TEST_SUITE=servers
- os: linux

174 Cargo.lock generated
View file

@ -170,7 +170,7 @@ dependencies = [
"git2 0.7.5 (registry+https://github.com/rust-lang/crates.io-index)",
"semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
"time 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
"toml 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
"toml 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -235,7 +235,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"num-integer 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)",
"num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"time 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -304,8 +304,11 @@ dependencies = [
[[package]]
name = "crc32fast"
version = "1.1.1"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "croaring"
@ -349,7 +352,7 @@ version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"crossbeam-epoch 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam-utils 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam-utils 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -359,7 +362,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"arrayvec 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam-utils 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam-utils 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
@ -372,7 +375,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "crossbeam-utils"
version = "0.6.1"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
@ -430,7 +433,7 @@ dependencies = [
"owning_ref 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"signal-hook 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
"term_size 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"toml 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
"toml 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-segmentation 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-width 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"xi-unicode 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -589,7 +592,7 @@ name = "flate2"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"crc32fast 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"crc32fast 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.44 (registry+https://github.com/rust-lang/crates.io-index)",
"miniz-sys 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"miniz_oxide_c_api 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -699,7 +702,7 @@ dependencies = [
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"reqwest 0.9.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rpassword 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
"tar 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -719,17 +722,17 @@ dependencies = [
"grin_store 0.4.2",
"grin_util 0.4.2",
"http 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
"hyper 0.12.16 (registry+https://github.com/rust-lang/crates.io-index)",
"hyper 0.12.17 (registry+https://github.com/rust-lang/crates.io-index)",
"hyper-rustls 0.14.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"ring 0.13.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rustls 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio-core 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio-rustls 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio-tcp 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -755,8 +758,8 @@ dependencies = [
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"lru-cache 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -770,9 +773,9 @@ dependencies = [
"grin_wallet 0.4.2",
"pretty_assertions 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"toml 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"toml 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -794,8 +797,8 @@ dependencies = [
"num 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"num-bigint 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"siphasher 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
"uuid 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -814,8 +817,8 @@ dependencies = [
"pbkdf2 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
"ripemd160 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
"sha2 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"uuid 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
@ -838,8 +841,8 @@ dependencies = [
"net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
"num 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -855,13 +858,13 @@ dependencies = [
"grin_util 0.4.2",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "grin_secp256k1zkp"
version = "0.7.1"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"arrayvec 0.3.25 (registry+https://github.com/rust-lang/crates.io-index)",
@ -869,7 +872,7 @@ dependencies = [
"libc 0.2.44 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -883,7 +886,6 @@ dependencies = [
"futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)",
"grin_api 0.4.2",
"grin_chain 0.4.2",
"grin_config 0.4.2",
"grin_core 0.4.2",
"grin_keychain 0.4.2",
"grin_p2p 0.4.2",
@ -892,15 +894,15 @@ dependencies = [
"grin_util 0.4.2",
"grin_wallet 0.4.2",
"http 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
"hyper 0.12.16 (registry+https://github.com/rust-lang/crates.io-index)",
"hyper 0.12.17 (registry+https://github.com/rust-lang/crates.io-index)",
"hyper-staticfile 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)",
"jsonrpc-core 8.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"lmdb-zero 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -914,6 +916,7 @@ dependencies = [
"env_logger 0.5.13 (registry+https://github.com/rust-lang/crates.io-index)",
"failure 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"failure_derive 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"filetime 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
"grin_core 0.4.2",
"grin_util 0.4.2",
"libc 0.2.44 (registry+https://github.com/rust-lang/crates.io-index)",
@ -921,8 +924,8 @@ dependencies = [
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"memmap 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -932,14 +935,14 @@ dependencies = [
"backtrace 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
"base64 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)",
"byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
"grin_secp256k1zkp 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"grin_secp256k1zkp 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"log4rs 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"walkdir 2.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
"zip 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -961,16 +964,16 @@ dependencies = [
"grin_keychain 0.4.2",
"grin_store 0.4.2",
"grin_util 0.4.2",
"hyper 0.12.16 (registry+https://github.com/rust-lang/crates.io-index)",
"hyper 0.12.17 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"prettytable-rs 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
"ring 0.13.5 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio-core 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio-retry 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -979,7 +982,7 @@ dependencies = [
[[package]]
name = "h2"
version = "0.1.13"
version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1033,13 +1036,13 @@ dependencies = [
[[package]]
name = "hyper"
version = "0.12.16"
version = "0.12.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bytes 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
"futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)",
"futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
"h2 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
"h2 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
"http 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
"httparse 1.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1047,7 +1050,7 @@ dependencies = [
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
"time 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio-executor 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio-io 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio-reactor 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1065,7 +1068,7 @@ dependencies = [
"ct-logs 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)",
"http 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
"hyper 0.12.16 (registry+https://github.com/rust-lang/crates.io-index)",
"hyper 0.12.17 (registry+https://github.com/rust-lang/crates.io-index)",
"rustls 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio-core 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio-io 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1083,8 +1086,8 @@ dependencies = [
"chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)",
"http 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
"hyper 0.12.16 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
"hyper 0.12.17 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -1095,7 +1098,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bytes 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
"futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)",
"hyper 0.12.16 (registry+https://github.com/rust-lang/crates.io-index)",
"hyper 0.12.17 (registry+https://github.com/rust-lang/crates.io-index)",
"native-tls 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio-io 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -1144,8 +1147,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -1165,7 +1168,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "lazycell"
version = "1.2.0"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
@ -1180,7 +1183,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"adler32 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
"crc32fast 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"crc32fast 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -1267,7 +1270,7 @@ version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -1289,9 +1292,9 @@ dependencies = [
"libc 0.2.44 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"log-mdc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde-value 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_yaml 0.8.8 (registry+https://github.com/rust-lang/crates.io-index)",
"thread-id 3.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1405,7 +1408,7 @@ dependencies = [
"fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lazycell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.44 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -2000,17 +2003,17 @@ dependencies = [
"encoding_rs 0.8.13 (registry+https://github.com/rust-lang/crates.io-index)",
"futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)",
"http 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
"hyper 0.12.16 (registry+https://github.com/rust-lang/crates.io-index)",
"hyper 0.12.17 (registry+https://github.com/rust-lang/crates.io-index)",
"hyper-tls 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libflate 0.1.19 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"mime 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
"mime_guess 2.0.0-alpha.6 (registry+https://github.com/rust-lang/crates.io-index)",
"native-tls 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_urlencoded 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio-io 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"uuid 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -2164,7 +2167,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "serde"
version = "1.0.80"
version = "1.0.81"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
@ -2173,12 +2176,12 @@ version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"ordered-float 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "serde_derive"
version = "1.0.80"
version = "1.0.81"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
@ -2193,7 +2196,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"itoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ryu 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -2203,7 +2206,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"dtoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
"itoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -2214,7 +2217,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"dtoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
"linked-hash-map 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"yaml-rust 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -2406,13 +2409,12 @@ dependencies = [
[[package]]
name = "tokio"
version = "0.1.13"
version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bytes 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
"futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)",
"mio 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio-codec 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio-current-thread 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio-executor 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
@ -2447,7 +2449,7 @@ dependencies = [
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"mio 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)",
"scoped-tls 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio-executor 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio-io 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio-reactor 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
@ -2496,7 +2498,7 @@ name = "tokio-reactor"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"crossbeam-utils 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam-utils 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
"futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
@ -2525,7 +2527,7 @@ version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"rustls 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"webpki 0.18.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -2556,7 +2558,7 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"crossbeam-deque 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam-utils 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam-utils 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
"futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -2569,7 +2571,7 @@ name = "tokio-timer"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"crossbeam-utils 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam-utils 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
"futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)",
"slab 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio-executor 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
@ -2608,10 +2610,10 @@ dependencies = [
[[package]]
name = "toml"
version = "0.4.9"
version = "0.4.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -2729,7 +2731,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -2938,7 +2940,7 @@ dependencies = [
"checksum core-foundation 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "286e0b41c3a20da26536c6000a280585d519fd07b3956b43aed8a79e9edce980"
"checksum core-foundation-sys 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "716c271e8613ace48344f723b60b900a93150271e5be206212d052bbc0883efa"
"checksum crc 1.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d663548de7f5cca343f1e0a48d14dcfb0e9eb4e079ec58883b7251539fa10aeb"
"checksum crc32fast 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e0e685559fa8bccfa46afd0f876047ee5d87c536d71d0c2b3a08cc9e880f73eb"
"checksum crc32fast 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e91d5240c6975ef33aeb5f148f35275c25eda8e8a5f95abe421978b05b8bf192"
"checksum croaring 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)" = "a2c82431f150237fc25ef9ece26ccbcc8325118f44a538b48449a7639cb6e9cf"
"checksum croaring-sys 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)" = "d36e44ca368664098be5d03576da36edd3e2c728df553f13f89cb25fbc3792c5"
"checksum crossbeam 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "24ce9782d4d5c53674646a6a4c1863a21a8fc0cb649b3c94dfc16e45071dea19"
@ -2946,7 +2948,7 @@ dependencies = [
"checksum crossbeam-deque 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4fe1b6f945f824c7a25afe44f62e25d714c0cc523f8e99d8db5cd1026e1269d3"
"checksum crossbeam-epoch 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2449aaa4ec7ef96e5fb24db16024b935df718e9ae1cec0a1e68feeca2efca7b8"
"checksum crossbeam-utils 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "677d453a17e8bd2b913fa38e8b9cf04bcdbb5be790aa294f2389661d72036015"
"checksum crossbeam-utils 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c55913cc2799171a550e307918c0a360e8c16004820291bf3b638969b4a01816"
"checksum crossbeam-utils 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e07fc155212827475223f0bcfae57e945e694fc90950ddf3f6695bbfd5555c72"
"checksum crypto-mac 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7afa06d05a046c7a47c3a849907ec303504608c927f4e85f7bfff22b7180d971"
"checksum csv 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7ef22b37c7a51c564a365892c012dc0271221fdcc64c69b19ba4d6fa8bd96d9c"
"checksum ct-logs 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "95a4bf5107667e12bf6ce31a3a5066d67acc88942b6742117a41198734aaccaa"
@ -2982,14 +2984,14 @@ dependencies = [
"checksum generic-array 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ef25c5683767570c2bbd7deba372926a55eaae9982d7726ee2a1050239d45b9d"
"checksum git2 0.7.5 (registry+https://github.com/rust-lang/crates.io-index)" = "591f8be1674b421644b6c030969520bc3fa12114d2eb467471982ed3e9584e71"
"checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
"checksum grin_secp256k1zkp 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "42fd2759f1fb49fcbbf74fc9a818a0a32be49622395e82d88a26bba42a9bdd71"
"checksum h2 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "7dd33bafe2e6370e6c8eb0cf1b8c5f93390b90acde7e9b03723f166b28b648ed"
"checksum grin_secp256k1zkp 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "aea95f8b846440f6a9caf0fd4c22c91c124f2a896d69d781f7dc0fa88e33b0ff"
"checksum h2 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = "1ac030ae20dee464c5d0f36544d8b914a6bc606da44a57e052d2b0f5dae129e0"
"checksum hmac 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "733e1b3ac906631ca01ebb577e9bb0f5e37a454032b9036b5eaea4013ed6f99a"
"checksum http 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = "02096a6d2c55e63f7fcb800690e4f889a25f6ec342e3adb4594e293b625215ab"
"checksum httparse 1.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e8734b0cfd3bc3e101ec59100e101c2eecd19282202e87808b3037b442777a83"
"checksum humansize 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b6cab2627acfc432780848602f3f558f7e9dd427352224b0d9324025796d2a5e"
"checksum humantime 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3ca7e5f2e110db35f93b837c81797f3714500b81d517bf20c431b16d3ca4f114"
"checksum hyper 0.12.16 (registry+https://github.com/rust-lang/crates.io-index)" = "0aeedb8ca5f0f96be00f84073c6d0d5f962ecad020ef543dff99a7c12717a60e"
"checksum hyper 0.12.17 (registry+https://github.com/rust-lang/crates.io-index)" = "c49a75385d35ff5e9202755f09beb0b878a05c4c363fcc52b23eeb5dcb6782cc"
"checksum hyper-rustls 0.14.0 (registry+https://github.com/rust-lang/crates.io-index)" = "68f2aa6b1681795bf4da8063f718cd23145aa0c9a5143d9787b345aa60d38ee4"
"checksum hyper-staticfile 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4080cb44b9c1e4c6dfd6f7ee85a9c3439777ec9c59df32f944836d3de58ac35e"
"checksum hyper-tls 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "32cd73f14ad370d3b4d4b7dce08f69b81536c82e39fcc89731930fe5788cd661"
@ -3001,7 +3003,7 @@ dependencies = [
"checksum jsonrpc-core 8.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ddf83704f4e79979a424d1082dd2c1e52683058056c9280efa19ac5f6bc9033c"
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
"checksum lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a374c89b9db55895453a74c1e38861d9deec0b01b405a82516e9d5de4820dea1"
"checksum lazycell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ddba4c30a78328befecec92fc94970e53b3ae385827d28620f0f5bb2493081e0"
"checksum lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f"
"checksum libc 0.2.44 (registry+https://github.com/rust-lang/crates.io-index)" = "10923947f84a519a45c8fefb7dd1b3e8c08747993381adee176d7a82b4195311"
"checksum libflate 0.1.19 (registry+https://github.com/rust-lang/crates.io-index)" = "bff3ac7d6f23730d3b533c35ed75eef638167634476a499feef16c428d74b57b"
"checksum libgit2-sys 0.7.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4916b5addc78ec36cc309acfcdf0b9f9d97ab7b84083118b248709c5b7029356"
@ -3113,9 +3115,9 @@ dependencies = [
"checksum security-framework-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ab01dfbe5756785b5b4d46e0289e5a18071dfa9a7c2b24213ea00b9ef9b665bf"
"checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
"checksum serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)" = "15c141fc7027dd265a47c090bf864cf62b42c4d228bbcf4e51a0c9e2b0d3f7ef"
"checksum serde 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)" = "c91eb5b0190ae87b4e2e39cbba6e3bed3ac6186935fe265f0426156c4c49961b"
"checksum serde-value 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7a663f873dedc4eac1a559d4c6bc0d0b2c34dc5ac4702e105014b8281489e44f"
"checksum serde_derive 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)" = "225de307c6302bec3898c51ca302fc94a7a1697ef0845fcee6448f33c032249c"
"checksum serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)" = "477b13b646f5b5b56fc95bedfc3b550d12141ce84f466f6c44b9a17589923885"
"checksum serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)" = "c37ccd6be3ed1fdf419ee848f7c758eb31b054d7cd3ae3600e3bae0adf569811"
"checksum serde_urlencoded 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)" = "d48f9f99cd749a2de71d29da5f948de7f2764cc5a9d7f3c97e3514d4ee6eabf2"
"checksum serde_yaml 0.8.8 (registry+https://github.com/rust-lang/crates.io-index)" = "0887a8e097a69559b56aa2526bf7aff7c3048cf627dff781f0b56a6001534593"
@ -3141,7 +3143,7 @@ dependencies = [
"checksum thread-id 3.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c7fbf4c9d56b320106cd64fd024dadfa0be7cb4706725fc44a7d7ce952d820c1"
"checksum thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b"
"checksum time 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "d825be0eb33fda1a7e68012d51e9c7f451dc1a69391e7fdc197060bb8c56667b"
"checksum tokio 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "a7817d4c98cc5be21360b3b37d6036fe9b7aefa5b7a201b7b16ff33423822f7d"
"checksum tokio 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "6e93c78d23cc61aa245a8acd2c4a79c4d7fa7fb5c3ca90d5737029f043a84895"
"checksum tokio-codec 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5c501eceaf96f0e1793cf26beb63da3d11c738c4a943fdf3746d81d64684c39f"
"checksum tokio-core 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "aeeffbbb94209023feaef3c196a41cbcdafa06b4a6f893f68779bb5e53796f71"
"checksum tokio-current-thread 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "331c8acc267855ec06eb0c94618dcbbfea45bed2d20b77252940095273fb58f6"
@ -3157,7 +3159,7 @@ dependencies = [
"checksum tokio-timer 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "4f37f0111d76cc5da132fe9bc0590b9b9cfd079bc7e75ac3846278430a299ff8"
"checksum tokio-udp 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "66268575b80f4a4a710ef83d087fdfeeabdce9b74c797535fbac18a2cb906e92"
"checksum tokio-uds 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "99ce87382f6c1a24b513a72c048b2c8efe66cb5161c9061d00bee510f08dc168"
"checksum toml 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)" = "19782e145d5abefb03758958f06ea35f7b1d8421b534140e0238fd3d0bfd66e3"
"checksum toml 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "758664fc71a3a69038656bee8b6be6477d2a6c315a6b81f7081f591bffa4111f"
"checksum traitobject 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "efd1f82c56340fdf16f2a953d7bda4f8fdffba13d93b00844c25572110b26079"
"checksum try-lock 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e604eb7b43c06650e854be16a2a03155743d3752dd1c943f6829e26b7a36e382"
"checksum typemap 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "653be63c80a3296da5551e1bfd2cca35227e13cdd08c6668903ae2f4f77aa1f6"

View file

@ -9,6 +9,7 @@ keywords = [ "crypto", "grin", "mimblewimble" ]
readme = "README.md"
exclude = ["**/*.grin", "**/*.grin2"]
build = "src/build/build.rs"
edition = "2018"
[workspace]
members = ["api", "chain", "config", "core", "keychain", "p2p", "servers", "store", "util", "pool", "wallet"]

View file

@ -7,6 +7,7 @@ license = "Apache-2.0"
repository = "https://github.com/mimblewimble/grin"
keywords = [ "crypto", "grin", "mimblewimble" ]
workspace = ".."
edition = "2018"
[dependencies]
failure = "0.1.1"

View file

@ -12,11 +12,11 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::router::{Handler, HandlerObj, ResponseFuture};
use futures::future::ok;
use hyper::header::{HeaderValue, AUTHORIZATION, WWW_AUTHENTICATE};
use hyper::{Body, Request, Response, StatusCode};
use ring::constant_time::verify_slices_are_equal;
use router::{Handler, HandlerObj, ResponseFuture};
// Basic Authentication Middleware
pub struct BasicAuthMiddleware {
@ -37,12 +37,17 @@ impl Handler for BasicAuthMiddleware {
fn call(
&self,
req: Request<Body>,
mut handlers: Box<Iterator<Item = HandlerObj>>,
mut handlers: Box<dyn Iterator<Item = HandlerObj>>,
) -> ResponseFuture {
if req.headers().contains_key(AUTHORIZATION) && verify_slices_are_equal(
req.headers()[AUTHORIZATION].as_bytes(),
&self.api_basic_auth.as_bytes(),
).is_ok()
if req.method().as_str() == "OPTIONS" {
return handlers.next().unwrap().call(req, handlers);
}
if req.headers().contains_key(AUTHORIZATION)
&& verify_slices_are_equal(
req.headers()[AUTHORIZATION].as_bytes(),
&self.api_basic_auth.as_bytes(),
)
.is_ok()
{
handlers.next().unwrap().call(req, handlers)
} else {
@ -58,7 +63,8 @@ fn unauthorized_response(basic_realm: &str) -> ResponseFuture {
.header(
WWW_AUTHENTICATE,
HeaderValue::from_str(basic_realm).unwrap(),
).body(Body::empty())
)
.body(Body::empty())
.unwrap();
Box::new(ok(response))
}
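The hunk above makes two changes to BasicAuthMiddleware: OPTIONS requests (CORS pre-flight, which never carry credentials) now bypass the check, and the Authorization header is still compared in constant time. A minimal, std-only sketch of that decision logic follows; the names are illustrative only, and the real handler works on hyper's Request/Response types and delegates the comparison to ring::constant_time::verify_slices_are_equal.

fn constant_time_eq(a: &[u8], b: &[u8]) -> bool {
    if a.len() != b.len() {
        return false;
    }
    // Accumulate differences with XOR/OR so the time taken does not depend
    // on where the first mismatching byte is.
    a.iter().zip(b.iter()).fold(0u8, |acc, (x, y)| acc | (x ^ y)) == 0
}

fn authorized(method: &str, auth_header: Option<&[u8]>, expected: &[u8]) -> bool {
    // CORS pre-flight requests carry no credentials, so OPTIONS passes through.
    if method == "OPTIONS" {
        return true;
    }
    match auth_header {
        Some(given) => constant_time_eq(given, expected),
        None => false,
    }
}

fn main() {
    let secret = b"Basic Z3JpbjpzZWNyZXQ="; // hypothetical basic-auth value
    assert!(authorized("OPTIONS", None, secret));
    assert!(authorized("GET", Some(secret.as_ref()), secret));
    assert!(!authorized("POST", None, secret));
}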

View file

@ -14,22 +14,20 @@
//! High level JSON/HTTP client API
use crate::rest::{Error, ErrorKind};
use crate::util::to_base64;
use failure::{Fail, ResultExt};
use futures::future::{err, ok, Either};
use http::uri::{InvalidUri, Uri};
use hyper::header::{ACCEPT, AUTHORIZATION, USER_AGENT};
use hyper::rt::{Future, Stream};
use hyper::{Body, Client, Request};
use hyper_rustls;
use serde::{Deserialize, Serialize};
use serde_json;
use futures::future::{err, ok, Either};
use hyper_rustls;
use tokio::runtime::Runtime;
use rest::{Error, ErrorKind};
use util::to_base64;
pub type ClientResponseFuture<T> = Box<Future<Item = T, Error = Error> + Send>;
pub type ClientResponseFuture<T> = Box<dyn Future<Item = T, Error = Error> + Send>;
/// Helper function to easily issue a HTTP GET request against a given URL that
/// returns a JSON object. Handles request building, JSON deserialization and
@ -143,7 +141,8 @@ fn build_request<'a>(
.body(match body {
None => Body::empty(),
Some(json) => json.into(),
}).map_err(|e| {
})
.map_err(|e| {
ErrorKind::RequestError(format!("Bad request {} {}: {}", method, url, e)).into()
})
}
@ -185,7 +184,7 @@ where
}))
}
fn send_request_async(req: Request<Body>) -> Box<Future<Item = String, Error = Error> + Send> {
fn send_request_async(req: Request<Body>) -> Box<dyn Future<Item = String, Error = Error> + Send> {
let https = hyper_rustls::HttpsConnector::new(1);
let client = Client::builder().build::<_, Body>(https);
Box::new(
@ -196,14 +195,16 @@ fn send_request_async(req: Request<Body>) -> Box<Future<Item = String, Error = E
if !resp.status().is_success() {
Either::A(err(ErrorKind::RequestError(
"Wrong response code".to_owned(),
).into()))
)
.into()))
} else {
Either::B(
resp.into_body()
.map_err(|e| {
ErrorKind::RequestError(format!("Cannot read response body: {}", e))
.into()
}).concat2()
})
.concat2()
.and_then(|ch| ok(String::from_utf8_lossy(&ch.to_vec()).to_string())),
)
}
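ClientResponseFuture and send_request_async above both return boxed trait objects over the futures 0.1 Future trait, now written with an explicit dyn. A self-contained sketch of that alias pattern, assuming the futures 0.1 crate pinned in this lockfile (the alias and function names are made up for illustration):

use futures::future::{self, Future};

// Boxing erases the concrete future type, so different code paths can all
// return the same alias, much like ClientResponseFuture above.
type BoxedFuture<T, E> = Box<dyn Future<Item = T, Error = E> + Send>;

fn fetch_answer(fail: bool) -> BoxedFuture<u32, String> {
    if fail {
        Box::new(future::err("request failed".to_string()))
    } else {
        Box::new(future::ok(42))
    }
}

fn main() {
    // .wait() blocks the current thread; fine for a demo, not for server code.
    println!("{:?}", fetch_answer(false).wait());
    println!("{:?}", fetch_answer(true).wait());
}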

View file

@ -20,7 +20,7 @@ mod server_api;
mod transactions_api;
mod utils;
use router::{Router, RouterError};
use crate::router::{Router, RouterError};
// Server
use self::server_api::IndexHandler;
@ -48,15 +48,15 @@ use self::peers_api::PeerHandler;
use self::peers_api::PeersAllHandler;
use self::peers_api::PeersConnectedHandler;
use auth::BasicAuthMiddleware;
use chain;
use p2p;
use pool;
use rest::*;
use crate::auth::BasicAuthMiddleware;
use crate::chain;
use crate::p2p;
use crate::pool;
use crate::rest::*;
use crate::util;
use crate::util::RwLock;
use std::net::SocketAddr;
use std::sync::Arc;
use util;
use util::RwLock;
/// Start all server HTTP handlers. Register all of them with Router
/// and runs the corresponding HTTP server.

View file

@ -13,18 +13,18 @@
// limitations under the License.
use super::utils::{get_output, w};
use chain;
use core::core::hash::Hash;
use core::core::hash::Hashed;
use crate::chain;
use crate::core::core::hash::Hash;
use crate::core::core::hash::Hashed;
use crate::rest::*;
use crate::router::{Handler, ResponseFuture};
use crate::types::*;
use crate::util;
use crate::web::*;
use failure::ResultExt;
use hyper::{Body, Request, StatusCode};
use regex::Regex;
use rest::*;
use router::{Handler, ResponseFuture};
use std::sync::Weak;
use types::*;
use util;
use web::*;
/// Gets block headers given either a hash or height or an output commit.
/// GET /v1/headers/<hash>

View file

@ -13,18 +13,18 @@
// limitations under the License.
use super::utils::{get_output, w};
use chain;
use core::core::hash::Hashed;
use crate::chain;
use crate::core::core::hash::Hashed;
use crate::rest::*;
use crate::router::{Handler, ResponseFuture};
use crate::types::*;
use crate::util;
use crate::util::secp::pedersen::Commitment;
use crate::web::*;
use hyper::{Body, Request, StatusCode};
use rest::*;
use router::{Handler, ResponseFuture};
use std::collections::HashMap;
use std::sync::Weak;
use types::*;
use url::form_urlencoded;
use util;
use util::secp::pedersen::Commitment;
use web::*;
/// Chain handler. Get the head details.
/// GET /v1/chain

View file

@ -13,12 +13,12 @@
// limitations under the License.
use super::utils::w;
use crate::p2p;
use crate::p2p::types::{PeerInfoDisplay, ReasonForBan};
use crate::router::{Handler, ResponseFuture};
use crate::web::*;
use hyper::{Body, Request, StatusCode};
use p2p;
use p2p::types::{PeerInfoDisplay, ReasonForBan};
use router::{Handler, ResponseFuture};
use std::sync::Weak;
use web::*;
pub struct PeersAllHandler {
pub peers: Weak<p2p::Peers>,

View file

@ -13,22 +13,22 @@
// limitations under the License.
use super::utils::w;
use core::core::hash::Hashed;
use core::core::Transaction;
use core::ser;
use crate::core::core::hash::Hashed;
use crate::core::core::Transaction;
use crate::core::ser;
use crate::pool;
use crate::rest::*;
use crate::router::{Handler, ResponseFuture};
use crate::types::*;
use crate::util;
use crate::util::RwLock;
use crate::web::*;
use futures::future::ok;
use futures::Future;
use hyper::{Body, Request, StatusCode};
use pool;
use rest::*;
use router::{Handler, ResponseFuture};
use std::collections::HashMap;
use std::sync::Weak;
use types::*;
use url::form_urlencoded;
use util;
use util::RwLock;
use web::*;
/// Get basic information about the transaction pool.
/// GET /v1/pool
@ -60,7 +60,7 @@ pub struct PoolPushHandler {
}
impl PoolPushHandler {
fn update_pool(&self, req: Request<Body>) -> Box<Future<Item = (), Error = Error> + Send> {
fn update_pool(&self, req: Request<Body>) -> Box<dyn Future<Item = (), Error = Error> + Send> {
let params = match req.uri().query() {
Some(query_string) => form_urlencoded::parse(query_string.as_bytes())
.into_owned()

View file

@ -13,14 +13,14 @@
// limitations under the License.
use super::utils::w;
use chain;
use crate::chain;
use crate::p2p;
use crate::rest::*;
use crate::router::{Handler, ResponseFuture};
use crate::types::*;
use crate::web::*;
use hyper::{Body, Request};
use p2p;
use rest::*;
use router::{Handler, ResponseFuture};
use std::sync::Weak;
use types::*;
use web::*;
// RESTful index of available api endpoints
// GET /v1/

View file

@ -13,18 +13,18 @@
// limitations under the License.
use super::utils::w;
use chain;
use crate::chain;
use crate::rest::*;
use crate::router::{Handler, ResponseFuture};
use crate::types::*;
use crate::util;
use crate::util::secp::pedersen::Commitment;
use crate::web::*;
use failure::ResultExt;
use hyper::{Body, Request, StatusCode};
use rest::*;
use router::{Handler, ResponseFuture};
use std::collections::HashMap;
use std::sync::Weak;
use types::*;
use url::form_urlencoded;
use util;
use util::secp::pedersen::Commitment;
use web::*;
// Sum tree handler. Retrieve the roots:
// GET /v1/txhashset/roots

View file

@ -12,14 +12,14 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use chain;
use core::core::{OutputFeatures, OutputIdentifier};
use crate::chain;
use crate::core::core::{OutputFeatures, OutputIdentifier};
use crate::rest::*;
use crate::types::*;
use crate::util;
use crate::util::secp::pedersen::Commitment;
use failure::ResultExt;
use rest::*;
use std::sync::{Arc, Weak};
use types::*;
use util;
use util::secp::pedersen::Commitment;
// All handlers use `Weak` references instead of `Arc` to avoid cycles that
// can never be destroyed. These 2 functions are simple helpers to reduce the

View file

@ -12,36 +12,31 @@
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate grin_chain as chain;
extern crate grin_core as core;
extern crate grin_p2p as p2p;
extern crate grin_pool as pool;
extern crate grin_store as store;
extern crate grin_util as util;
extern crate url;
use grin_chain as chain;
use grin_core as core;
use grin_p2p as p2p;
use grin_pool as pool;
extern crate failure;
use grin_util as util;
use failure;
#[macro_use]
extern crate failure_derive;
extern crate hyper;
use hyper;
#[macro_use]
extern crate lazy_static;
extern crate regex;
extern crate ring;
extern crate serde;
use serde;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
use serde_json;
#[macro_use]
extern crate log;
extern crate futures;
extern crate http;
extern crate hyper_rustls;
extern crate rustls;
extern crate tokio;
extern crate tokio_core;
extern crate tokio_rustls;
extern crate tokio_tcp;
use hyper_rustls;
use rustls;
use tokio_tcp;
pub mod auth;
pub mod client;
@ -51,9 +46,9 @@ mod router;
mod types;
mod web;
pub use auth::BasicAuthMiddleware;
pub use handlers::start_rest_apis;
pub use rest::*;
pub use router::*;
pub use types::*;
pub use web::*;
pub use crate::auth::BasicAuthMiddleware;
pub use crate::handlers::start_rest_apis;
pub use crate::rest::*;
pub use crate::router::*;
pub use crate::types::*;
pub use crate::web::*;
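Most of this file's diff, like the edition = "2018" lines added in the Cargo.toml hunks earlier, is the Rust 2018 edition migration: extern crate declarations become plain use statements or disappear, and in-crate paths gain an explicit crate:: prefix. A minimal sketch of the new path rules, using an illustrative module name rather than the crate's real layout:

// Compiles as a stand-alone binary under edition = "2018". Under the 2015
// edition the import would have been written `use rest::status;`, and every
// external dependency would also need an `extern crate` line.
mod rest {
    pub fn status() -> &'static str {
        "ok"
    }
}

use crate::rest::status;

fn main() {
    println!("node status: {}", status());
}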

View file

@ -18,12 +18,12 @@
//! To use it, just have your service(s) implement the ApiEndpoint trait and
//! register them on a ApiServer.
use crate::router::{Handler, HandlerObj, ResponseFuture, Router};
use failure::{Backtrace, Context, Fail, ResultExt};
use futures::sync::oneshot;
use futures::Stream;
use hyper::rt::Future;
use hyper::{rt, Body, Request, Server};
use router::{Handler, HandlerObj, ResponseFuture, Router};
use rustls;
use rustls::internal::pemfile;
use std::fmt::{self, Display};
@ -55,7 +55,7 @@ pub enum ErrorKind {
}
impl Fail for Error {
fn cause(&self) -> Option<&Fail> {
fn cause(&self) -> Option<&dyn Fail> {
self.inner.cause()
}
@ -65,7 +65,7 @@ impl Fail for Error {
}
impl Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
Display::fmt(&self.inner, f)
}
}
@ -196,7 +196,8 @@ impl ApiServer {
.map_err(|e| eprintln!("HTTP API server error: {}", e));
rt::run(server);
}).map_err(|_| ErrorKind::Internal("failed to spawn API thread".to_string()).into())
})
.map_err(|_| ErrorKind::Internal("failed to spawn API thread".to_string()).into())
}
/// Starts the TLS ApiServer at the provided address.
@ -228,13 +229,15 @@ impl ApiServer {
error!("accept_async failed: {}", e);
Ok(None)
}
}).filter_map(|x| x);
})
.filter_map(|x| x);
let server = Server::builder(tls)
.serve(router)
.map_err(|e| eprintln!("HTTP API server error: {}", e));
rt::run(server);
}).map_err(|_| ErrorKind::Internal("failed to spawn API thread".to_string()).into())
})
.map_err(|_| ErrorKind::Internal("failed to spawn API thread".to_string()).into())
}
/// Stops the API server, it panics in case of error
@ -258,7 +261,7 @@ impl Handler for LoggingMiddleware {
fn call(
&self,
req: Request<Body>,
mut handlers: Box<Iterator<Item = HandlerObj>>,
mut handlers: Box<dyn Iterator<Item = HandlerObj>>,
) -> ResponseFuture {
debug!("REST call: {} {}", req.method(), req.uri().path());
handlers.next().unwrap().call(req, handlers)

View file

@ -26,7 +26,7 @@ lazy_static! {
static ref WILDCARD_STOP_HASH: u64 = calculate_hash(&"**");
}
pub type ResponseFuture = Box<Future<Item = Response<Body>, Error = hyper::Error> + Send>;
pub type ResponseFuture = Box<dyn Future<Item = Response<Body>, Error = hyper::Error> + Send>;
pub trait Handler {
fn get(&self, _req: Request<Body>) -> ResponseFuture {
@ -68,7 +68,7 @@ pub trait Handler {
fn call(
&self,
req: Request<Body>,
mut _handlers: Box<Iterator<Item = HandlerObj>>,
mut _handlers: Box<dyn Iterator<Item = HandlerObj>>,
) -> ResponseFuture {
match req.method() {
&Method::GET => self.get(req),
@ -105,7 +105,7 @@ struct NodeId(usize);
const MAX_CHILDREN: usize = 16;
pub type HandlerObj = Arc<Handler + Send + Sync>;
pub type HandlerObj = Arc<dyn Handler + Send + Sync>;
#[derive(Clone)]
pub struct Node {
@ -147,7 +147,8 @@ impl Router {
.find(|&id| {
let node_key = self.node(*id).key;
node_key == key || node_key == *WILDCARD_HASH || node_key == *WILDCARD_STOP_HASH
}).cloned()
})
.cloned()
}
fn add_empty_node(&mut self, parent: NodeId, key: u64) -> NodeId {
@ -225,7 +226,7 @@ impl NewService for Router {
type Error = hyper::Error;
type InitError = hyper::Error;
type Service = Router;
type Future = Box<Future<Item = Self::Service, Error = Self::InitError> + Send>;
type Future = Box<dyn Future<Item = Self::Service, Error = Self::InitError> + Send>;
fn new_service(&self) -> Self::Future {
Box::new(future::ok(self.clone()))
}
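The router hunks apply the same edition cleanup to trait objects: bare forms such as Arc<Handler + Send + Sync> and Box<Iterator<Item = HandlerObj>> are now spelled with the dyn keyword. A self-contained sketch of that pattern; the trait and type here are illustrative stand-ins, not the actual router API:

use std::sync::Arc;

trait Handler {
    fn call(&self, path: &str) -> String;
}

struct Hello;

impl Handler for Hello {
    fn call(&self, path: &str) -> String {
        format!("hello from {}", path)
    }
}

// Same meaning as the pre-2018 `Arc<Handler + Send + Sync>`, just written
// explicitly as a trait object.
type HandlerObj = Arc<dyn Handler + Send + Sync>;

fn main() {
    let h: HandlerObj = Arc::new(Hello);
    println!("{}", h.call("/v1/status"));
}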

View file

@ -14,17 +14,17 @@
use std::sync::Arc;
use chain;
use core::core::hash::Hashed;
use core::core::merkle_proof::MerkleProof;
use core::{core, ser};
use p2p;
use crate::chain;
use crate::core::core::hash::Hashed;
use crate::core::core::merkle_proof::MerkleProof;
use crate::core::{core, ser};
use crate::p2p;
use crate::util;
use crate::util::secp::pedersen;
use serde;
use serde::de::MapAccess;
use serde::ser::SerializeStruct;
use std::fmt;
use util;
use util::secp::pedersen;
macro_rules! no_dup {
($field:ident) => {
@ -210,7 +210,7 @@ struct PrintableCommitmentVisitor;
impl<'de> serde::de::Visitor<'de> for PrintableCommitmentVisitor {
type Value = PrintableCommitment;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
formatter.write_str("a Pedersen commitment")
}
@ -361,7 +361,7 @@ impl<'de> serde::de::Deserialize<'de> for OutputPrintable {
impl<'de> serde::de::Visitor<'de> for OutputPrintableVisitor {
type Value = OutputPrintable;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
formatter.write_str("a print able Output")
}
@ -571,7 +571,8 @@ impl BlockPrintable {
Some(&block.header),
include_proof,
)
}).collect();
})
.collect();
let kernels = block
.kernels()
.iter()

View file

@ -1,14 +1,14 @@
use crate::rest::*;
use crate::router::ResponseFuture;
use futures::future::{err, ok};
use futures::{Future, Stream};
use hyper::{Body, Request, Response, StatusCode};
use rest::*;
use router::ResponseFuture;
use serde::{Deserialize, Serialize};
use serde_json;
use std::fmt::Debug;
/// Parse request body
pub fn parse_body<T>(req: Request<Body>) -> Box<Future<Item = T, Error = Error> + Send>
pub fn parse_body<T>(req: Request<Body>) -> Box<dyn Future<Item = T, Error = Error> + Send>
where
for<'de> T: Deserialize<'de> + Send + 'static,
{

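The parse_body signature above accepts any T that serde can deserialize for every input lifetime and returns a boxed future. The serde half of that contract, stripped of the hyper plumbing, looks like the sketch below; the struct and field names are hypothetical, and serde's derive feature plus serde_json are assumed as dependencies.

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct TxPush {
    tx_hex: String,
    fluff: bool,
}

// Mirrors parse_body's bound: T must deserialize from input of any lifetime.
fn parse<T>(body: &str) -> Result<T, serde_json::Error>
where
    for<'de> T: Deserialize<'de>,
{
    serde_json::from_str(body)
}

fn main() {
    let tx: TxPush = parse(r#"{"tx_hex":"00ab","fluff":true}"#).unwrap();
    println!("{:?}", tx);
}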
View file

@ -1,8 +1,7 @@
extern crate grin_api as api;
extern crate grin_util as util;
extern crate hyper;
use grin_api as api;
use grin_util as util;
use api::*;
use crate::api::*;
use hyper::{Body, Request};
use std::net::SocketAddr;
use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
@ -41,7 +40,7 @@ impl Handler for CounterMiddleware {
fn call(
&self,
req: Request<Body>,
mut handlers: Box<Iterator<Item = HandlerObj>>,
mut handlers: Box<dyn Iterator<Item = HandlerObj>>,
) -> ResponseFuture {
self.counter.fetch_add(1, Ordering::SeqCst);
handlers.next().unwrap().call(req, handlers)

View file

@ -7,6 +7,7 @@ license = "Apache-2.0"
repository = "https://github.com/mimblewimble/grin"
keywords = [ "crypto", "grin", "mimblewimble" ]
workspace = ".."
edition = "2018"
[dependencies]
bitflags = "1"

View file

@ -15,32 +15,30 @@
//! Facade and handler for the rest of the blockchain implementation
//! and mostly the chain pipeline.
use crate::core::core::hash::{Hash, Hashed, ZERO_HASH};
use crate::core::core::merkle_proof::MerkleProof;
use crate::core::core::verifier_cache::VerifierCache;
use crate::core::core::{
Block, BlockHeader, BlockSums, Committed, Output, OutputIdentifier, Transaction, TxKernelEntry,
};
use crate::core::global;
use crate::core::pow;
use crate::error::{Error, ErrorKind};
use crate::lmdb;
use crate::pipe;
use crate::store;
use crate::txhashset;
use crate::types::{
BlockStatus, ChainAdapter, NoStatus, Options, Tip, TxHashSetRoots, TxHashsetWriteStatus,
};
use crate::util::secp::pedersen::{Commitment, RangeProof};
use crate::util::{Mutex, RwLock, StopState};
use grin_store::Error::NotFoundErr;
use std::collections::HashMap;
use std::fs::File;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use std::time::{Duration, Instant};
use util::RwLock;
use lmdb;
use core::core::hash::{Hash, Hashed, ZERO_HASH};
use core::core::merkle_proof::MerkleProof;
use core::core::verifier_cache::VerifierCache;
use core::core::{
Block, BlockHeader, BlockSums, Committed, Output, OutputIdentifier, Transaction, TxKernelEntry,
};
use core::global;
use core::pow;
use error::{Error, ErrorKind};
use grin_store::Error::NotFoundErr;
use pipe;
use store;
use txhashset;
use types::{
BlockStatus, ChainAdapter, NoStatus, Options, Tip, TxHashSetRoots, TxHashsetWriteStatus,
};
use util::secp::pedersen::{Commitment, RangeProof};
/// Orphan pool size is limited by MAX_ORPHAN_SIZE
pub const MAX_ORPHAN_SIZE: usize = 200;
@ -144,13 +142,14 @@ impl OrphanBlockPool {
pub struct Chain {
db_root: String,
store: Arc<store::ChainStore>,
adapter: Arc<ChainAdapter + Send + Sync>,
adapter: Arc<dyn ChainAdapter + Send + Sync>,
orphans: Arc<OrphanBlockPool>,
txhashset: Arc<RwLock<txhashset::TxHashSet>>,
verifier_cache: Arc<RwLock<VerifierCache>>,
verifier_cache: Arc<RwLock<dyn VerifierCache>>,
// POW verification function
pow_verifier: fn(&BlockHeader) -> Result<(), pow::Error>,
archive_mode: bool,
stop_state: Arc<Mutex<StopState>>,
genesis: BlockHeader,
}
@ -161,62 +160,79 @@ impl Chain {
pub fn init(
db_root: String,
db_env: Arc<lmdb::Environment>,
adapter: Arc<ChainAdapter + Send + Sync>,
adapter: Arc<dyn ChainAdapter + Send + Sync>,
genesis: Block,
pow_verifier: fn(&BlockHeader) -> Result<(), pow::Error>,
verifier_cache: Arc<RwLock<VerifierCache>>,
verifier_cache: Arc<RwLock<dyn VerifierCache>>,
archive_mode: bool,
stop_state: Arc<Mutex<StopState>>,
) -> Result<Chain, Error> {
let chain_store = store::ChainStore::new(db_env)?;
let chain = {
// Note: We take a lock on the stop_state here and do not release it until
// we have finished chain initialization.
let stop_state_local = stop_state.clone();
let stop_lock = stop_state_local.lock();
if stop_lock.is_stopped() {
return Err(ErrorKind::Stopped.into());
}
let store = Arc::new(chain_store);
let store = Arc::new(store::ChainStore::new(db_env)?);
// open the txhashset, creating a new one if necessary
let mut txhashset = txhashset::TxHashSet::open(db_root.clone(), store.clone(), None)?;
// open the txhashset, creating a new one if necessary
let mut txhashset = txhashset::TxHashSet::open(db_root.clone(), store.clone(), None)?;
setup_head(genesis.clone(), store.clone(), &mut txhashset)?;
setup_head(&genesis, &store, &mut txhashset)?;
Chain::log_heads(&store)?;
{
let head = store.head()?;
debug!(
"init: head: {} @ {} [{}]",
head.total_difficulty.to_num(),
head.height,
head.last_block_h,
);
}
Chain {
db_root,
store,
adapter,
orphans: Arc::new(OrphanBlockPool::new()),
txhashset: Arc::new(RwLock::new(txhashset)),
pow_verifier,
verifier_cache,
archive_mode,
stop_state,
genesis: genesis.header.clone(),
}
};
{
let header_head = store.header_head()?;
debug!(
"init: header_head: {} @ {} [{}]",
header_head.total_difficulty.to_num(),
header_head.height,
header_head.last_block_h,
);
}
// Run chain compaction. Laptops and other intermittent nodes
// may not run long enough to trigger daily compaction.
// So run it explicitly here on startup (it's fast enough to do so).
// Note: we release the stop_lock from above as compact also requires a lock.
chain.compact()?;
{
let sync_head = store.get_sync_head()?;
debug!(
"init: sync_head: {} @ {} [{}]",
sync_head.total_difficulty.to_num(),
sync_head.height,
sync_head.last_block_h,
);
}
Ok(chain)
}
Ok(Chain {
db_root: db_root,
store: store,
adapter: adapter,
orphans: Arc::new(OrphanBlockPool::new()),
txhashset: Arc::new(RwLock::new(txhashset)),
pow_verifier,
verifier_cache,
archive_mode,
genesis: genesis.header.clone(),
})
fn log_heads(store: &store::ChainStore) -> Result<(), Error> {
let head = store.head()?;
debug!(
"init: head: {} @ {} [{}]",
head.total_difficulty.to_num(),
head.height,
head.last_block_h,
);
let header_head = store.header_head()?;
debug!(
"init: header_head: {} @ {} [{}]",
header_head.total_difficulty.to_num(),
header_head.height,
header_head.last_block_h,
);
let sync_head = store.get_sync_head()?;
debug!(
"init: sync_head: {} @ {} [{}]",
sync_head.total_difficulty.to_num(),
sync_head.height,
sync_head.last_block_h,
);
Ok(())
}
/// Processes a single block, then checks for orphans, processing
@ -253,6 +269,15 @@ impl Chain {
/// or false if it has added to a fork (or orphan?).
fn process_block_single(&self, b: Block, opts: Options) -> Result<Option<Tip>, Error> {
let (maybe_new_head, prev_head) = {
// Note: We take a lock on the stop_state here and do not release it until
// we have finished processing this single block.
// We take care to write both the txhashset *and* the batch while we
// have the stop_state lock.
let stop_lock = self.stop_state.lock();
if stop_lock.is_stopped() {
return Err(ErrorKind::Stopped.into());
}
let mut txhashset = self.txhashset.write();
let batch = self.store.batch()?;
let mut ctx = self.new_ctx(opts, batch, &mut txhashset)?;
@ -260,6 +285,11 @@ impl Chain {
let prev_head = ctx.batch.head()?;
let maybe_new_head = pipe::process_block(&b, &mut ctx);
// We have flushed txhashset extension changes to disk
// but not yet committed the batch.
// A node shutdown at this point can be catastrophic...
// We prevent this via the stop_lock (see above).
if let Ok(_) = maybe_new_head {
ctx.batch.commit()?;
}
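
The same shutdown guard recurs in `sync_block_headers` and `compact_txhashset` below; schematically (a restatement of lines already in this diff, not new code):

    // Hold the stop_state lock across the txhashset write *and* the batch
    // commit so a shutdown cannot land between the two and leave them
    // inconsistent on disk.
    let stop_lock = self.stop_state.lock();
    if stop_lock.is_stopped() {
        return Err(ErrorKind::Stopped.into());
    }
    // ... write the txhashset extension, commit the batch, then drop the lock.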
@ -324,11 +354,12 @@ impl Chain {
/// Process a block header received during "header first" propagation.
pub fn process_block_header(&self, bh: &BlockHeader, opts: Options) -> Result<(), Error> {
// We take a write lock on the txhashset and create a new batch
// but this is strictly readonly so we do not commit the batch.
let mut txhashset = self.txhashset.write();
let batch = self.store.batch()?;
let mut ctx = self.new_ctx(opts, batch, &mut txhashset)?;
pipe::process_block_header(bh, &mut ctx)?;
ctx.batch.commit()?;
Ok(())
}
@ -336,6 +367,15 @@ impl Chain {
/// This is only ever used during sync and is based on sync_head.
/// We update header_head here if our total work increases.
pub fn sync_block_headers(&self, headers: &[BlockHeader], opts: Options) -> Result<(), Error> {
// Note: We take a lock on the stop_state here and do not release it until
// we have finished processing these block headers.
// We take care to write both the txhashset *and* the batch while we
// have the stop_state lock.
let stop_lock = self.stop_state.lock();
if stop_lock.is_stopped() {
return Err(ErrorKind::Stopped.into());
}
let mut txhashset = self.txhashset.write();
let batch = self.store.batch()?;
let mut ctx = self.new_ctx(opts, batch, &mut txhashset)?;
@ -782,7 +822,7 @@ impl Chain {
&self,
h: Hash,
txhashset_data: File,
status: &TxHashsetWriteStatus,
status: &dyn TxHashsetWriteStatus,
) -> Result<(), Error> {
status.on_setup();
@ -865,20 +905,19 @@ impl Chain {
}
fn compact_txhashset(&self) -> Result<(), Error> {
debug!("Starting blockchain compaction.");
debug!("Starting txhashset compaction...");
{
// Note: We take a lock on the stop_state here and do not release it until
// we have finished processing this chain compaction operation.
let stop_lock = self.stop_state.lock();
if stop_lock.is_stopped() {
return Err(ErrorKind::Stopped.into());
}
let mut txhashset = self.txhashset.write();
txhashset.compact()?;
txhashset::extending_readonly(&mut txhashset, |extension| {
extension.dump_output_pmmr();
Ok(())
})?;
}
// Now check we can still successfully validate the chain state after
// compacting, shouldn't be necessary once all of this is well-oiled
debug!("Validating state after compaction.");
self.validate(true)?;
debug!("... finished txhashset compaction.");
Ok(())
}
@ -892,7 +931,11 @@ impl Chain {
let horizon = global::cut_through_horizon() as u64;
let head = self.head()?;
let tail = self.tail()?;
let tail = match self.tail() {
Ok(tail) => tail,
Err(_) => Tip::from_header(&self.genesis),
};
let cutoff = head.height.saturating_sub(horizon);
@ -988,7 +1031,8 @@ impl Chain {
if outputs.0 != rangeproofs.0 || outputs.1.len() != rangeproofs.1.len() {
return Err(ErrorKind::TxHashSetErr(String::from(
"Output and rangeproof sets don't match",
)).into());
))
.into());
}
let mut output_vec: Vec<Output> = vec![];
for (ref x, &y) in outputs.1.iter().zip(rangeproofs.1.iter()) {
@ -1131,7 +1175,7 @@ impl Chain {
/// Builds an iterator on blocks starting from the current chain head and
/// running backward. Specialized to return information pertaining to block
/// difficulty calculation (timestamp and previous difficulties).
pub fn difficulty_iter(&self) -> store::DifficultyIter {
pub fn difficulty_iter(&self) -> store::DifficultyIter<'_> {
let head = self.head().unwrap();
let store = self.store.clone();
store::DifficultyIter::from(head.last_block_h, store)
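
A sketch of driving this iterator, on the assumption (consistent with the test further down that calls `chain.difficulty_iter()`) that it yields `consensus::HeaderInfo` entries exposing a timestamp and a difficulty:

    // Walk recent headers newest-first and print what the difficulty
    // adjustment sees. `to_num()` is the same accessor used in the debug
    // logging above.
    let mut prev_ts: Option<u64> = None;
    for info in chain.difficulty_iter().take(60) {
        if let Some(ts) = prev_ts {
            println!("dt={}s difficulty={}", ts - info.timestamp, info.difficulty.to_num());
        }
        prev_ts = Some(info.timestamp);
    }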
@ -1146,8 +1190,8 @@ impl Chain {
}
fn setup_head(
genesis: Block,
store: Arc<store::ChainStore>,
genesis: &Block,
store: &store::ChainStore,
txhashset: &mut txhashset::TxHashSet,
) -> Result<(), Error> {
let mut batch = store.batch()?;
@ -1241,10 +1285,8 @@ fn setup_head(
let tip = Tip::from_header(&genesis.header);
batch.save_head(&tip)?;
batch.save_block_header(&genesis.header)?;
if genesis.kernels().len() > 0 {
let (utxo_sum, kernel_sum) = (sums, &genesis as &Committed).verify_kernel_sums(
let (utxo_sum, kernel_sum) = (sums, genesis as &Committed).verify_kernel_sums(
genesis.header.overage(),
genesis.header.total_kernel_offset(),
)?;

View file

@ -13,17 +13,16 @@
// limitations under the License.
//! Error types for chain
use crate::core::core::{block, committed, transaction};
use crate::core::ser;
use crate::keychain;
use crate::util::secp;
use crate::util::secp::pedersen::Commitment;
use failure::{Backtrace, Context, Fail};
use grin_store as store;
use std::fmt::{self, Display};
use std::io;
use core::core::{block, committed, transaction};
use core::ser;
use grin_store as store;
use keychain;
use util::secp;
use util::secp::pedersen::Commitment;
/// Error definition
#[derive(Debug, Fail)]
pub struct Error {
@ -129,10 +128,13 @@ pub enum ErrorKind {
/// Error from summing and verifying kernel sums via committed trait.
#[fail(display = "Committed Trait: Error summing and verifying kernel sums")]
Committed(committed::Error),
/// We cannot process data once the Grin server has been stopped.
#[fail(display = "Stopped (Grin Shutting Down)")]
Stopped,
}
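
Callers that hit the new guards in chain.rs surface this kind; downstream code might filter it out of error logging along these lines (a sketch, assuming the usual `kind()` accessor on this failure wrapper):

    match chain.process_block(block, options) {
        Ok(_) => (),
        Err(e) => match e.kind() {
            // The server is shutting down; drop the work quietly.
            ErrorKind::Stopped => (),
            kind => error!("block processing failed: {:?}", kind),
        },
    }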
impl Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let cause = match self.cause() {
Some(c) => format!("{}", c),
None => String::from("Unknown"),
@ -155,7 +157,7 @@ impl Error {
self.inner.get_context().clone()
}
/// get cause
pub fn cause(&self) -> Option<&Fail> {
pub fn cause(&self) -> Option<&dyn Fail> {
self.inner.cause()
}
/// get backtrace

View file

@ -22,24 +22,19 @@
#[macro_use]
extern crate bitflags;
extern crate byteorder;
extern crate croaring;
extern crate lmdb_zero as lmdb;
extern crate lru_cache;
extern crate serde;
use lmdb_zero as lmdb;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate log;
extern crate chrono;
extern crate failure;
#[macro_use]
extern crate failure_derive;
extern crate grin_core as core;
extern crate grin_keychain as keychain;
extern crate grin_store;
extern crate grin_util as util;
use failure;
use grin_core as core;
use grin_keychain as keychain;
use grin_store;
use grin_util as util;
mod chain;
mod error;
@ -50,7 +45,7 @@ pub mod types;
// Re-export the base interface
pub use chain::{Chain, MAX_ORPHAN_SIZE};
pub use error::{Error, ErrorKind};
pub use store::ChainStore;
pub use types::{BlockStatus, ChainAdapter, Options, Tip, TxHashsetWriteStatus};
pub use crate::chain::{Chain, MAX_ORPHAN_SIZE};
pub use crate::error::{Error, ErrorKind};
pub use crate::store::ChainStore;
pub use crate::types::{BlockStatus, ChainAdapter, Options, Tip, TxHashsetWriteStatus};
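
The same mechanical translation is applied to every crate touched by this commit; in miniature (an illustrative snippet, not a file in the tree):

    // Rust 2015 (removed):
    //     extern crate grin_core as core;
    //     use core::global;        // resolves through the extern crate
    //     use chain::Chain;        // implicit crate-local path
    //
    // Rust 2018 (added):
    use grin_core as core;           // plain `use` replaces `extern crate`
    use crate::core::global;         // the crate-local alias is now spelled out
    use crate::chain::Chain;         // ditto for crate-local modules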

View file

@ -14,25 +14,23 @@
//! Implementation of the chain block acceptance (or refusal) pipeline.
use std::sync::Arc;
use util::RwLock;
use crate::chain::OrphanBlockPool;
use crate::core::consensus;
use crate::core::core::hash::Hashed;
use crate::core::core::verifier_cache::VerifierCache;
use crate::core::core::Committed;
use crate::core::core::{Block, BlockHeader, BlockSums};
use crate::core::global;
use crate::core::pow;
use crate::error::{Error, ErrorKind};
use crate::store;
use crate::txhashset;
use crate::types::{Options, Tip};
use crate::util::RwLock;
use chrono::prelude::Utc;
use chrono::Duration;
use chain::OrphanBlockPool;
use core::consensus;
use core::core::hash::Hashed;
use core::core::verifier_cache::VerifierCache;
use core::core::Committed;
use core::core::{Block, BlockHeader, BlockSums};
use core::global;
use core::pow;
use error::{Error, ErrorKind};
use grin_store;
use store;
use txhashset;
use types::{Options, Tip};
use std::sync::Arc;
/// Contextual information required to process a new block and either reject or
/// accept it.
@ -46,7 +44,7 @@ pub struct BlockContext<'a> {
/// The active batch to use for block processing.
pub batch: store::Batch<'a>,
/// The verifier cache (caching verifier for rangeproofs and kernel signatures)
pub verifier_cache: Arc<RwLock<VerifierCache>>,
pub verifier_cache: Arc<RwLock<dyn VerifierCache>>,
/// Recent orphan blocks to avoid double-processing
pub orphans: Arc<OrphanBlockPool>,
}
@ -56,7 +54,7 @@ pub struct BlockContext<'a> {
fn process_header_for_block(
header: &BlockHeader,
is_fork: bool,
ctx: &mut BlockContext,
ctx: &mut BlockContext<'_>,
) -> Result<(), Error> {
txhashset::header_extending(&mut ctx.txhashset, &mut ctx.batch, |extension| {
extension.force_rollback();
@ -77,7 +75,7 @@ fn process_header_for_block(
// Check if we already know about this block for various reasons
// from cheapest to most expensive (delay hitting the db until last).
fn check_known(block: &Block, ctx: &mut BlockContext) -> Result<(), Error> {
fn check_known(block: &Block, ctx: &mut BlockContext<'_>) -> Result<(), Error> {
check_known_head(&block.header, ctx)?;
check_known_orphans(&block.header, ctx)?;
check_known_store(&block.header, ctx)?;
@ -87,12 +85,12 @@ fn check_known(block: &Block, ctx: &mut BlockContext) -> Result<(), Error> {
/// Runs the block processing pipeline, including validation and finding a
/// place for the new block in the chain.
/// Returns new head if chain head updated.
pub fn process_block(b: &Block, ctx: &mut BlockContext) -> Result<Option<Tip>, Error> {
pub fn process_block(b: &Block, ctx: &mut BlockContext<'_>) -> Result<Option<Tip>, Error> {
// TODO should just take a promise for a block with a full header so we don't
// spend resources reading the full block when its header is invalid
debug!(
"pipe: process_block {} at {}, in/out/kern: {}/{}/{}",
"pipe: process_block {} at {} [in/out/kern: {}/{}/{}]",
b.hash(),
b.header.height,
b.inputs().len(),
@ -183,7 +181,7 @@ pub fn process_block(b: &Block, ctx: &mut BlockContext) -> Result<Option<Tip>, E
/// This is only ever used during sync and uses a context based on sync_head.
pub fn sync_block_headers(
headers: &[BlockHeader],
ctx: &mut BlockContext,
ctx: &mut BlockContext<'_>,
) -> Result<Option<Tip>, Error> {
if let Some(header) = headers.first() {
debug!(
@ -251,7 +249,7 @@ pub fn sync_block_headers(
/// We validate the header but we do not store it or update header head based
/// on this. We will update these once we get the block back after requesting
/// it.
pub fn process_block_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), Error> {
pub fn process_block_header(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<(), Error> {
debug!(
"pipe: process_block_header: {} at {}",
header.hash(),
@ -266,7 +264,7 @@ pub fn process_block_header(header: &BlockHeader, ctx: &mut BlockContext) -> Res
/// Quick in-memory check to fast-reject any block header we've already handled
/// recently. Keeps duplicates from the network in check.
/// ctx here is specific to the header_head (tip of the header chain)
fn check_header_known(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), Error> {
fn check_header_known(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<(), Error> {
let header_head = ctx.batch.header_head()?;
if header.hash() == header_head.last_block_h || header.hash() == header_head.prev_block_h {
return Err(ErrorKind::Unfit("header already known".to_string()).into());
@ -277,7 +275,7 @@ fn check_header_known(header: &BlockHeader, ctx: &mut BlockContext) -> Result<()
/// Quick in-memory check to fast-reject any block handled recently.
/// Keeps duplicates from the network in check.
/// Checks against the last_block_h and prev_block_h of the chain head.
fn check_known_head(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), Error> {
fn check_known_head(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<(), Error> {
let head = ctx.batch.head()?;
let bh = header.hash();
if bh == head.last_block_h || bh == head.prev_block_h {
@ -287,7 +285,7 @@ fn check_known_head(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(),
}
/// Check if this block is in the set of known orphans.
fn check_known_orphans(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), Error> {
fn check_known_orphans(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<(), Error> {
if ctx.orphans.contains(&header.hash()) {
Err(ErrorKind::Unfit("already known in orphans".to_string()).into())
} else {
@ -296,7 +294,7 @@ fn check_known_orphans(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(
}
// Check if this block is in the store already.
fn check_known_store(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), Error> {
fn check_known_store(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<(), Error> {
match ctx.batch.block_exists(&header.hash()) {
Ok(true) => {
let head = ctx.batch.head()?;
@ -321,7 +319,10 @@ fn check_known_store(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(),
// Find the previous header from the store.
// Return an Orphan error if we cannot find the previous header.
fn prev_header_store(header: &BlockHeader, batch: &mut store::Batch) -> Result<BlockHeader, Error> {
fn prev_header_store(
header: &BlockHeader,
batch: &mut store::Batch<'_>,
) -> Result<BlockHeader, Error> {
let prev = batch.get_previous_header(&header).map_err(|e| match e {
grin_store::Error::NotFoundErr(_) => ErrorKind::Orphan,
_ => ErrorKind::StoreErr(e, "check prev header".into()),
@ -332,7 +333,7 @@ fn prev_header_store(header: &BlockHeader, batch: &mut store::Batch) -> Result<B
/// First level of block validation that only needs to act on the block header
/// to make it as cheap as possible. The different validations are also
/// arranged by order of cost to have as little DoS surface as possible.
fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), Error> {
fn validate_header(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<(), Error> {
// check version, enforces scheduled hard fork
if !consensus::valid_header_version(header.height, header.version) {
error!(
@ -425,7 +426,7 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext) -> Result<(), E
Ok(())
}
fn validate_block(block: &Block, ctx: &mut BlockContext) -> Result<(), Error> {
fn validate_block(block: &Block, ctx: &mut BlockContext<'_>) -> Result<(), Error> {
let prev = ctx.batch.get_previous_header(&block.header)?;
block
.validate(&prev.total_kernel_offset, ctx.verifier_cache.clone())
@ -437,7 +438,10 @@ fn validate_block(block: &Block, ctx: &mut BlockContext) -> Result<(), Error> {
/// Verify the block is not attempting to spend coinbase outputs
/// before they have sufficiently matured.
/// Note: requires a txhashset extension.
fn verify_coinbase_maturity(block: &Block, ext: &mut txhashset::Extension) -> Result<(), Error> {
fn verify_coinbase_maturity(
block: &Block,
ext: &mut txhashset::Extension<'_>,
) -> Result<(), Error> {
ext.verify_coinbase_maturity(&block.inputs(), block.header.height)?;
Ok(())
}
@ -447,7 +451,7 @@ fn verify_coinbase_maturity(block: &Block, ext: &mut txhashset::Extension) -> Re
/// This allows us to verify kernel sums across the full utxo and kernel sets
/// based on block_sums of previous block, accounting for the inputs|outputs|kernels
/// of the new block.
fn verify_block_sums(b: &Block, ext: &mut txhashset::Extension) -> Result<(), Error> {
fn verify_block_sums(b: &Block, ext: &mut txhashset::Extension<'_>) -> Result<(), Error> {
// TODO - this is 2 db calls, can we optimize this?
// Retrieve the block_sums for the previous block.
let prev = ext.batch.get_previous_header(&b.header)?;
@ -462,7 +466,7 @@ fn verify_block_sums(b: &Block, ext: &mut txhashset::Extension) -> Result<(), Er
// Verify the kernel sums for the block_sums with the new block applied.
let (utxo_sum, kernel_sum) =
(block_sums, b as &Committed).verify_kernel_sums(overage, offset)?;
(block_sums, b as &dyn Committed).verify_kernel_sums(overage, offset)?;
// Save the new block_sums for the new block to the db via the batch.
ext.batch.save_block_sums(
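
For orientation, the identity being enforced by `verify_kernel_sums` is, informally, of the following shape (the exact sign of the overage term and the offset handling are deferred to the `Committed` implementation, hence the ±):

    sum(output commitments) - sum(input commitments) ± overage·H
        == sum(kernel excess commitments) + total_kernel_offset·G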
@ -478,7 +482,10 @@ fn verify_block_sums(b: &Block, ext: &mut txhashset::Extension) -> Result<(), Er
/// Fully validate the block by applying it to the txhashset extension.
/// Check both the txhashset roots and sizes are correct after applying the block.
fn apply_block_to_txhashset(block: &Block, ext: &mut txhashset::Extension) -> Result<(), Error> {
fn apply_block_to_txhashset(
block: &Block,
ext: &mut txhashset::Extension<'_>,
) -> Result<(), Error> {
ext.validate_header_root(&block.header)?;
ext.apply_block(block)?;
ext.validate_roots()?;
@ -488,7 +495,7 @@ fn apply_block_to_txhashset(block: &Block, ext: &mut txhashset::Extension) -> Re
/// Officially adds the block to our chain.
/// Header must be added separately (assume this has been done previously).
fn add_block(b: &Block, batch: &store::Batch) -> Result<(), Error> {
fn add_block(b: &Block, batch: &store::Batch<'_>) -> Result<(), Error> {
batch
.save_block(b)
.map_err(|e| ErrorKind::StoreErr(e, "pipe save block".to_owned()))?;
@ -496,7 +503,7 @@ fn add_block(b: &Block, batch: &store::Batch) -> Result<(), Error> {
}
/// Update the block chain tail so we can know the exact tail of full blocks in this node
fn update_body_tail(bh: &BlockHeader, batch: &store::Batch) -> Result<(), Error> {
fn update_body_tail(bh: &BlockHeader, batch: &store::Batch<'_>) -> Result<(), Error> {
let tip = Tip::from_header(bh);
batch
.save_body_tail(&tip)
@ -506,7 +513,7 @@ fn update_body_tail(bh: &BlockHeader, batch: &store::Batch) -> Result<(), Error>
}
/// Officially adds the block header to our header chain.
fn add_block_header(bh: &BlockHeader, batch: &store::Batch) -> Result<(), Error> {
fn add_block_header(bh: &BlockHeader, batch: &store::Batch<'_>) -> Result<(), Error> {
batch
.save_block_header(bh)
.map_err(|e| ErrorKind::StoreErr(e, "pipe save header".to_owned()))?;
@ -516,7 +523,7 @@ fn add_block_header(bh: &BlockHeader, batch: &store::Batch) -> Result<(), Error>
/// Directly updates the head if we've just appended a new block to it or handle
/// the situation where we've just added enough work to have a fork with more
/// work than the head.
fn update_head(b: &Block, ctx: &BlockContext) -> Result<Option<Tip>, Error> {
fn update_head(b: &Block, ctx: &BlockContext<'_>) -> Result<Option<Tip>, Error> {
// if we made a fork with more work than the head (which should also be true
// when extending the head), update it
let head = ctx.batch.head()?;
@ -544,7 +551,7 @@ fn has_more_work(header: &BlockHeader, head: &Tip) -> bool {
}
/// Update the sync head so we can keep syncing from where we left off.
fn update_sync_head(bh: &BlockHeader, batch: &mut store::Batch) -> Result<(), Error> {
fn update_sync_head(bh: &BlockHeader, batch: &mut store::Batch<'_>) -> Result<(), Error> {
let tip = Tip::from_header(bh);
batch
.save_sync_head(&tip)
@ -554,7 +561,7 @@ fn update_sync_head(bh: &BlockHeader, batch: &mut store::Batch) -> Result<(), Er
}
/// Update the header head if this header has most work.
fn update_header_head(bh: &BlockHeader, ctx: &mut BlockContext) -> Result<Option<Tip>, Error> {
fn update_header_head(bh: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<Option<Tip>, Error> {
let header_head = ctx.batch.header_head()?;
if has_more_work(&bh, &header_head) {
let tip = Tip::from_header(bh);
@ -576,7 +583,7 @@ fn update_header_head(bh: &BlockHeader, ctx: &mut BlockContext) -> Result<Option
/// Rewind the header chain and reapply headers on a fork.
pub fn rewind_and_apply_header_fork(
header: &BlockHeader,
ext: &mut txhashset::HeaderExtension,
ext: &mut txhashset::HeaderExtension<'_>,
) -> Result<(), Error> {
let mut fork_hashes = vec![];
let mut current = ext.batch.get_previous_header(header)?;
@ -606,7 +613,7 @@ pub fn rewind_and_apply_header_fork(
/// to find to fork root. Rewind the txhashset to the root and apply all the
/// forked blocks prior to the one being processed to set the txhashset in
/// the expected state.
pub fn rewind_and_apply_fork(b: &Block, ext: &mut txhashset::Extension) -> Result<(), Error> {
pub fn rewind_and_apply_fork(b: &Block, ext: &mut txhashset::Extension<'_>) -> Result<(), Error> {
// extending a fork, first identify the block where forking occurred
// keeping the hashes of blocks along the fork
let mut fork_hashes = vec![];
@ -641,7 +648,7 @@ pub fn rewind_and_apply_fork(b: &Block, ext: &mut txhashset::Extension) -> Resul
Ok(())
}
fn validate_utxo(block: &Block, ext: &txhashset::Extension) -> Result<(), Error> {
fn validate_utxo(block: &Block, ext: &txhashset::Extension<'_>) -> Result<(), Error> {
let utxo = ext.utxo_view();
utxo.validate_block(block)?;
Ok(())

View file

@ -14,20 +14,17 @@
//! Implements storage primitives required by the chain
use std::sync::Arc;
use crate::core::consensus::HeaderInfo;
use crate::core::core::hash::{Hash, Hashed};
use crate::core::core::{Block, BlockHeader, BlockSums};
use crate::core::pow::Difficulty;
use crate::lmdb;
use crate::types::Tip;
use crate::util::secp::pedersen::Commitment;
use croaring::Bitmap;
use lmdb;
use util::secp::pedersen::Commitment;
use core::consensus::HeaderInfo;
use core::core::hash::{Hash, Hashed};
use core::core::{Block, BlockHeader, BlockSums};
use core::pow::Difficulty;
use grin_store as store;
use grin_store::{option_to_not_found, to_key, Error};
use types::Tip;
use std::sync::Arc;
const STORE_SUBPATH: &'static str = "chain";
@ -117,7 +114,7 @@ impl ChainStore {
}
/// Builds a new batch to be used with this store.
pub fn batch(&self) -> Result<Batch, Error> {
pub fn batch(&self) -> Result<Batch<'_>, Error> {
Ok(Batch {
db: self.db.batch()?,
})
@ -202,8 +199,8 @@ impl<'a> Batch<'a> {
/// Save the block and the associated input bitmap.
/// Note: the block header is not saved to the db here; we assume this has already been done.
pub fn save_block(&self, b: &Block) -> Result<(), Error> {
// Build the "input bitmap" for this new block and cache it locally.
self.build_and_cache_block_input_bitmap(&b)?;
// Build the "input bitmap" for this new block and store it in the db.
self.build_and_store_block_input_bitmap(&b)?;
// Save the block itself to the db.
self.db
@ -308,7 +305,7 @@ impl<'a> Batch<'a> {
Ok(bitmap)
}
fn build_and_cache_block_input_bitmap(&self, block: &Block) -> Result<Bitmap, Error> {
fn build_and_store_block_input_bitmap(&self, block: &Block) -> Result<Bitmap, Error> {
// Build the bitmap.
let bitmap = self.build_block_input_bitmap(block)?;
@ -329,7 +326,7 @@ impl<'a> Batch<'a> {
} else {
match self.get_block(bh) {
Ok(block) => {
let bitmap = self.build_and_cache_block_input_bitmap(&block)?;
let bitmap = self.build_and_store_block_input_bitmap(&block)?;
Ok(bitmap)
}
Err(e) => Err(e),
@ -345,7 +342,7 @@ impl<'a> Batch<'a> {
/// Creates a child of this batch. It will be merged with its parent on
/// commit, abandoned otherwise.
pub fn child(&mut self) -> Result<Batch, Error> {
pub fn child(&mut self) -> Result<Batch<'_>, Error> {
Ok(Batch {
db: self.db.child()?,
})
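
As used throughout chain.rs and pipe.rs above, a batch follows a create / stage / commit shape; schematically (only calls that appear elsewhere in this diff):

    let batch = store.batch()?;          // open a write batch against lmdb
    batch.save_block_header(&header)?;   // stage writes
    batch.save_head(&tip)?;
    batch.commit()?;                     // persist atomically; dropping instead rolls back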
@ -384,7 +381,7 @@ impl<'a> DifficultyIter<'a> {
/// Build a new iterator using the provided chain store batch and starting from
/// the provided block hash.
pub fn from_batch(start: Hash, batch: Batch) -> DifficultyIter {
pub fn from_batch(start: Hash, batch: Batch<'_>) -> DifficultyIter<'_> {
DifficultyIter {
start,
store: None,

View file

@ -14,12 +14,11 @@
//! Lightweight readonly view into kernel MMR for convenience.
use core::core::pmmr::RewindablePMMR;
use core::core::{BlockHeader, TxKernel};
use error::{Error, ErrorKind};
use crate::core::core::pmmr::RewindablePMMR;
use crate::core::core::{BlockHeader, TxKernel};
use crate::error::{Error, ErrorKind};
use crate::store::Batch;
use grin_store::pmmr::PMMRBackend;
use store::Batch;
/// Rewindable (but readonly) view of the kernel set (based on kernel MMR).
pub struct RewindableKernelView<'a> {
@ -32,7 +31,7 @@ impl<'a> RewindableKernelView<'a> {
/// Build a new readonly kernel view.
pub fn new(
pmmr: RewindablePMMR<'a, TxKernel, PMMRBackend<TxKernel>>,
batch: &'a Batch,
batch: &'a Batch<'_>,
header: BlockHeader,
) -> RewindableKernelView<'a> {
RewindableKernelView {
@ -45,7 +44,7 @@ impl<'a> RewindableKernelView<'a> {
/// Accessor for the batch used in this view.
/// We will discard this batch (rollback) at the end, so be aware of this.
/// Nothing will get written to the db/index via this view.
pub fn batch(&self) -> &'a Batch {
pub fn batch(&self) -> &'a Batch<'_> {
self.batch
}
@ -73,7 +72,8 @@ impl<'a> RewindableKernelView<'a> {
return Err(ErrorKind::InvalidTxHashSet(format!(
"Kernel root at {} does not match",
self.header.height
)).into());
))
.into());
}
Ok(())
}

View file

@ -15,35 +15,31 @@
//! Utility structs to handle the 3 MMRs (output, rangeproof,
//! kernel) along the overall header MMR conveniently and transactionally.
use crate::core::core::committed::Committed;
use crate::core::core::hash::{Hash, Hashed};
use crate::core::core::merkle_proof::MerkleProof;
use crate::core::core::pmmr::{self, ReadonlyPMMR, RewindablePMMR, PMMR};
use crate::core::core::{
Block, BlockHeader, Input, Output, OutputFeatures, OutputIdentifier, TxKernel, TxKernelEntry,
};
use crate::core::global;
use crate::core::ser::{PMMRIndexHashable, PMMRable};
use crate::error::{Error, ErrorKind};
use crate::store::{Batch, ChainStore};
use crate::txhashset::{RewindableKernelView, UTXOView};
use crate::types::{Tip, TxHashSetRoots, TxHashsetWriteStatus};
use crate::util::secp::pedersen::{Commitment, RangeProof};
use crate::util::{file, secp_static, zip};
use croaring::Bitmap;
use grin_store;
use grin_store::pmmr::{PMMRBackend, PMMR_FILES};
use grin_store::types::prune_noop;
use std::collections::HashSet;
use std::fs::{self, File};
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::time::{Instant, SystemTime, UNIX_EPOCH};
use croaring::Bitmap;
use util::secp::pedersen::{Commitment, RangeProof};
use core::core::committed::Committed;
use core::core::hash::{Hash, Hashed};
use core::core::merkle_proof::MerkleProof;
use core::core::pmmr::{self, ReadonlyPMMR, RewindablePMMR, PMMR};
use core::core::{
Block, BlockHeader, Input, Output, OutputFeatures, OutputIdentifier, TxKernel, TxKernelEntry,
};
use core::global;
use core::ser::{PMMRIndexHashable, PMMRable};
use error::{Error, ErrorKind};
use grin_store;
use grin_store::pmmr::{PMMRBackend, PMMR_FILES};
use grin_store::types::prune_noop;
use store::{Batch, ChainStore};
use txhashset::{RewindableKernelView, UTXOView};
use types::{Tip, TxHashSetRoots, TxHashsetWriteStatus};
use util::{file, secp_static, zip};
const HEADERHASHSET_SUBDIR: &'static str = "header";
const TXHASHSET_SUBDIR: &'static str = "txhashset";
@ -163,7 +159,7 @@ impl TxHashSet {
pub fn is_unspent(&self, output_id: &OutputIdentifier) -> Result<(Hash, u64), Error> {
match self.commit_index.get_output_pos(&output_id.commit) {
Ok(pos) => {
let output_pmmr =
let output_pmmr: ReadonlyPMMR<'_, Output, _> =
ReadonlyPMMR::at(&self.output_pmmr_h.backend, self.output_pmmr_h.last_pos);
if let Some(hash) = output_pmmr.get_hash(pos) {
if hash == output_id.hash_with_index(pos - 1) {
@ -314,7 +310,7 @@ impl TxHashSet {
/// The unit of work is always discarded (always rollback) as this is read-only.
pub fn extending_readonly<'a, F, T>(trees: &'a mut TxHashSet, inner: F) -> Result<T, Error>
where
F: FnOnce(&mut Extension) -> Result<T, Error>,
F: FnOnce(&mut Extension<'_>) -> Result<T, Error>,
{
let commit_index = trees.commit_index.clone();
let batch = commit_index.batch()?;
@ -351,7 +347,7 @@ where
/// Based on the current txhashset output_pmmr.
pub fn utxo_view<'a, F, T>(trees: &'a TxHashSet, inner: F) -> Result<T, Error>
where
F: FnOnce(&UTXOView) -> Result<T, Error>,
F: FnOnce(&UTXOView<'_>) -> Result<T, Error>,
{
let res: Result<T, Error>;
{
@ -374,7 +370,7 @@ where
/// when we are done with the view.
pub fn rewindable_kernel_view<'a, F, T>(trees: &'a TxHashSet, inner: F) -> Result<T, Error>
where
F: FnOnce(&mut RewindableKernelView) -> Result<T, Error>,
F: FnOnce(&mut RewindableKernelView<'_>) -> Result<T, Error>,
{
let res: Result<T, Error>;
{
@ -400,11 +396,11 @@ where
/// of work is abandoned. Otherwise, the unit of work is permanently applied.
pub fn extending<'a, F, T>(
trees: &'a mut TxHashSet,
batch: &'a mut Batch,
batch: &'a mut Batch<'_>,
inner: F,
) -> Result<T, Error>
where
F: FnOnce(&mut Extension) -> Result<T, Error>,
F: FnOnce(&mut Extension<'_>) -> Result<T, Error>,
{
let sizes: (u64, u64, u64, u64);
let res: Result<T, Error>;
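
A typical call site for this batched variant, mirroring how pipe.rs drives it (a sketch; the closure body reuses `apply_block` and `validate_roots` from this diff, everything else is illustrative):

    let mut batch = store.batch()?;
    txhashset::extending(&mut txhashset, &mut batch, |extension| {
        extension.apply_block(&block)?;   // returning Err here abandons the extension
        extension.validate_roots()?;
        Ok(())
    })?;
    batch.commit()?;                      // the caller still owns the outer batch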
@ -470,11 +466,11 @@ where
/// the header_head as they diverge during sync.
pub fn sync_extending<'a, F, T>(
trees: &'a mut TxHashSet,
batch: &'a mut Batch,
batch: &'a mut Batch<'_>,
inner: F,
) -> Result<T, Error>
where
F: FnOnce(&mut HeaderExtension) -> Result<T, Error>,
F: FnOnce(&mut HeaderExtension<'_>) -> Result<T, Error>,
{
let size: u64;
let res: Result<T, Error>;
@ -529,11 +525,11 @@ where
/// to allow headers to be validated before we receive the full block data.
pub fn header_extending<'a, F, T>(
trees: &'a mut TxHashSet,
batch: &'a mut Batch,
batch: &'a mut Batch<'_>,
inner: F,
) -> Result<T, Error>
where
F: FnOnce(&mut HeaderExtension) -> Result<T, Error>,
F: FnOnce(&mut HeaderExtension<'_>) -> Result<T, Error>,
{
let size: u64;
let res: Result<T, Error>;
@ -604,7 +600,7 @@ pub struct HeaderExtension<'a> {
impl<'a> HeaderExtension<'a> {
fn new(
pmmr: PMMR<'a, BlockHeader, PMMRBackend<BlockHeader>>,
batch: &'a Batch,
batch: &'a Batch<'_>,
header: BlockHeader,
) -> HeaderExtension<'a> {
HeaderExtension {
@ -807,7 +803,7 @@ impl<'a> Committed for Extension<'a> {
}
impl<'a> Extension<'a> {
fn new(trees: &'a mut TxHashSet, batch: &'a Batch, header: BlockHeader) -> Extension<'a> {
fn new(trees: &'a mut TxHashSet, batch: &'a Batch<'_>, header: BlockHeader) -> Extension<'a> {
Extension {
header,
header_pmmr: PMMR::at(
@ -1240,7 +1236,7 @@ impl<'a> Extension<'a> {
pub fn validate(
&self,
fast_validation: bool,
status: &TxHashsetWriteStatus,
status: &dyn TxHashsetWriteStatus,
) -> Result<((Commitment, Commitment)), Error> {
self.validate_mmrs()?;
self.validate_roots()?;
@ -1320,7 +1316,7 @@ impl<'a> Extension<'a> {
)
}
fn verify_kernel_signatures(&self, status: &TxHashsetWriteStatus) -> Result<(), Error> {
fn verify_kernel_signatures(&self, status: &dyn TxHashsetWriteStatus) -> Result<(), Error> {
let now = Instant::now();
let mut kern_count = 0;
@ -1347,7 +1343,7 @@ impl<'a> Extension<'a> {
Ok(())
}
fn verify_rangeproofs(&self, status: &TxHashsetWriteStatus) -> Result<(), Error> {
fn verify_rangeproofs(&self, status: &dyn TxHashsetWriteStatus) -> Result<(), Error> {
let now = Instant::now();
let mut commits: Vec<Commitment> = vec![];
@ -1470,7 +1466,8 @@ fn check_and_remove_files(txhashset_path: &PathBuf, header: &BlockHeader) -> Res
.file_name()
.and_then(|n| n.to_str().map(|s| String::from(s)))
})
}).collect();
})
.collect();
let dir_difference: Vec<String> = subdirectories_found
.difference(&subdirectories_expected)
@ -1496,7 +1493,8 @@ fn check_and_remove_files(txhashset_path: &PathBuf, header: &BlockHeader) -> Res
} else {
String::from(s)
}
}).collect();
})
.collect();
let subdirectories = fs::read_dir(txhashset_path)?;
for subdirectory in subdirectories {
@ -1509,7 +1507,8 @@ fn check_and_remove_files(txhashset_path: &PathBuf, header: &BlockHeader) -> Res
.file_name()
.and_then(|n| n.to_str().map(|s| String::from(s)))
})
}).collect();
})
.collect();
let difference: Vec<String> = pmmr_files_found
.difference(&pmmr_files_expected)
.cloned()
@ -1540,7 +1539,7 @@ fn check_and_remove_files(txhashset_path: &PathBuf, header: &BlockHeader) -> Res
pub fn input_pos_to_rewind(
block_header: &BlockHeader,
head_header: &BlockHeader,
batch: &Batch,
batch: &Batch<'_>,
) -> Result<Bitmap, Error> {
if head_header.height < block_header.height {
debug!(

View file

@ -14,12 +14,12 @@
//! Lightweight readonly view into output MMR for convenience.
use core::core::pmmr::ReadonlyPMMR;
use core::core::{Block, Input, Output, Transaction};
use core::ser::PMMRIndexHashable;
use error::{Error, ErrorKind};
use crate::core::core::pmmr::ReadonlyPMMR;
use crate::core::core::{Block, Input, Output, Transaction};
use crate::core::ser::PMMRIndexHashable;
use crate::error::{Error, ErrorKind};
use crate::store::Batch;
use grin_store::pmmr::PMMRBackend;
use store::Batch;
/// Readonly view of the UTXO set (based on output MMR).
pub struct UTXOView<'a> {
@ -31,7 +31,7 @@ impl<'a> UTXOView<'a> {
/// Build a new UTXO view.
pub fn new(
pmmr: ReadonlyPMMR<'a, Output, PMMRBackend<Output>>,
batch: &'a Batch,
batch: &'a Batch<'_>,
) -> UTXOView<'a> {
UTXOView { pmmr, batch }
}

View file

@ -14,10 +14,10 @@
//! Base types that the block chain pipeline requires.
use core::core::hash::{Hash, Hashed, ZERO_HASH};
use core::core::{Block, BlockHeader};
use core::pow::Difficulty;
use core::ser;
use crate::core::core::hash::{Hash, Hashed, ZERO_HASH};
use crate::core::core::{Block, BlockHeader};
use crate::core::pow::Difficulty;
use crate::core::ser;
bitflags! {
/// Options for block validation
@ -98,7 +98,7 @@ impl ser::Writeable for Tip {
}
impl ser::Readable for Tip {
fn read(reader: &mut ser::Reader) -> Result<Tip, ser::Error> {
fn read(reader: &mut dyn ser::Reader) -> Result<Tip, ser::Error> {
let height = reader.read_u64()?;
let last = Hash::read(reader)?;
let prev = Hash::read(reader)?;

View file

@ -12,29 +12,24 @@
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate chrono;
extern crate env_logger;
extern crate grin_chain as chain;
extern crate grin_core as core;
extern crate grin_keychain as keychain;
extern crate grin_store as store;
extern crate grin_util as util;
extern crate rand;
use self::chain::types::NoopAdapter;
use self::chain::Chain;
use self::core::core::verifier_cache::LruVerifierCache;
use self::core::core::{Block, BlockHeader, Transaction};
use self::core::global::{self, ChainTypes};
use self::core::libtx;
use self::core::pow::{self, Difficulty};
use self::core::{consensus, genesis};
use self::keychain::{ExtKeychain, ExtKeychainPath, Keychain};
use self::util::{Mutex, RwLock, StopState};
use chrono::Duration;
use grin_chain as chain;
use grin_core as core;
use grin_keychain as keychain;
use grin_store as store;
use grin_util as util;
use std::fs;
use std::sync::Arc;
use util::RwLock;
use chain::types::NoopAdapter;
use chain::Chain;
use core::core::verifier_cache::LruVerifierCache;
use core::core::{Block, BlockHeader, Transaction};
use core::global::{self, ChainTypes};
use core::libtx;
use core::pow::{self, Difficulty};
use core::{consensus, genesis};
use keychain::{ExtKeychain, ExtKeychainPath, Keychain};
fn clean_output_dir(dir_name: &str) {
let _ = fs::remove_dir_all(dir_name);
@ -55,7 +50,9 @@ fn setup(dir_name: &str) -> Chain {
pow::verify_size,
verifier_cache,
false,
).unwrap()
Arc::new(Mutex::new(StopState::new())),
)
.unwrap()
}
fn reload_chain(dir_name: &str) -> Chain {
@ -69,7 +66,9 @@ fn reload_chain(dir_name: &str) -> Chain {
pow::verify_size,
verifier_cache,
false,
).unwrap()
Arc::new(Mutex::new(StopState::new())),
)
.unwrap()
}
#[test]
@ -98,7 +97,8 @@ fn data_files() {
next_header_info.difficulty,
global::proofsize(),
global::min_edge_bits(),
).unwrap();
)
.unwrap();
chain
.process_block(b.clone(), chain::Options::MINE)

View file

@ -12,30 +12,26 @@
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate chrono;
extern crate grin_chain as chain;
extern crate grin_core as core;
extern crate grin_keychain as keychain;
extern crate grin_store as store;
extern crate grin_util as util;
extern crate rand;
use self::chain::types::NoopAdapter;
use self::chain::Chain;
use self::core::core::hash::Hashed;
use self::core::core::verifier_cache::LruVerifierCache;
use self::core::core::{Block, BlockHeader, OutputFeatures, OutputIdentifier, Transaction};
use self::core::genesis;
use self::core::global::ChainTypes;
use self::core::libtx::{self, build, reward};
use self::core::pow::Difficulty;
use self::core::{consensus, global, pow};
use self::keychain::{ExtKeychain, ExtKeychainPath, Keychain};
use self::util::{Mutex, RwLock, StopState};
use chrono::Duration;
use grin_chain as chain;
use grin_core as core;
use grin_keychain as keychain;
use grin_store as store;
use grin_util as util;
use std::fs;
use std::sync::Arc;
use util::RwLock;
use chain::types::NoopAdapter;
use chain::Chain;
use core::core::hash::Hashed;
use core::core::verifier_cache::LruVerifierCache;
use core::core::{Block, BlockHeader, OutputFeatures, OutputIdentifier, Transaction};
use core::genesis;
use core::global::ChainTypes;
use core::libtx::{self, build, reward};
use core::pow::Difficulty;
use core::{consensus, global, pow};
use keychain::{ExtKeychain, ExtKeychainPath, Keychain};
fn clean_output_dir(dir_name: &str) {
let _ = fs::remove_dir_all(dir_name);
@ -54,7 +50,9 @@ fn setup(dir_name: &str, genesis: Block) -> Chain {
pow::verify_size,
verifier_cache,
false,
).unwrap()
Arc::new(Mutex::new(StopState::new())),
)
.unwrap()
}
#[test]
@ -89,7 +87,8 @@ fn mine_genesis_reward_chain() {
Difficulty::unit(),
global::proofsize(),
global::min_edge_bits(),
).unwrap();
)
.unwrap();
mine_some_on_top(".grin.genesis", genesis, &keychain);
}
@ -124,7 +123,8 @@ where
next_header_info.difficulty,
global::proofsize(),
edge_bits,
).unwrap();
)
.unwrap();
b.header.pow.proof.edge_bits = edge_bits;
let bhash = b.hash();
@ -309,7 +309,8 @@ fn spend_in_fork_and_compact() {
build::with_fee(20000),
],
&kc,
).unwrap();
)
.unwrap();
let next = prepare_block_tx(&kc, &fork_head, &chain, 7, vec![&tx1]);
let prev_main = next.header.clone();
@ -325,7 +326,8 @@ fn spend_in_fork_and_compact() {
build::with_fee(20000),
],
&kc,
).unwrap();
)
.unwrap();
let next = prepare_block_tx(&kc, &prev_main, &chain, 9, vec![&tx2]);
let prev_main = next.header.clone();
@ -351,16 +353,12 @@ fn spend_in_fork_and_compact() {
let head = chain.head_header().unwrap();
assert_eq!(head.height, 6);
assert_eq!(head.hash(), prev_main.hash());
assert!(
chain
.is_unspent(&OutputIdentifier::from_output(&tx2.outputs()[0]))
.is_ok()
);
assert!(
chain
.is_unspent(&OutputIdentifier::from_output(&tx1.outputs()[0]))
.is_err()
);
assert!(chain
.is_unspent(&OutputIdentifier::from_output(&tx2.outputs()[0]))
.is_ok());
assert!(chain
.is_unspent(&OutputIdentifier::from_output(&tx1.outputs()[0]))
.is_err());
// make the fork win
let fork_next = prepare_fork_block(&kc, &prev_fork, &chain, 10);
@ -374,16 +372,12 @@ fn spend_in_fork_and_compact() {
let head = chain.head_header().unwrap();
assert_eq!(head.height, 7);
assert_eq!(head.hash(), prev_fork.hash());
assert!(
chain
.is_unspent(&OutputIdentifier::from_output(&tx2.outputs()[0]))
.is_ok()
);
assert!(
chain
.is_unspent(&OutputIdentifier::from_output(&tx1.outputs()[0]))
.is_err()
);
assert!(chain
.is_unspent(&OutputIdentifier::from_output(&tx2.outputs()[0]))
.is_ok());
assert!(chain
.is_unspent(&OutputIdentifier::from_output(&tx1.outputs()[0]))
.is_err());
// add 20 blocks to go past the test horizon
let mut prev = prev_fork;
@ -438,7 +432,8 @@ fn output_header_mappings() {
next_header_info.difficulty,
global::proofsize(),
edge_bits,
).unwrap();
)
.unwrap();
b.header.pow.proof.edge_bits = edge_bits;
chain.process_block(b, chain::Options::MINE).unwrap();
@ -547,7 +542,9 @@ fn actual_diff_iter_output() {
pow::verify_size,
verifier_cache,
false,
).unwrap();
Arc::new(Mutex::new(StopState::new())),
)
.unwrap();
let iter = chain.difficulty_iter();
let mut last_time = 0;
let mut first = true;

View file

@ -12,24 +12,21 @@
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate env_logger;
extern crate grin_chain as chain;
extern crate grin_core as core;
extern crate grin_keychain as keychain;
extern crate grin_store as store;
extern crate rand;
use self::chain::{Error, Tip};
use self::core::core::hash::Hashed;
use self::core::core::Block;
use self::core::global::{self, ChainTypes};
use self::core::libtx;
use self::core::pow::{self, Difficulty};
use self::keychain::{ExtKeychain, ExtKeychainPath, Keychain};
use env_logger;
use grin_chain as chain;
use grin_core as core;
use grin_keychain as keychain;
use grin_store as store;
use std::fs;
use std::sync::Arc;
use chain::{Error, Tip};
use core::core::hash::Hashed;
use core::core::Block;
use core::global::{self, ChainTypes};
use core::libtx;
use core::pow::{self, Difficulty};
use keychain::{ExtKeychain, ExtKeychainPath, Keychain};
fn clean_output_dir(dir_name: &str) {
let _ = fs::remove_dir_all(dir_name);
}

View file

@ -12,29 +12,25 @@
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate chrono;
extern crate env_logger;
extern crate grin_chain as chain;
extern crate grin_core as core;
extern crate grin_keychain as keychain;
extern crate grin_store as store;
extern crate grin_util as util;
extern crate rand;
use self::chain::types::NoopAdapter;
use self::chain::ErrorKind;
use self::core::core::transaction;
use self::core::core::verifier_cache::LruVerifierCache;
use self::core::global::{self, ChainTypes};
use self::core::libtx::{self, build};
use self::core::pow::Difficulty;
use self::core::{consensus, pow};
use self::keychain::{ExtKeychain, ExtKeychainPath, Keychain};
use self::util::{Mutex, RwLock, StopState};
use chrono::Duration;
use env_logger;
use grin_chain as chain;
use grin_core as core;
use grin_keychain as keychain;
use grin_store as store;
use grin_util as util;
use std::fs;
use std::sync::Arc;
use util::RwLock;
use chain::types::NoopAdapter;
use chain::ErrorKind;
use core::core::transaction;
use core::core::verifier_cache::LruVerifierCache;
use core::global::{self, ChainTypes};
use core::libtx::{self, build};
use core::pow::Difficulty;
use core::{consensus, pow};
use keychain::{ExtKeychain, ExtKeychainPath, Keychain};
fn clean_output_dir(dir_name: &str) {
let _ = fs::remove_dir_all(dir_name);
@ -59,7 +55,9 @@ fn test_coinbase_maturity() {
pow::verify_size,
verifier_cache,
false,
).unwrap();
Arc::new(Mutex::new(StopState::new())),
)
.unwrap();
let prev = chain.head_header().unwrap();
@ -82,15 +80,14 @@ fn test_coinbase_maturity() {
next_header_info.difficulty,
global::proofsize(),
global::min_edge_bits(),
).unwrap();
)
.unwrap();
assert_eq!(block.outputs().len(), 1);
let coinbase_output = block.outputs()[0];
assert!(
coinbase_output
.features
.contains(transaction::OutputFeatures::COINBASE_OUTPUT)
);
assert!(coinbase_output
.features
.contains(transaction::OutputFeatures::COINBASE_OUTPUT));
chain
.process_block(block.clone(), chain::Options::MINE)
@ -112,7 +109,8 @@ fn test_coinbase_maturity() {
build::with_fee(2),
],
&keychain,
).unwrap();
)
.unwrap();
let txs = vec![coinbase_txn.clone()];
let fees = txs.iter().map(|tx| tx.fee()).sum();
@ -139,7 +137,8 @@ fn test_coinbase_maturity() {
next_header_info.difficulty,
global::proofsize(),
global::min_edge_bits(),
).unwrap();
)
.unwrap();
// mine enough blocks to increase the height sufficiently for
// coinbase to reach maturity and be spendable in the next block
@ -162,7 +161,8 @@ fn test_coinbase_maturity() {
next_header_info.difficulty,
global::proofsize(),
global::min_edge_bits(),
).unwrap();
)
.unwrap();
chain.process_block(block, chain::Options::MINE).unwrap();
}
@ -189,7 +189,8 @@ fn test_coinbase_maturity() {
next_header_info.difficulty,
global::proofsize(),
global::min_edge_bits(),
).unwrap();
)
.unwrap();
let result = chain.process_block(block, chain::Options::MINE);
match result {

View file

@ -12,11 +12,11 @@
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate grin_chain as chain;
extern crate grin_core as core;
extern crate grin_keychain as keychain;
extern crate grin_store as store;
extern crate grin_util as util;
use grin_chain as chain;
use grin_core as core;
use grin_store as store;
use grin_util as util;
use std::collections::HashSet;
use std::fs::{self, File, OpenOptions};
@ -25,10 +25,10 @@ use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::time::{SystemTime, UNIX_EPOCH};
use chain::store::ChainStore;
use chain::txhashset;
use core::core::BlockHeader;
use util::file;
use crate::chain::store::ChainStore;
use crate::chain::txhashset;
use crate::core::core::BlockHeader;
use crate::util::file;
fn clean_output_dir(dir_name: &str) {
let _ = fs::remove_dir_all(dir_name);
@ -83,7 +83,8 @@ fn write_file(db_root: String) {
.join("txhashset")
.join("kernel")
.join("strange0"),
).unwrap();
)
.unwrap();
OpenOptions::new()
.create(true)
.write(true)
@ -98,13 +99,15 @@ fn write_file(db_root: String) {
.join("txhashset")
.join("strange_dir")
.join("strange2"),
).unwrap();
)
.unwrap();
fs::create_dir(
Path::new(&db_root)
.join("txhashset")
.join("strange_dir")
.join("strange_subdir"),
).unwrap();
)
.unwrap();
OpenOptions::new()
.create(true)
.write(true)
@ -114,7 +117,8 @@ fn write_file(db_root: String) {
.join("strange_dir")
.join("strange_subdir")
.join("strange3"),
).unwrap();
)
.unwrap();
}
fn txhashset_contains_expected_files(dirname: String, path_buf: PathBuf) -> bool {

View file

@ -7,6 +7,7 @@ license = "Apache-2.0"
repository = "https://github.com/mimblewimble/grin"
keywords = [ "crypto", "grin", "mimblewimble" ]
workspace = ".."
edition = "2018"
[dependencies]
rand = "0.5"

View file

@ -36,14 +36,16 @@ fn comments() -> HashMap<String, String> {
#########################################
#Server connection details
".to_string(),
"
.to_string(),
);
retval.insert(
"api_http_addr".to_string(),
"
#the address on which services will listen, e.g. Transaction Pool
".to_string(),
"
.to_string(),
);
retval.insert(
@ -51,7 +53,8 @@ fn comments() -> HashMap<String, String> {
"
#path of the secret token used by the API to authenticate the calls
#comment it out to disable basic auth
".to_string(),
"
.to_string(),
);
retval.insert(
@ -59,7 +62,8 @@ fn comments() -> HashMap<String, String> {
"
#the directory, relative to current, in which the grin blockchain
#is stored
".to_string(),
"
.to_string(),
);
retval.insert(
@ -73,7 +77,8 @@ fn comments() -> HashMap<String, String> {
#Testnet2 - Testnet2 genesis block (cuckoo 30)
#Testnet3 - Testnet3 genesis block (cuckoo 30)
#Testnet4 - Testnet4 genesis block (cuckatoo 29+)
".to_string(),
"
.to_string(),
);
retval.insert(
@ -83,21 +88,24 @@ fn comments() -> HashMap<String, String> {
#want to run a full chain validation. Can be:
#\"EveryBlock\" - run full chain validation when processing each block (except during sync)
#\"Disabled\" - disable full chain validation (just run regular block validation)
".to_string(),
"
.to_string(),
);
retval.insert(
"archive_mode".to_string(),
"
#run the node in \"full archive\" mode (default is fast-sync, pruned node)
".to_string(),
"
.to_string(),
);
retval.insert(
"skip_sync_wait".to_string(),
"
#skip waiting for sync on startup, (optional param, mostly for testing)
".to_string(),
"
.to_string(),
);
retval.insert(
@ -105,7 +113,8 @@ fn comments() -> HashMap<String, String> {
"
#whether to run the ncurses TUI. Ncurses must be installed and this
#will also disable logging to stdout
".to_string(),
"
.to_string(),
);
retval.insert(
@ -114,7 +123,8 @@ fn comments() -> HashMap<String, String> {
#Whether to run a test miner. This is only for developer testing (chaintype
#usertesting) at cuckoo 16, and will only mine into the default wallet port.
#real mining should use the standalone grin-miner
".to_string(),
"
.to_string(),
);
retval.insert(
@ -123,34 +133,39 @@ fn comments() -> HashMap<String, String> {
#########################################
### DANDELION CONFIGURATION ###
#########################################
".to_string(),
"
.to_string(),
);
retval.insert(
"relay_secs".to_string(),
"
#dandelion relay time (choose new relay peer every n secs)
".to_string(),
"
.to_string(),
);
retval.insert(
"embargo_secs".to_string(),
"
#fluff and broadcast after embargo expires if tx not seen on network
".to_string(),
"
.to_string(),
);
retval.insert(
"patience_secs".to_string(),
"
#run dandelion stem/fluff processing every n secs (stem tx aggregation in this window)
".to_string(),
"
.to_string(),
);
retval.insert(
"stem_probability".to_string(),
"
#dandelion stem probability (stem 90% of the time, fluff 10% of the time)
".to_string(),
"
.to_string(),
);
retval.insert(
@ -162,7 +177,8 @@ fn comments() -> HashMap<String, String> {
### SERVER P2P CONFIGURATION ###
#########################################
#The P2P server details (i.e. the server that communicates with other
".to_string(),
"
.to_string(),
);
retval.insert(
@ -171,21 +187,24 @@ fn comments() -> HashMap<String, String> {
#The interface on which to listen.
#0.0.0.0 will listen on all interfaces, allowing others to interact
#127.0.0.1 will listen on the local machine only
".to_string(),
"
.to_string(),
);
retval.insert(
"port".to_string(),
"
#The port on which to listen.
".to_string(),
"
.to_string(),
);
retval.insert(
"seeding_type".to_string(),
"
#how to seed this server, can be None, List or DNSSeed
".to_string(),
"
.to_string(),
);
retval.insert(
@ -214,7 +233,8 @@ fn comments() -> HashMap<String, String> {
# 15 = Bit flags for FULL_NODE
#This structure needs to be changed internally, to make it more configurable
".to_string(),
"
.to_string(),
);
retval.insert(
@ -223,35 +243,40 @@ fn comments() -> HashMap<String, String> {
#########################################
### MEMPOOL CONFIGURATION ###
#########################################
".to_string(),
"
.to_string(),
);
retval.insert(
"accept_fee_base".to_string(),
"
#base fee that's accepted into the pool
".to_string(),
"
.to_string(),
);
retval.insert(
"max_pool_size".to_string(),
"
#maximum number of transactions allowed in the pool
".to_string(),
"
.to_string(),
);
retval.insert(
"max_stempool_size".to_string(),
"
#maximum number of transactions allowed in the stempool
".to_string(),
"
.to_string(),
);
retval.insert(
"mineable_max_weight".to_string(),
"
#maximum total weight of transactions that can get selected to build a block
".to_string(),
"
.to_string(),
);
retval.insert(
@ -260,21 +285,24 @@ fn comments() -> HashMap<String, String> {
################################################
### STRATUM MINING SERVER CONFIGURATION ###
################################################
".to_string(),
"
.to_string(),
);
retval.insert(
"enable_stratum_server".to_string(),
"
#whether stratum server is enabled
".to_string(),
"
.to_string(),
);
retval.insert(
"stratum_server_addr".to_string(),
"
#what port and address for the stratum server to listen on
".to_string(),
"
.to_string(),
);
retval.insert(
@ -282,28 +310,32 @@ fn comments() -> HashMap<String, String> {
"
#the amount of time, in seconds, to attempt to mine on a particular
#header before stopping and re-collecting transactions from the pool
".to_string(),
"
.to_string(),
);
retval.insert(
"minimum_share_difficulty".to_string(),
"
#the minimum acceptable share difficulty to request from miners
".to_string(),
"
.to_string(),
);
retval.insert(
"wallet_listener_url".to_string(),
"
#the wallet receiver to which coinbase rewards will be sent
".to_string(),
"
.to_string(),
);
retval.insert(
"burn_reward".to_string(),
"
#whether to ignore the reward (mostly for testing)
".to_string(),
"
.to_string(),
);
retval.insert(
@ -312,14 +344,16 @@ fn comments() -> HashMap<String, String> {
#########################################
### WALLET CONFIGURATION ###
#########################################
".to_string(),
"
.to_string(),
);
retval.insert(
"api_listen_interface".to_string(),
"
#host IP for wallet listener, change to \"0.0.0.0\" to receive grins
".to_string(),
"
.to_string(),
);
retval.insert(
@ -332,7 +366,8 @@ fn comments() -> HashMap<String, String> {
#private key for the TLS certificate
#tls_certificate_key = \"\"
".to_string(),
"
.to_string(),
);
retval.insert(
@ -340,31 +375,36 @@ fn comments() -> HashMap<String, String> {
"
#path of the secret token used by the API to authenticate the calls
#comment it to disable basic auth
".to_string(),
"
.to_string(),
);
retval.insert(
"check_node_api_http_addr".to_string(),
"
#where the wallet should find a running node
".to_string(),
"
.to_string(),
);
retval.insert(
"node_api_secret_path".to_string(),
"
#location of the node api secret for basic auth on the Grin API
".to_string(),
"
.to_string(),
);
retval.insert(
"data_file_dir".to_string(),
"
#where to find wallet files (seed, data, etc)
".to_string(),
"
.to_string(),
);
retval.insert(
"dark_background_color_scheme".to_string(),
"
#Whether to use the black background color scheme for command line
".to_string(),
"
.to_string(),
);
retval.insert(
@ -373,49 +413,56 @@ fn comments() -> HashMap<String, String> {
#########################################
### LOGGING CONFIGURATION ###
#########################################
".to_string(),
"
.to_string(),
);
retval.insert(
"log_to_stdout".to_string(),
"
#whether to log to stdout
".to_string(),
"
.to_string(),
);
retval.insert(
"stdout_log_level".to_string(),
"
#log level for stdout: Error, Warning, Info, Debug, Trace
".to_string(),
"
.to_string(),
);
retval.insert(
"log_to_file".to_string(),
"
#whether to log to a file
".to_string(),
"
.to_string(),
);
retval.insert(
"file_log_level".to_string(),
"
#log level for file: Error, Warning, Info, Debug, Trace
".to_string(),
"
.to_string(),
);
retval.insert(
"log_file_path".to_string(),
"
#log file path
".to_string(),
"
.to_string(),
);
retval.insert(
"log_file_append".to_string(),
"
#whether to append to the log file (true), or replace it on every run (false)
".to_string(),
"
.to_string(),
);
retval.insert(
@ -423,7 +470,8 @@ fn comments() -> HashMap<String, String> {
"
#maximum log file size in bytes before performing log rotation
#comment it to disable log rotation
".to_string(),
"
.to_string(),
);
retval
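
The repeated hunks in this file are formatting-only. A newer rustfmt, presumably the one shipped alongside the 2018 toolchain, no longer keeps a trailing method call on the same line as the closing quote of a multi-line string literal, so the quote now stands alone and .to_string() drops to its own line; the same style change splits ).unwrap() across two lines throughout the rest of the diff. A minimal standalone sketch of the new shape, not taken from this commit (the function and its contents are invented):

fn formatting_sketch() -> String {
    // Old layout: the literal's closing quote and the call shared a line:
    //     ".to_string(),
    // New layout: the literal ends first and the call gets its own line.
    "
#an example comment line
"
    .to_string()
}

fn main() {
    assert!(formatting_sketch().contains("example"));
}
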

View file

@ -25,13 +25,13 @@ use std::io::Read;
use std::path::PathBuf;
use toml;
use comments::insert_comments;
use servers::ServerConfig;
use types::{
use crate::comments::insert_comments;
use crate::servers::ServerConfig;
use crate::types::{
ConfigError, ConfigMembers, GlobalConfig, GlobalWalletConfig, GlobalWalletConfigMembers,
};
use util::LoggingConfig;
use wallet::WalletConfig;
use crate::util::LoggingConfig;
use crate::wallet::WalletConfig;
/// The default file name to use when trying to derive
/// the node config file location

View file

@ -20,20 +20,19 @@
#![deny(unused_mut)]
#![warn(missing_docs)]
extern crate dirs;
extern crate rand;
use dirs;
#[macro_use]
extern crate serde_derive;
extern crate toml;
use toml;
extern crate grin_p2p as p2p;
extern crate grin_servers as servers;
extern crate grin_util as util;
extern crate grin_wallet as wallet;
use grin_servers as servers;
use grin_util as util;
use grin_wallet as wallet;
mod comments;
pub mod config;
pub mod types;
pub use config::{initial_setup_server, initial_setup_wallet};
pub use types::{ConfigError, ConfigMembers, GlobalConfig, GlobalWalletConfig};
pub use crate::config::{initial_setup_server, initial_setup_wallet};
pub use crate::types::{ConfigError, ConfigMembers, GlobalConfig, GlobalWalletConfig};
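
This file and the other lib.rs / Cargo.toml hunks below carry the Rust 2018 edition migration itself: edition = "2018" is added to the manifests, extern crate declarations become plain use statements (only the #[macro_use] ones are kept), and paths to the crate's own modules are written from an explicit crate:: root. A minimal sketch of the same pattern in a throwaway binary crate, not taken from this commit (module and function names are invented):

// Assumes edition = "2018" in Cargo.toml.

mod config {
    pub fn initial_setup() -> &'static str {
        "ok"
    }
}

// 2015 edition: `pub use config::initial_setup;` plus an `extern crate ...;`
// line for every dependency. 2018 edition: in-crate paths start at `crate::`,
// and external crates are simply named in `use` statements when needed.
pub use crate::config::initial_setup;

fn main() {
    assert_eq!(initial_setup(), "ok");
}
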

View file

@ -18,9 +18,9 @@ use std::fmt;
use std::io;
use std::path::PathBuf;
use servers::ServerConfig;
use util::LoggingConfig;
use wallet::WalletConfig;
use crate::servers::ServerConfig;
use crate::util::LoggingConfig;
use crate::wallet::WalletConfig;
/// Error type wrapping config errors.
#[derive(Debug)]
@ -39,7 +39,7 @@ pub enum ConfigError {
}
impl fmt::Display for ConfigError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
ConfigError::ParseError(ref file_name, ref message) => write!(
f,

View file

@ -7,6 +7,7 @@ license = "Apache-2.0"
repository = "https://github.com/mimblewimble/grin"
keywords = [ "crypto", "grin", "mimblewimble" ]
workspace = ".."
edition = "2018"
[dependencies]
bitflags = "1"

View file

@ -77,5 +77,6 @@ fn tx() -> Transaction {
with_fee(2),
],
&keychain,
).unwrap()
)
.unwrap()
}

View file

@ -20,8 +20,8 @@
use std::cmp::{max, min};
use global;
use pow::Difficulty;
use crate::global;
use crate::pow::Difficulty;
/// A grin is divisible to 10^9, following the SI prefixes
pub const GRIN_BASE: u64 = 1_000_000_000;

View file

@ -25,9 +25,9 @@ pub mod pmmr;
pub mod transaction;
pub mod verifier_cache;
use consensus::GRIN_BASE;
use crate::consensus::GRIN_BASE;
use util::secp::pedersen::Commitment;
use crate::util::secp::pedersen::Commitment;
pub use self::block::*;
pub use self::block_sums::*;

View file

@ -14,28 +14,28 @@
//! Blocks and blockheaders
use crate::util::RwLock;
use chrono::naive::{MAX_DATE, MIN_DATE};
use chrono::prelude::{DateTime, NaiveDateTime, Utc};
use std::collections::HashSet;
use std::fmt;
use std::iter::FromIterator;
use std::sync::Arc;
use util::RwLock;
use consensus::{reward, REWARD};
use core::committed::{self, Committed};
use core::compact_block::{CompactBlock, CompactBlockBody};
use core::hash::{Hash, Hashed, ZERO_HASH};
use core::verifier_cache::VerifierCache;
use core::{
use crate::consensus::{reward, REWARD};
use crate::core::committed::{self, Committed};
use crate::core::compact_block::{CompactBlock, CompactBlockBody};
use crate::core::hash::{Hash, Hashed, ZERO_HASH};
use crate::core::verifier_cache::VerifierCache;
use crate::core::{
transaction, Commitment, Input, KernelFeatures, Output, OutputFeatures, Transaction,
TransactionBody, TxKernel,
};
use global;
use keychain::{self, BlindingFactor};
use pow::{Difficulty, Proof, ProofOfWork};
use ser::{self, PMMRable, Readable, Reader, Writeable, Writer};
use util::{secp, static_secp_instance};
use crate::global;
use crate::keychain::{self, BlindingFactor};
use crate::pow::{Difficulty, Proof, ProofOfWork};
use crate::ser::{self, PMMRable, Readable, Reader, Writeable, Writer};
use crate::util::{secp, static_secp_instance};
/// Errors thrown by Block validation
#[derive(Debug, Clone, Eq, PartialEq, Fail)]
@ -104,7 +104,7 @@ impl From<keychain::Error> for Error {
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "Block Error (display needs implementation")
}
}
@ -180,7 +180,7 @@ impl Writeable for BlockHeader {
/// Deserialization of a block header
impl Readable for BlockHeader {
fn read(reader: &mut Reader) -> Result<BlockHeader, ser::Error> {
fn read(reader: &mut dyn Reader) -> Result<BlockHeader, ser::Error> {
let (version, height, timestamp) = ser_multiread!(reader, read_u16, read_u64, read_i64);
let prev_hash = Hash::read(reader)?;
let prev_root = Hash::read(reader)?;
@ -306,7 +306,7 @@ impl Writeable for Block {
/// Implementation of Readable for a block, defines how to read a full block
/// from a binary stream.
impl Readable for Block {
fn read(reader: &mut Reader) -> Result<Block, ser::Error> {
fn read(reader: &mut dyn Reader) -> Result<Block, ser::Error> {
let header = BlockHeader::read(reader)?;
let body = TransactionBody::read(reader)?;
@ -463,7 +463,8 @@ impl Block {
..Default::default()
},
body: agg_tx.into(),
}.cut_through()
}
.cut_through()
}
/// Consumes this block and returns a new block with the coinbase output
@ -572,7 +573,7 @@ impl Block {
pub fn validate(
&self,
prev_kernel_offset: &BlindingFactor,
verifier: Arc<RwLock<VerifierCache>>,
verifier: Arc<RwLock<dyn VerifierCache>>,
) -> Result<Commitment, Error> {
self.body.validate(true, verifier)?;
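
Most of the signature changes in this file, and in the ser, pow and libtx modules further down, are the bare-trait-object cleanup: a trait used directly as a type, such as Reader, VerifierCache or Fail, is now written dyn Reader, dyn VerifierCache, dyn Fail, which the compiler warns about on the 2018 edition if left bare. A minimal sketch of the same idea with an invented trait, not grin code:

#![deny(bare_trait_objects)] // local choice: make the lint an error so the old spelling fails

use std::sync::{Arc, RwLock};

trait Verifier {
    fn verify(&self, value: u64) -> bool;
}

struct AlwaysOk;

impl Verifier for AlwaysOk {
    fn verify(&self, _value: u64) -> bool {
        true
    }
}

// 2015 style would have been `cache: &Arc<RwLock<Verifier>>`; the explicit
// `dyn` keeps the dynamic dispatch visible at the type level.
fn check(cache: &Arc<RwLock<dyn Verifier>>, value: u64) -> bool {
    cache.read().unwrap().verify(value)
}

fn main() {
    let cache: Arc<RwLock<dyn Verifier>> = Arc::new(RwLock::new(AlwaysOk));
    assert!(check(&cache, 7));
}
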

View file

@ -15,10 +15,10 @@
//! BlockSums per-block running totals for utxo_sum and kernel_sum.
//! Allows fast "full" verification of kernel sums at a given block height.
use core::committed::Committed;
use ser::{self, Readable, Reader, Writeable, Writer};
use util::secp::pedersen::Commitment;
use util::secp_static;
use crate::core::committed::Committed;
use crate::ser::{self, Readable, Reader, Writeable, Writer};
use crate::util::secp::pedersen::Commitment;
use crate::util::secp_static;
/// The output_sum and kernel_sum for a given block.
/// This is used to validate the next block being processed by applying
@ -41,7 +41,7 @@ impl Writeable for BlockSums {
}
impl Readable for BlockSums {
fn read(reader: &mut Reader) -> Result<BlockSums, ser::Error> {
fn read(reader: &mut dyn Reader) -> Result<BlockSums, ser::Error> {
Ok(BlockSums {
utxo_sum: Commitment::read(reader)?,
kernel_sum: Commitment::read(reader)?,
@ -62,7 +62,7 @@ impl Default for BlockSums {
/// It's a tuple but we can verify the "full" kernel sums on it.
/// This means we can take a previous block_sums, apply a new block to it
/// and verify the full kernel sums (full UTXO and kernel sets).
impl<'a> Committed for (BlockSums, &'a Committed) {
impl<'a> Committed for (BlockSums, &'a dyn Committed) {
fn inputs_committed(&self) -> Vec<Commitment> {
self.1.inputs_committed()
}

View file

@ -14,12 +14,12 @@
//! The Committed trait and associated errors.
use keychain;
use keychain::BlindingFactor;
use crate::keychain;
use crate::keychain::BlindingFactor;
use util::secp::key::SecretKey;
use util::secp::pedersen::Commitment;
use util::{secp, secp_static, static_secp_instance};
use crate::util::secp::key::SecretKey;
use crate::util::secp::pedersen::Commitment;
use crate::util::{secp, secp_static, static_secp_instance};
/// Errors from summing and verifying kernel excesses via committed trait.
#[derive(Debug, Clone, PartialEq, Eq)]

View file

@ -16,11 +16,11 @@
use rand::{thread_rng, Rng};
use core::block::{Block, BlockHeader, Error};
use core::hash::Hashed;
use core::id::ShortIdentifiable;
use core::{KernelFeatures, Output, OutputFeatures, ShortId, TxKernel};
use ser::{self, read_multi, Readable, Reader, VerifySortedAndUnique, Writeable, Writer};
use crate::core::block::{Block, BlockHeader, Error};
use crate::core::hash::Hashed;
use crate::core::id::ShortIdentifiable;
use crate::core::{KernelFeatures, Output, OutputFeatures, ShortId, TxKernel};
use crate::ser::{self, read_multi, Readable, Reader, VerifySortedAndUnique, Writeable, Writer};
/// Container for full (full) outputs and kernels and kern_ids for a compact block.
#[derive(Debug, Clone)]
@ -83,7 +83,7 @@ impl CompactBlockBody {
}
impl Readable for CompactBlockBody {
fn read(reader: &mut Reader) -> Result<CompactBlockBody, ser::Error> {
fn read(reader: &mut dyn Reader) -> Result<CompactBlockBody, ser::Error> {
let (out_full_len, kern_full_len, kern_id_len) =
ser_multiread!(reader, read_u64, read_u64, read_u64);
@ -214,7 +214,7 @@ impl Writeable for CompactBlock {
/// Implementation of Readable for a compact block, defines how to read a
/// compact block from a binary stream.
impl Readable for CompactBlock {
fn read(reader: &mut Reader) -> Result<CompactBlock, ser::Error> {
fn read(reader: &mut dyn Reader) -> Result<CompactBlock, ser::Error> {
let header = BlockHeader::read(reader)?;
let nonce = reader.read_u64()?;
let body = CompactBlockBody::read(reader)?;

View file

@ -23,10 +23,10 @@ use std::convert::AsRef;
use std::ops::Add;
use std::{fmt, ops};
use blake2::blake2b::Blake2b;
use crate::blake2::blake2b::Blake2b;
use ser::{self, AsFixedBytes, Error, FixedLength, Readable, Reader, Writeable, Writer};
use util;
use crate::ser::{self, AsFixedBytes, Error, FixedLength, Readable, Reader, Writeable, Writer};
use crate::util;
/// A hash consisting of all zeroes, used as a sentinel. No known preimage.
pub const ZERO_HASH: Hash = Hash([0; 32]);
@ -37,7 +37,7 @@ pub const ZERO_HASH: Hash = Hash([0; 32]);
pub struct Hash(pub [u8; 32]);
impl fmt::Debug for Hash {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
for i in self.0[..4].iter() {
write!(f, "{:02x}", i)?;
}
@ -46,7 +46,7 @@ impl fmt::Debug for Hash {
}
impl fmt::Display for Hash {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(self, f)
}
}
@ -136,7 +136,7 @@ impl AsRef<[u8]> for Hash {
}
impl Readable for Hash {
fn read(reader: &mut Reader) -> Result<Hash, ser::Error> {
fn read(reader: &mut dyn Reader) -> Result<Hash, ser::Error> {
let v = reader.read_fixed_bytes(32)?;
let mut a = [0; 32];
a.copy_from_slice(&v[..]);

View file

@ -20,9 +20,9 @@ use std::cmp::Ordering;
use byteorder::{ByteOrder, LittleEndian};
use siphasher::sip::SipHasher24;
use core::hash::{Hash, Hashed};
use ser::{self, Readable, Reader, Writeable, Writer};
use util;
use crate::core::hash::{Hash, Hashed};
use crate::ser::{self, Readable, Reader, Writeable, Writer};
use crate::util;
/// The size of a short id used to identify inputs|outputs|kernels (6 bytes)
pub const SHORT_ID_SIZE: usize = 6;
@ -79,7 +79,7 @@ pub struct ShortId([u8; 6]);
hashable_ord!(ShortId);
impl ::std::fmt::Debug for ShortId {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
write!(f, "{}(", stringify!(ShortId))?;
write!(f, "{}", self.to_hex())?;
write!(f, ")")
@ -87,7 +87,7 @@ impl ::std::fmt::Debug for ShortId {
}
impl Readable for ShortId {
fn read(reader: &mut Reader) -> Result<ShortId, ser::Error> {
fn read(reader: &mut dyn Reader) -> Result<ShortId, ser::Error> {
let v = reader.read_fixed_bytes(SHORT_ID_SIZE)?;
let mut a = [0; SHORT_ID_SIZE];
a.copy_from_slice(&v[..]);
@ -131,7 +131,7 @@ impl ShortId {
#[cfg(test)]
mod test {
use super::*;
use ser::{Writeable, Writer};
use crate::ser::{Writeable, Writer};
#[test]
fn short_id_ord() {

View file

@ -14,11 +14,11 @@
//! Merkle Proofs
use core::hash::Hash;
use core::pmmr;
use ser;
use ser::{PMMRIndexHashable, Readable, Reader, Writeable, Writer};
use util;
use crate::core::hash::Hash;
use crate::core::pmmr;
use crate::ser;
use crate::ser::{PMMRIndexHashable, Readable, Reader, Writeable, Writer};
use crate::util;
/// Merkle proof errors.
#[derive(Clone, Debug, PartialEq)]
@ -47,7 +47,7 @@ impl Writeable for MerkleProof {
}
impl Readable for MerkleProof {
fn read(reader: &mut Reader) -> Result<MerkleProof, ser::Error> {
fn read(reader: &mut dyn Reader) -> Result<MerkleProof, ser::Error> {
let mmr_size = reader.read_u64()?;
let path_len = reader.read_u64()?;
let mut path = Vec::with_capacity(path_len as usize);
@ -95,7 +95,7 @@ impl MerkleProof {
pub fn verify(
&self,
root: Hash,
element: &PMMRIndexHashable,
element: &dyn PMMRIndexHashable,
node_pos: u64,
) -> Result<(), MerkleProofError> {
let mut proof = self.clone();
@ -111,7 +111,7 @@ impl MerkleProof {
fn verify_consume(
&mut self,
root: Hash,
element: &PMMRIndexHashable,
element: &dyn PMMRIndexHashable,
node_pos: u64,
peaks_pos: &[u64],
) -> Result<(), MerkleProofError> {

View file

@ -14,9 +14,9 @@
use croaring::Bitmap;
use core::hash::Hash;
use core::BlockHeader;
use ser::PMMRable;
use crate::core::hash::Hash;
use crate::core::BlockHeader;
use crate::ser::PMMRable;
/// Storage backend for the MMR, just needs to be indexed by order of insertion.
/// The PMMR itself does not need the Backend to be accurate on the existence

View file

@ -17,11 +17,11 @@ use std::u64;
use croaring::Bitmap;
use core::hash::{Hash, ZERO_HASH};
use core::merkle_proof::MerkleProof;
use core::pmmr::{Backend, ReadonlyPMMR};
use core::BlockHeader;
use ser::{PMMRIndexHashable, PMMRable};
use crate::core::hash::{Hash, ZERO_HASH};
use crate::core::merkle_proof::MerkleProof;
use crate::core::pmmr::{Backend, ReadonlyPMMR};
use crate::core::BlockHeader;
use crate::ser::{PMMRIndexHashable, PMMRable};
/// 64 bits all ones: 0b11111111...1
const ALL_ONES: u64 = u64::MAX;
@ -36,7 +36,7 @@ const ALL_ONES: u64 = u64::MAX;
pub struct PMMR<'a, T, B>
where
T: PMMRable,
B: 'a + Backend<T>,
B: Backend<T>,
{
/// The last position in the PMMR
pub last_pos: u64,
@ -51,7 +51,7 @@ where
B: 'a + Backend<T>,
{
/// Build a new prunable Merkle Mountain Range using the provided backend.
pub fn new(backend: &'a mut B) -> PMMR<T, B> {
pub fn new(backend: &'a mut B) -> PMMR<'_, T, B> {
PMMR {
backend,
last_pos: 0,
@ -61,7 +61,7 @@ where
/// Build a new prunable Merkle Mountain Range pre-initialized until
/// last_pos with the provided backend.
pub fn at(backend: &'a mut B, last_pos: u64) -> PMMR<T, B> {
pub fn at(backend: &'a mut B, last_pos: u64) -> PMMR<'_, T, B> {
PMMR {
backend,
last_pos,
@ -70,7 +70,7 @@ where
}
/// Build a "readonly" view of this PMMR.
pub fn readonly_pmmr(&self) -> ReadonlyPMMR<T, B> {
pub fn readonly_pmmr(&self) -> ReadonlyPMMR<'_, T, B> {
ReadonlyPMMR::at(&self.backend, self.last_pos)
}
@ -83,7 +83,8 @@ where
// here we want to get from underlying hash file
// as the pos *may* have been "removed"
self.backend.get_from_file(pi)
}).collect()
})
.collect()
}
fn peak_path(&self, peak_pos: u64) -> Vec<Hash> {
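
Two related clean-ups recur in this file and in readonly_pmmr.rs / rewindable_pmmr.rs: lifetimes that used to be silently elided in paths are now written with the anonymous '_ (Formatter<'_>, PMMR<'_, T, B>), and the B: 'a bound is dropped from the struct definitions, presumably because outlives requirements are inferred from the &'a mut B field on recent compilers. A minimal sketch of both with an invented wrapper type, not grin code:

use std::fmt;

// No explicit `T: 'a` bound: it is inferred from the reference field below.
struct Holder<'a, T> {
    inner: &'a mut T,
}

impl<'a, T: Clone> Holder<'a, T> {
    // The `'_` makes it visible that the returned type borrows something,
    // instead of the old bare `-> Holder<T>` spelling.
    fn new(inner: &'a mut T) -> Holder<'_, T> {
        Holder { inner }
    }

    fn snapshot(&self) -> T {
        self.inner.clone()
    }
}

impl<'a, T: fmt::Debug> fmt::Debug for Holder<'a, T> {
    // `fmt::Formatter<'_>` rather than the bare `fmt::Formatter`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Holder({:?})", self.inner)
    }
}

fn main() {
    let mut value = 41_u64;
    let holder = Holder::new(&mut value);
    assert_eq!(holder.snapshot(), 41);
    println!("{:?}", holder);
}
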

View file

@ -16,16 +16,16 @@
use std::marker;
use core::hash::{Hash, ZERO_HASH};
use core::pmmr::pmmr::{bintree_rightmost, insertion_to_pmmr_index, peaks};
use core::pmmr::{is_leaf, Backend};
use ser::{PMMRIndexHashable, PMMRable};
use crate::core::hash::{Hash, ZERO_HASH};
use crate::core::pmmr::pmmr::{bintree_rightmost, insertion_to_pmmr_index, peaks};
use crate::core::pmmr::{is_leaf, Backend};
use crate::ser::{PMMRIndexHashable, PMMRable};
/// Readonly view of a PMMR.
pub struct ReadonlyPMMR<'a, T, B>
where
T: PMMRable,
B: 'a + Backend<T>,
B: Backend<T>,
{
/// The last position in the PMMR
last_pos: u64,
@ -41,7 +41,7 @@ where
B: 'a + Backend<T>,
{
/// Build a new readonly PMMR.
pub fn new(backend: &'a B) -> ReadonlyPMMR<T, B> {
pub fn new(backend: &'a B) -> ReadonlyPMMR<'_, T, B> {
ReadonlyPMMR {
backend,
last_pos: 0,
@ -51,7 +51,7 @@ where
/// Build a new readonly PMMR pre-initialized to
/// last_pos with the provided backend.
pub fn at(backend: &'a B, last_pos: u64) -> ReadonlyPMMR<T, B> {
pub fn at(backend: &'a B, last_pos: u64) -> ReadonlyPMMR<'_, T, B> {
ReadonlyPMMR {
backend,
last_pos,
@ -116,7 +116,8 @@ where
// here we want to get from underlying hash file
// as the pos *may* have been "removed"
self.backend.get_from_file(pi)
}).collect()
})
.collect()
}
/// Total size of the tree, including intermediary nodes and ignoring any

View file

@ -17,15 +17,15 @@
use std::marker;
use core::hash::{Hash, ZERO_HASH};
use core::pmmr::{bintree_postorder_height, is_leaf, peaks, Backend};
use ser::{PMMRIndexHashable, PMMRable};
use crate::core::hash::{Hash, ZERO_HASH};
use crate::core::pmmr::{bintree_postorder_height, is_leaf, peaks, Backend};
use crate::ser::{PMMRIndexHashable, PMMRable};
/// Rewindable (but still readonly) view of a PMMR.
pub struct RewindablePMMR<'a, T, B>
where
T: PMMRable,
B: 'a + Backend<T>,
B: Backend<T>,
{
/// The last position in the PMMR
last_pos: u64,
@ -41,7 +41,7 @@ where
B: 'a + Backend<T>,
{
/// Build a new readonly PMMR.
pub fn new(backend: &'a B) -> RewindablePMMR<T, B> {
pub fn new(backend: &'a B) -> RewindablePMMR<'_, T, B> {
RewindablePMMR {
backend,
last_pos: 0,
@ -51,7 +51,7 @@ where
/// Build a new readonly PMMR pre-initialized to
/// last_pos with the provided backend.
pub fn at(backend: &'a B, last_pos: u64) -> RewindablePMMR<T, B> {
pub fn at(backend: &'a B, last_pos: u64) -> RewindablePMMR<'_, T, B> {
RewindablePMMR {
backend,
last_pos,
@ -118,7 +118,8 @@ where
// here we want to get from underlying hash file
// as the pos *may* have been "removed"
self.backend.get_from_file(pi)
}).collect()
})
.collect()
}
/// Total size of the tree, including intermediary nodes and ignoring any

View file

@ -14,28 +14,26 @@
//! Transactions
use crate::consensus;
use crate::core::hash::Hashed;
use crate::core::verifier_cache::VerifierCache;
use crate::core::{committed, Committed};
use crate::keychain::{self, BlindingFactor};
use crate::ser::{
self, read_multi, FixedLength, PMMRable, Readable, Reader, VerifySortedAndUnique, Writeable,
Writer,
};
use crate::util;
use crate::util::secp;
use crate::util::secp::pedersen::{Commitment, RangeProof};
use crate::util::static_secp_instance;
use crate::util::RwLock;
use byteorder::{BigEndian, ByteOrder};
use std::cmp::max;
use std::cmp::Ordering;
use std::collections::HashSet;
use std::sync::Arc;
use std::{error, fmt};
use util::RwLock;
use byteorder::{BigEndian, ByteOrder};
use consensus;
use core::hash::Hashed;
use core::verifier_cache::VerifierCache;
use core::{committed, Committed};
use keychain::{self, BlindingFactor};
use ser::{
self, read_multi, FixedLength, PMMRable, Readable, Reader, VerifySortedAndUnique, Writeable,
Writer,
};
use util;
use util::secp;
use util::secp::pedersen::{Commitment, RangeProof};
use util::static_secp_instance;
bitflags! {
/// Options for a kernel's structure or use
@ -99,7 +97,7 @@ impl error::Error for Error {
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
_ => write!(f, "some kind of keychain error"),
}
@ -178,7 +176,7 @@ impl Writeable for TxKernel {
}
impl Readable for TxKernel {
fn read(reader: &mut Reader) -> Result<TxKernel, ser::Error> {
fn read(reader: &mut dyn Reader) -> Result<TxKernel, ser::Error> {
let features =
KernelFeatures::from_bits(reader.read_u8()?).ok_or(ser::Error::CorruptedData)?;
Ok(TxKernel {
@ -287,7 +285,7 @@ impl Writeable for TxKernelEntry {
}
impl Readable for TxKernelEntry {
fn read(reader: &mut Reader) -> Result<TxKernelEntry, ser::Error> {
fn read(reader: &mut dyn Reader) -> Result<TxKernelEntry, ser::Error> {
let features =
KernelFeatures::from_bits(reader.read_u8()?).ok_or(ser::Error::CorruptedData)?;
let kernel = TxKernel {
@ -372,7 +370,7 @@ impl Writeable for TransactionBody {
/// Implementation of Readable for a body, defines how to read a
/// body from a binary stream.
impl Readable for TransactionBody {
fn read(reader: &mut Reader) -> Result<TransactionBody, ser::Error> {
fn read(reader: &mut dyn Reader) -> Result<TransactionBody, ser::Error> {
let (input_len, output_len, kernel_len) =
ser_multiread!(reader, read_u64, read_u64, read_u64);
@ -637,7 +635,7 @@ impl TransactionBody {
pub fn validate(
&self,
with_reward: bool,
verifier: Arc<RwLock<VerifierCache>>,
verifier: Arc<RwLock<dyn VerifierCache>>,
) -> Result<(), Error> {
self.validate_read(with_reward)?;
@ -717,7 +715,7 @@ impl Writeable for Transaction {
/// Implementation of Readable for a transaction, defines how to read a full
/// transaction from a binary stream.
impl Readable for Transaction {
fn read(reader: &mut Reader) -> Result<Transaction, ser::Error> {
fn read(reader: &mut dyn Reader) -> Result<Transaction, ser::Error> {
let offset = BlindingFactor::read(reader)?;
let body = TransactionBody::read(reader)?;
let tx = Transaction { offset, body };
@ -868,7 +866,7 @@ impl Transaction {
/// Validates all relevant parts of a fully built transaction. Checks the
/// excess value against the signature as well as range proofs for each
/// output.
pub fn validate(&self, verifier: Arc<RwLock<VerifierCache>>) -> Result<(), Error> {
pub fn validate(&self, verifier: Arc<RwLock<dyn VerifierCache>>) -> Result<(), Error> {
self.body.validate(false, verifier)?;
self.body.verify_features()?;
self.verify_kernel_sums(self.overage(), self.offset)?;
@ -999,12 +997,12 @@ pub fn deaggregate(mk_tx: Transaction, txs: Vec<Transaction>) -> Result<Transact
let total_kernel_offset = {
let secp = static_secp_instance();
let secp = secp.lock();
let mut positive_key = vec![mk_tx.offset]
let positive_key = vec![mk_tx.offset]
.into_iter()
.filter(|x| *x != BlindingFactor::zero())
.filter_map(|x| x.secret_key(&secp).ok())
.collect::<Vec<_>>();
let mut negative_keys = kernel_offsets
let negative_keys = kernel_offsets
.into_iter()
.filter(|x| *x != BlindingFactor::zero())
.filter_map(|x| x.secret_key(&secp).ok())
@ -1063,7 +1061,7 @@ impl Writeable for Input {
/// Implementation of Readable for a transaction Input, defines how to read
/// an Input from a binary stream.
impl Readable for Input {
fn read(reader: &mut Reader) -> Result<Input, ser::Error> {
fn read(reader: &mut dyn Reader) -> Result<Input, ser::Error> {
let features =
OutputFeatures::from_bits(reader.read_u8()?).ok_or(ser::Error::CorruptedData)?;
@ -1149,7 +1147,7 @@ impl Writeable for Output {
/// Implementation of Readable for a transaction Output, defines how to read
/// an Output from a binary stream.
impl Readable for Output {
fn read(reader: &mut Reader) -> Result<Output, ser::Error> {
fn read(reader: &mut dyn Reader) -> Result<Output, ser::Error> {
let features =
OutputFeatures::from_bits(reader.read_u8()?).ok_or(ser::Error::CorruptedData)?;
@ -1276,7 +1274,7 @@ impl Writeable for OutputIdentifier {
}
impl Readable for OutputIdentifier {
fn read(reader: &mut Reader) -> Result<OutputIdentifier, ser::Error> {
fn read(reader: &mut dyn Reader) -> Result<OutputIdentifier, ser::Error> {
let features =
OutputFeatures::from_bits(reader.read_u8()?).ok_or(ser::Error::CorruptedData)?;
Ok(OutputIdentifier {
@ -1307,10 +1305,10 @@ pub fn kernel_sig_msg(fee: u64, lock_height: u64) -> Result<secp::Message, Error
#[cfg(test)]
mod test {
use super::*;
use core::hash::Hash;
use core::id::{ShortId, ShortIdentifiable};
use keychain::{ExtKeychain, Keychain};
use util::secp;
use crate::core::hash::Hash;
use crate::core::id::{ShortId, ShortIdentifiable};
use crate::keychain::{ExtKeychain, Keychain};
use crate::util::secp;
#[test]
fn test_kernel_ser_deser() {

View file

@ -17,8 +17,8 @@
use lru_cache::LruCache;
use core::hash::{Hash, Hashed};
use core::{Output, TxKernel};
use crate::core::hash::{Hash, Hashed};
use crate::core::{Output, TxKernel};
/// Verifier cache for caching expensive verification results.
/// Specifically the following -
@ -65,7 +65,8 @@ impl VerifierCache for LruVerifierCache {
.kernel_sig_verification_cache
.get_mut(&x.hash())
.unwrap_or(&mut false)
}).cloned()
})
.cloned()
.collect::<Vec<_>>();
trace!(
"lru_verifier_cache: kernel sigs: {}, not cached (must verify): {}",
@ -83,7 +84,8 @@ impl VerifierCache for LruVerifierCache {
.rangeproof_verification_cache
.get_mut(&x.proof.hash())
.unwrap_or(&mut false)
}).cloned()
})
.cloned()
.collect::<Vec<_>>();
trace!(
"lru_verifier_cache: rangeproofs: {}, not cached (must verify): {}",

View file

@ -16,9 +16,9 @@
use chrono::prelude::{TimeZone, Utc};
use core;
use global;
use pow::{Difficulty, Proof, ProofOfWork};
use crate::core;
use crate::global;
use crate::pow::{Difficulty, Proof, ProofOfWork};
/// Genesis block definition for development networks. The proof of work size
/// is small enough to mine it on the fly, so it does not contain its own

View file

@ -16,18 +16,18 @@
//! having to pass them all over the place, but aren't consensus values.
//! should be used sparingly.
use consensus::HeaderInfo;
use consensus::{
use crate::consensus::HeaderInfo;
use crate::consensus::{
graph_weight, BASE_EDGE_BITS, BLOCK_TIME_SEC, COINBASE_MATURITY, CUT_THROUGH_HORIZON,
DAY_HEIGHT, DEFAULT_MIN_EDGE_BITS, DIFFICULTY_ADJUST_WINDOW, INITIAL_DIFFICULTY, PROOFSIZE,
SECOND_POW_EDGE_BITS, STATE_SYNC_THRESHOLD, T4_CUCKAROO_HARDFORK, UNIT_DIFFICULTY,
};
use pow::{self, new_cuckaroo_ctx, new_cuckatoo_ctx, EdgeType, PoWContext};
use crate::pow::{self, new_cuckaroo_ctx, new_cuckatoo_ctx, EdgeType, PoWContext};
/// An enum collecting sets of parameters used throughout the
/// code wherever mining is needed. This should allow for
/// different sets of parameters for different purposes,
/// e.g. CI, User testing, production values
use util::RwLock;
use crate::util::RwLock;
/// Define these here, as they should be developer-set, not really tweakable
/// by users
@ -130,7 +130,7 @@ pub enum PoWContextTypes {
Cuckaroo,
}
lazy_static!{
lazy_static! {
/// The mining parameter mode
pub static ref CHAIN_TYPE: RwLock<ChainTypes> =
RwLock::new(ChainTypes::Mainnet);

View file

@ -23,28 +23,18 @@
#[macro_use]
extern crate bitflags;
extern crate blake2_rfc as blake2;
extern crate byteorder;
extern crate croaring;
extern crate grin_keychain as keychain;
extern crate grin_util as util;
use blake2_rfc as blake2;
use grin_keychain as keychain;
use grin_util as util;
#[macro_use]
extern crate lazy_static;
extern crate lru_cache;
extern crate num_bigint as bigint;
extern crate rand;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate siphasher;
#[macro_use]
extern crate log;
extern crate chrono;
extern crate failure;
extern crate uuid;
use failure;
#[macro_use]
extern crate failure_derive;
#[macro_use]
pub mod macros;

View file

@ -16,11 +16,11 @@
//! This module interfaces into the underlying
//! [Rust Aggsig library](https://github.com/mimblewimble/rust-secp256k1-zkp/blob/master/src/aggsig.rs)
use keychain::{BlindingFactor, Identifier, Keychain};
use libtx::error::{Error, ErrorKind};
use util::secp::key::{PublicKey, SecretKey};
use util::secp::pedersen::Commitment;
use util::secp::{self, aggsig, Message, Secp256k1, Signature};
use crate::keychain::{BlindingFactor, Identifier, Keychain};
use crate::libtx::error::{Error, ErrorKind};
use crate::util::secp::key::{PublicKey, SecretKey};
use crate::util::secp::pedersen::Commitment;
use crate::util::secp::{self, aggsig, Message, Secp256k1, Signature};
/// Creates a new secure nonce (as a SecretKey), guaranteed to be usable during
/// aggsig creation.

View file

@ -25,13 +25,12 @@
//! build::transaction(vec![input_rand(75), output_rand(42), output_rand(32),
//! with_fee(1)])
use core::{Input, Output, OutputFeatures, Transaction, TxKernel};
use keychain::{BlindSum, BlindingFactor, Identifier, Keychain};
use libtx::Error;
use libtx::{aggsig, proof};
use crate::core::{Input, Output, OutputFeatures, Transaction, TxKernel};
use crate::keychain::{BlindSum, BlindingFactor, Identifier, Keychain};
use crate::libtx::{aggsig, proof, Error};
/// Context information available to transaction combinators.
pub struct Context<'a, K: 'a>
pub struct Context<'a, K>
where
K: Keychain,
{
@ -40,8 +39,10 @@ where
/// Function type returned by the transaction combinators. Transforms a
/// (Transaction, BlindSum) pair into another, provided some context.
pub type Append<K> = for<'a> Fn(&'a mut Context<K>, (Transaction, TxKernel, BlindSum))
-> (Transaction, TxKernel, BlindSum);
pub type Append<K> = dyn for<'a> Fn(
&'a mut Context<'_, K>,
(Transaction, TxKernel, BlindSum),
) -> (Transaction, TxKernel, BlindSum);
/// Adds an input with the provided value and blinding key to the transaction
/// being built.
@ -244,17 +245,17 @@ where
Ok(tx)
}
// Just a simple test, most exhaustive tests in the core mod.rs.
// Just a simple test, most exhaustive tests in the core.
#[cfg(test)]
mod test {
use crate::util::RwLock;
use std::sync::Arc;
use util::RwLock;
use super::*;
use core::verifier_cache::{LruVerifierCache, VerifierCache};
use keychain::{ExtKeychain, ExtKeychainPath};
use crate::core::verifier_cache::{LruVerifierCache, VerifierCache};
use crate::keychain::{ExtKeychain, ExtKeychainPath};
fn verifier_cache() -> Arc<RwLock<VerifierCache>> {
fn verifier_cache() -> Arc<RwLock<dyn VerifierCache>> {
Arc::new(RwLock::new(LruVerifierCache::new()))
}
@ -275,7 +276,8 @@ mod test {
with_fee(2),
],
&keychain,
).unwrap();
)
.unwrap();
tx.validate(vc.clone()).unwrap();
}
@ -297,7 +299,8 @@ mod test {
with_fee(2),
],
&keychain,
).unwrap();
)
.unwrap();
tx.validate(vc.clone()).unwrap();
}
@ -313,7 +316,8 @@ mod test {
let tx = transaction(
vec![input(6, key_id1), output(2, key_id2), with_fee(4)],
&keychain,
).unwrap();
)
.unwrap();
tx.validate(vc.clone()).unwrap();
}
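
The Append type alias changed in this file's earlier hunk is the same dyn rule applied to a function-trait object: a type alias naming a bare Fn(..) now needs an explicit dyn, while the for<'a> binder and the Context<'_, K> argument keep their higher-ranked and anonymous lifetimes. A minimal sketch of that shape with invented names, not grin code:

// A borrowed function-trait object with a higher-ranked lifetime, mirroring
// the shape of `Append<K>`.
type Step = dyn for<'a> Fn(&'a mut Vec<u8>, (u8, u8)) -> usize;

fn apply(step: &Step, buf: &mut Vec<u8>) -> usize {
    step(buf, (1, 2))
}

fn push_both(buf: &mut Vec<u8>, pair: (u8, u8)) -> usize {
    buf.push(pair.0);
    buf.push(pair.1);
    buf.len()
}

fn main() {
    let mut buf = Vec::new();
    // `&push_both` coerces to `&Step`, i.e. `&dyn for<'a> Fn(...)`.
    assert_eq!(apply(&push_both, &mut buf), 2);
}
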

View file

@ -16,9 +16,9 @@
use failure::{Backtrace, Context, Fail};
use std::fmt::{self, Display};
use core::{committed, transaction};
use keychain;
use util::secp;
use crate::core::{committed, transaction};
use crate::keychain;
use crate::util::secp;
/// Lib tx error definition
#[derive(Debug)]
@ -53,7 +53,7 @@ pub enum ErrorKind {
}
impl Fail for Error {
fn cause(&self) -> Option<&Fail> {
fn cause(&self) -> Option<&dyn Fail> {
self.inner.cause()
}
@ -63,7 +63,7 @@ impl Fail for Error {
}
impl Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
Display::fmt(&self.inner, f)
}
}

View file

@ -28,10 +28,10 @@ pub mod proof;
pub mod reward;
pub mod slate;
use consensus;
use core::Transaction;
use crate::consensus;
use crate::core::Transaction;
pub use libtx::error::{Error, ErrorKind};
pub use crate::libtx::error::{Error, ErrorKind};
const DEFAULT_BASE_FEE: u64 = consensus::MILLI_GRIN;

View file

@ -14,12 +14,12 @@
//! Rangeproof library functions
use blake2;
use keychain::{Identifier, Keychain};
use libtx::error::{Error, ErrorKind};
use util::secp::key::SecretKey;
use util::secp::pedersen::{Commitment, ProofInfo, ProofMessage, RangeProof};
use util::secp::{self, Secp256k1};
use crate::blake2;
use crate::keychain::{Identifier, Keychain};
use crate::libtx::error::{Error, ErrorKind};
use crate::util::secp::key::SecretKey;
use crate::util::secp::pedersen::{Commitment, ProofInfo, ProofMessage, RangeProof};
use crate::util::secp::{self, Secp256k1};
fn create_nonce<K>(k: &K, commit: &Commitment) -> Result<SecretKey, Error>
where

View file

@ -14,15 +14,13 @@
//! Builds the blinded output and related signature proof for the block
//! reward.
use keychain::{Identifier, Keychain};
use consensus::reward;
use core::transaction::kernel_sig_msg;
use core::KernelFeatures;
use core::{Output, OutputFeatures, TxKernel};
use libtx::error::Error;
use libtx::{aggsig, proof};
use util::static_secp_instance;
use crate::consensus::reward;
use crate::core::transaction::kernel_sig_msg;
use crate::core::{KernelFeatures, Output, OutputFeatures, TxKernel};
use crate::keychain::{Identifier, Keychain};
use crate::libtx::error::Error;
use crate::libtx::{aggsig, proof};
use crate::util::static_secp_instance;
/// output a reward output
pub fn output<K>(

View file

@ -15,25 +15,22 @@
//! Functions for building partial transactions to be passed
//! around during an interactive wallet exchange
use crate::blake2::blake2b::blake2b;
use crate::core::committed::Committed;
use crate::core::transaction::kernel_sig_msg;
use crate::core::verifier_cache::LruVerifierCache;
use crate::core::{amount_to_hr_string, Transaction};
use crate::keychain::{BlindSum, BlindingFactor, Keychain};
use crate::libtx::error::{Error, ErrorKind};
use crate::libtx::{aggsig, build, tx_fee};
use crate::util::secp;
use crate::util::secp::key::{PublicKey, SecretKey};
use crate::util::secp::Signature;
use crate::util::RwLock;
use rand::thread_rng;
use std::sync::Arc;
use uuid::Uuid;
use core::committed::Committed;
use core::transaction::kernel_sig_msg;
use core::verifier_cache::LruVerifierCache;
use core::{amount_to_hr_string, Transaction};
use keychain::{BlindSum, BlindingFactor, Keychain};
use libtx::error::{Error, ErrorKind};
use libtx::{aggsig, build, tx_fee};
use util::secp;
use util::secp::key::{PublicKey, SecretKey};
use util::secp::Signature;
use util::RwLock;
use blake2::blake2b::blake2b;
/// Public data for each participant in the slate
#[derive(Serialize, Deserialize, Debug, Clone)]

View file

@ -75,7 +75,7 @@ macro_rules! tee {
#[macro_export]
macro_rules! ser_multiread {
($rdr:ident, $($read_call:ident $(($val:expr)),*),*) => {
( $(try!($rdr.$read_call($($val),*))),* )
( $(r#try!($rdr.$read_call($($val),*))),* )
}
}
@ -89,7 +89,7 @@ macro_rules! ser_multiread {
#[macro_export]
macro_rules! ser_multiwrite {
($wrtr:ident, $([ $write_call:ident, $val:expr ]),* ) => {
$( try!($wrtr.$write_call($val)) );*
$( r#try!($wrtr.$write_call($val)) );*
}
}
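
The try! to r#try! rewrite inside these macros is forced by the edition switch: try is a reserved keyword on Rust 2018, so the legacy macro can only be named through a raw identifier (new code would normally use the ? operator instead). A minimal sketch of raw identifiers and the ? replacement, not grin code:

// On edition 2018 an item named `try` must be declared and called as `r#try`.
fn r#try(value: i32) -> Result<i32, String> {
    if value >= 0 {
        Ok(value)
    } else {
        Err("negative".to_string())
    }
}

fn main() -> Result<(), String> {
    // `?` is the modern replacement for the old `try!` macro.
    let v = r#try(7)?;
    assert_eq!(v, 7);
    Ok(())
}
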

View file

@ -28,13 +28,8 @@
#![deny(unused_mut)]
#![warn(missing_docs)]
extern crate blake2_rfc as blake2;
extern crate chrono;
extern crate num;
extern crate rand;
extern crate serde;
extern crate grin_util as util;
use chrono;
use num;
#[macro_use]
mod common;
@ -46,16 +41,16 @@ pub mod lean;
mod siphash;
mod types;
use crate::core::{Block, BlockHeader};
use crate::genesis;
use crate::global;
use chrono::prelude::{DateTime, NaiveDateTime, Utc};
use core::{Block, BlockHeader};
use genesis;
use global;
pub use self::common::EdgeType;
pub use self::types::*;
pub use pow::cuckaroo::{new_cuckaroo_ctx, CuckarooContext};
pub use pow::cuckatoo::{new_cuckatoo_ctx, CuckatooContext};
pub use pow::error::Error;
pub use crate::pow::cuckaroo::{new_cuckaroo_ctx, CuckarooContext};
pub use crate::pow::cuckatoo::{new_cuckatoo_ctx, CuckatooContext};
pub use crate::pow::error::Error;
const MAX_SOLS: u32 = 10;
@ -134,9 +129,9 @@ pub fn pow_size(
#[cfg(test)]
mod test {
use super::*;
use genesis;
use global;
use global::ChainTypes;
use crate::genesis;
use crate::global;
use crate::global::ChainTypes;
/// We'll be generating genesis blocks differently
#[test]
@ -151,7 +146,8 @@ mod test {
Difficulty::min(),
global::proofsize(),
global::min_edge_bits(),
).unwrap();
)
.unwrap();
assert_ne!(b.header.pow.nonce, 310);
assert!(b.header.pow.to_difficulty(0) >= Difficulty::min());
assert!(verify_size(&b.header).is_ok());

View file

@ -14,12 +14,12 @@
//! Common types and traits for cuckoo/cuckatoo family of solvers
use blake2::blake2b::blake2b;
use crate::blake2::blake2b::blake2b;
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use pow::error::{Error, ErrorKind};
use pow::num::{PrimInt, ToPrimitive};
use pow::siphash::siphash24;
use crate::pow::error::{Error, ErrorKind};
use crate::pow::num::{PrimInt, ToPrimitive};
use crate::pow::siphash::siphash24;
use std::fmt;
use std::hash::Hash;
use std::io::Cursor;
@ -44,7 +44,7 @@ impl<T> fmt::Display for Edge<T>
where
T: EdgeType,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"(u: {}, v: {})",
@ -68,7 +68,7 @@ impl<T> fmt::Display for Link<T>
where
T: EdgeType,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"(next: {}, to: {})",

View file

@ -23,15 +23,18 @@
//! In Cuckaroo, edges are calculated by repeatedly hashing the seeds to
//! obtain blocks of values. Nodes are then extracted from those edges.
use pow::common::{CuckooParams, EdgeType};
use pow::error::{Error, ErrorKind};
use pow::siphash::siphash_block;
use pow::{PoWContext, Proof};
use crate::pow::common::{CuckooParams, EdgeType};
use crate::pow::error::{Error, ErrorKind};
use crate::pow::siphash::siphash_block;
use crate::pow::{PoWContext, Proof};
/// Instantiate a new CuckarooContext as a PowContext. Note that this can't
/// be moved in the PoWContext trait as this particular trait needs to be
/// convertible to an object trait.
pub fn new_cuckaroo_ctx<T>(edge_bits: u8, proof_size: usize) -> Result<Box<PoWContext<T>>, Error>
pub fn new_cuckaroo_ctx<T>(
edge_bits: u8,
proof_size: usize,
) -> Result<Box<dyn PoWContext<T>>, Error>
where
T: EdgeType + 'static,
{
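
Here the dyn change lands on a constructor's return type: new_cuckaroo_ctx (and new_cuckatoo_ctx below) return Result<Box<dyn PoWContext<T>>, Error>, a boxed trait object, which is presumably also why T carries the 'static bound. A minimal sketch of a factory returning Box<dyn Trait> with an invented trait, not grin code:

trait Context {
    fn edge_bits(&self) -> u8;
}

struct SmallContext {
    edge_bits: u8,
}

impl Context for SmallContext {
    fn edge_bits(&self) -> u8 {
        self.edge_bits
    }
}

// Pre-2018 this would have been spelled `Box<Context>`; the explicit `dyn`
// makes it clear the concrete type is erased behind the box.
fn new_context(edge_bits: u8) -> Result<Box<dyn Context>, String> {
    if edge_bits == 0 {
        return Err("edge_bits must be non-zero".to_string());
    }
    Ok(Box::new(SmallContext { edge_bits }))
}

fn main() {
    let ctx = new_context(19).expect("valid context");
    assert_eq!(ctx.edge_bits(), 19);
}
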

View file

@ -17,10 +17,10 @@ use std::mem;
use byteorder::{BigEndian, WriteBytesExt};
use croaring::Bitmap;
use pow::common::{CuckooParams, EdgeType, Link};
use pow::error::{Error, ErrorKind};
use pow::{PoWContext, Proof};
use util;
use crate::pow::common::{CuckooParams, EdgeType, Link};
use crate::pow::error::{Error, ErrorKind};
use crate::pow::{PoWContext, Proof};
use crate::util;
struct Graph<T>
where
@ -159,7 +159,7 @@ pub fn new_cuckatoo_ctx<T>(
edge_bits: u8,
proof_size: usize,
max_sols: u32,
) -> Result<Box<PoWContext<T>>, Error>
) -> Result<Box<dyn PoWContext<T>>, Error>
where
T: EdgeType + 'static,
{

View file

@ -53,7 +53,7 @@ pub enum ErrorKind {
}
impl Fail for Error {
fn cause(&self) -> Option<&Fail> {
fn cause(&self) -> Option<&dyn Fail> {
self.inner.cause()
}
@ -63,7 +63,7 @@ impl Fail for Error {
}
impl Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
Display::fmt(&self.inner, f)
}
}

View file

@ -15,10 +15,10 @@
use croaring::Bitmap;
use pow::common::CuckooParams;
use pow::cuckatoo::CuckatooContext;
use pow::error::Error;
use pow::Proof;
use crate::pow::common::CuckooParams;
use crate::pow::cuckatoo::CuckatooContext;
use crate::pow::error::Error;
use crate::pow::Proof;
/// Lean miner implementation aiming to be as short and simple as possible.
/// As a consequence, it's a little less than 10 times slower than John
@ -88,7 +88,7 @@ impl Lean {
#[cfg(test)]
mod test {
use super::*;
use pow::types::PoWContext;
use crate::pow::types::PoWContext;
#[test]
fn lean_miner() {

View file

@ -21,13 +21,13 @@ use std::{fmt, iter};
use rand::{thread_rng, Rng};
use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
use consensus::{graph_weight, MIN_DIFFICULTY, SECOND_POW_EDGE_BITS};
use core::hash::Hashed;
use global;
use ser::{self, FixedLength, Readable, Reader, Writeable, Writer};
use crate::consensus::{graph_weight, MIN_DIFFICULTY, SECOND_POW_EDGE_BITS};
use crate::core::hash::Hashed;
use crate::global;
use crate::ser::{self, FixedLength, Readable, Reader, Writeable, Writer};
use pow::common::EdgeType;
use pow::error::Error;
use crate::pow::common::EdgeType;
use crate::pow::error::Error;
/// Generic trait for a solver/verifier providing common interface into Cuckoo-family PoW
/// Mostly used for verification, but also for test mining if necessary
@ -105,7 +105,7 @@ impl Difficulty {
}
impl fmt::Display for Difficulty {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.num)
}
}
@ -153,7 +153,7 @@ impl Writeable for Difficulty {
}
impl Readable for Difficulty {
fn read(reader: &mut Reader) -> Result<Difficulty, ser::Error> {
fn read(reader: &mut dyn Reader) -> Result<Difficulty, ser::Error> {
let data = reader.read_u64()?;
Ok(Difficulty { num: data })
}
@ -186,7 +186,7 @@ struct DiffVisitor;
impl<'de> de::Visitor<'de> for DiffVisitor {
type Value = Difficulty;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
formatter.write_str("a difficulty")
}
@ -241,7 +241,7 @@ impl Default for ProofOfWork {
impl ProofOfWork {
/// Read implementation, can't define as trait impl as we need a version
pub fn read(_ver: u16, reader: &mut Reader) -> Result<ProofOfWork, ser::Error> {
pub fn read(_ver: u16, reader: &mut dyn Reader) -> Result<ProofOfWork, ser::Error> {
let total_difficulty = Difficulty::read(reader)?;
let secondary_scaling = reader.read_u32()?;
let nonce = reader.read_u64()?;
@ -325,7 +325,7 @@ pub struct Proof {
}
impl fmt::Debug for Proof {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "Cuckoo{}(", self.edge_bits)?;
for (i, val) in self.nonces[..].iter().enumerate() {
write!(f, "{:x}", val)?;
@ -389,7 +389,7 @@ impl Proof {
}
impl Readable for Proof {
fn read(reader: &mut Reader) -> Result<Proof, ser::Error> {
fn read(reader: &mut dyn Reader) -> Result<Proof, ser::Error> {
let edge_bits = reader.read_u8()?;
if edge_bits == 0 || edge_bits > 64 {
return Err(ser::Error::CorruptedData);

View file

@ -19,21 +19,20 @@
//! To use it simply implement `Writeable` or `Readable` and then use the
//! `serialize` or `deserialize` functions on them as appropriate.
use std::time::Duration;
use crate::core::hash::{Hash, Hashed};
use crate::keychain::{BlindingFactor, Identifier, IDENTIFIER_SIZE};
use crate::util::read_write::read_exact;
use crate::util::secp::constants::{
AGG_SIGNATURE_SIZE, MAX_PROOF_SIZE, PEDERSEN_COMMITMENT_SIZE, SECRET_KEY_SIZE,
};
use crate::util::secp::pedersen::{Commitment, RangeProof};
use crate::util::secp::Signature;
use byteorder::{BigEndian, ByteOrder, ReadBytesExt};
use core::hash::{Hash, Hashed};
use keychain::{BlindingFactor, Identifier, IDENTIFIER_SIZE};
use std::fmt::Debug;
use std::io::{self, Read, Write};
use std::marker;
use std::time::Duration;
use std::{cmp, error, fmt};
use util::read_write::read_exact;
use util::secp::constants::{
AGG_SIGNATURE_SIZE, MAX_PROOF_SIZE, PEDERSEN_COMMITMENT_SIZE, SECRET_KEY_SIZE,
};
use util::secp::pedersen::{Commitment, RangeProof};
use util::secp::Signature;
/// Possible errors deriving from serializing or deserializing.
#[derive(Clone, Eq, PartialEq, Debug)]
@ -68,7 +67,7 @@ impl From<io::Error> for Error {
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
Error::IOErr(ref e, ref _k) => write!(f, "{}", e),
Error::UnexpectedData {
@ -86,7 +85,7 @@ impl fmt::Display for Error {
}
impl error::Error for Error {
fn cause(&self) -> Option<&error::Error> {
fn cause(&self) -> Option<&dyn error::Error> {
match *self {
Error::IOErr(ref _e, ref _k) => Some(self),
_ => None,
@ -210,14 +209,14 @@ pub trait Writeable {
pub struct IteratingReader<'a, T> {
count: u64,
curr: u64,
reader: &'a mut Reader,
reader: &'a mut dyn Reader,
_marker: marker::PhantomData<T>,
}
impl<'a, T> IteratingReader<'a, T> {
/// Constructor to create a new iterating reader for the provided underlying reader.
/// Takes a count so we know how many to iterate over.
pub fn new(reader: &'a mut Reader, count: u64) -> IteratingReader<'a, T> {
pub fn new(reader: &'a mut dyn Reader, count: u64) -> IteratingReader<'a, T> {
let curr = 0;
IteratingReader {
count,
@ -244,7 +243,7 @@ where
}
/// Reads multiple serialized items into a Vec.
pub fn read_multi<T>(reader: &mut Reader, count: u64) -> Result<Vec<T>, Error>
pub fn read_multi<T>(reader: &mut dyn Reader, count: u64) -> Result<Vec<T>, Error>
where
T: Readable,
{
@ -270,17 +269,17 @@ where
Self: Sized,
{
/// Reads the data necessary to this Readable from the provided reader
fn read(reader: &mut Reader) -> Result<Self, Error>;
fn read(reader: &mut dyn Reader) -> Result<Self, Error>;
}
/// Deserializes a Readable from any std::io::Read implementation.
pub fn deserialize<T: Readable>(source: &mut Read) -> Result<T, Error> {
pub fn deserialize<T: Readable>(source: &mut dyn Read) -> Result<T, Error> {
let mut reader = BinReader { source };
T::read(&mut reader)
}
/// Serializes a Writeable into any std::io::Write implementation.
pub fn serialize<W: Writeable>(sink: &mut Write, thing: &W) -> Result<(), Error> {
pub fn serialize<W: Writeable>(sink: &mut dyn Write, thing: &W) -> Result<(), Error> {
let mut writer = BinWriter { sink };
thing.write(&mut writer)
}
@ -295,7 +294,7 @@ pub fn ser_vec<W: Writeable>(thing: &W) -> Result<Vec<u8>, Error> {
/// Utility to read from a binary source
struct BinReader<'a> {
source: &'a mut Read,
source: &'a mut dyn Read,
}
fn map_io_err(err: io::Error) -> Error {
@ -359,14 +358,14 @@ impl<'a> Reader for BinReader<'a> {
/// Tracks total bytes read so we can verify we read the right number afterwards.
pub struct StreamingReader<'a> {
total_bytes_read: u64,
stream: &'a mut Read,
stream: &'a mut dyn Read,
timeout: Duration,
}
impl<'a> StreamingReader<'a> {
/// Create a new streaming reader with the provided underlying stream.
/// Also takes a duration to be used for each individual read_exact call.
pub fn new(stream: &'a mut Read, timeout: Duration) -> StreamingReader<'a> {
pub fn new(stream: &'a mut dyn Read, timeout: Duration) -> StreamingReader<'a> {
StreamingReader {
total_bytes_read: 0,
stream,
@ -440,7 +439,7 @@ impl<'a> Reader for StreamingReader<'a> {
}
impl Readable for Commitment {
fn read(reader: &mut Reader) -> Result<Commitment, Error> {
fn read(reader: &mut dyn Reader) -> Result<Commitment, Error> {
let a = reader.read_fixed_bytes(PEDERSEN_COMMITMENT_SIZE)?;
let mut c = [0; PEDERSEN_COMMITMENT_SIZE];
c[..PEDERSEN_COMMITMENT_SIZE].clone_from_slice(&a[..PEDERSEN_COMMITMENT_SIZE]);
@ -461,7 +460,7 @@ impl Writeable for BlindingFactor {
}
impl Readable for BlindingFactor {
fn read(reader: &mut Reader) -> Result<BlindingFactor, Error> {
fn read(reader: &mut dyn Reader) -> Result<BlindingFactor, Error> {
let bytes = reader.read_fixed_bytes(BlindingFactor::LEN)?;
Ok(BlindingFactor::from_slice(&bytes))
}
@ -478,7 +477,7 @@ impl Writeable for Identifier {
}
impl Readable for Identifier {
fn read(reader: &mut Reader) -> Result<Identifier, Error> {
fn read(reader: &mut dyn Reader) -> Result<Identifier, Error> {
let bytes = reader.read_fixed_bytes(IDENTIFIER_SIZE)?;
Ok(Identifier::from_bytes(&bytes))
}
@ -491,7 +490,7 @@ impl Writeable for RangeProof {
}
impl Readable for RangeProof {
fn read(reader: &mut Reader) -> Result<RangeProof, Error> {
fn read(reader: &mut dyn Reader) -> Result<RangeProof, Error> {
let len = reader.read_u64()?;
let max_len = cmp::min(len as usize, MAX_PROOF_SIZE);
let p = reader.read_fixed_bytes(max_len)?;
@ -518,7 +517,7 @@ impl PMMRable for RangeProof {
}
impl Readable for Signature {
fn read(reader: &mut Reader) -> Result<Signature, Error> {
fn read(reader: &mut dyn Reader) -> Result<Signature, Error> {
let a = reader.read_fixed_bytes(Signature::LEN)?;
let mut c = [0; Signature::LEN];
c[..Signature::LEN].clone_from_slice(&a[..Signature::LEN]);
@ -560,12 +559,12 @@ impl<T: Hashed> VerifySortedAndUnique<T> for Vec<T> {
/// Utility wrapper for an underlying byte Writer. Defines higher level methods
/// to write numbers, byte vectors, hashes, etc.
pub struct BinWriter<'a> {
sink: &'a mut Write,
sink: &'a mut dyn Write,
}
impl<'a> BinWriter<'a> {
/// Wraps a standard Write in a new BinWriter
pub fn new(write: &'a mut Write) -> BinWriter<'a> {
pub fn new(write: &'a mut dyn Write) -> BinWriter<'a> {
BinWriter { sink: write }
}
}
@ -591,7 +590,7 @@ macro_rules! impl_int {
}
impl Readable for $int {
fn read(reader: &mut Reader) -> Result<$int, Error> {
fn read(reader: &mut dyn Reader) -> Result<$int, Error> {
reader.$r_fn()
}
}
@ -609,7 +608,7 @@ impl<T> Readable for Vec<T>
where
T: Readable,
{
fn read(reader: &mut Reader) -> Result<Vec<T>, Error> {
fn read(reader: &mut dyn Reader) -> Result<Vec<T>, Error> {
let mut buf = Vec::new();
loop {
let elem = T::read(reader);
@ -651,7 +650,7 @@ impl<A: Writeable, B: Writeable> Writeable for (A, B) {
}
impl<A: Readable, B: Readable> Readable for (A, B) {
fn read(reader: &mut Reader) -> Result<(A, B), Error> {
fn read(reader: &mut dyn Reader) -> Result<(A, B), Error> {
Ok((Readable::read(reader)?, Readable::read(reader)?))
}
}
@ -674,7 +673,7 @@ impl<A: Writeable, B: Writeable, C: Writeable, D: Writeable> Writeable for (A, B
}
impl<A: Readable, B: Readable, C: Readable> Readable for (A, B, C) {
fn read(reader: &mut Reader) -> Result<(A, B, C), Error> {
fn read(reader: &mut dyn Reader) -> Result<(A, B, C), Error> {
Ok((
Readable::read(reader)?,
Readable::read(reader)?,
@ -684,7 +683,7 @@ impl<A: Readable, B: Readable, C: Readable> Readable for (A, B, C) {
}
impl<A: Readable, B: Readable, C: Readable, D: Readable> Readable for (A, B, C, D) {
fn read(reader: &mut Reader) -> Result<(A, B, C, D), Error> {
fn read(reader: &mut dyn Reader) -> Result<(A, B, C, D), Error> {
Ok((
Readable::read(reader)?,
Readable::read(reader)?,
@ -784,22 +783,22 @@ impl AsFixedBytes for String {
self.len()
}
}
impl AsFixedBytes for ::core::hash::Hash {
impl AsFixedBytes for crate::core::hash::Hash {
fn len(&self) -> usize {
32
}
}
impl AsFixedBytes for ::util::secp::pedersen::RangeProof {
impl AsFixedBytes for crate::util::secp::pedersen::RangeProof {
fn len(&self) -> usize {
self.plen
}
}
impl AsFixedBytes for ::util::secp::Signature {
impl AsFixedBytes for crate::util::secp::Signature {
fn len(&self) -> usize {
64
}
}
impl AsFixedBytes for ::util::secp::pedersen::Commitment {
impl AsFixedBytes for crate::util::secp::pedersen::Commitment {
fn len(&self) -> usize {
PEDERSEN_COMMITMENT_SIZE
}
@ -809,7 +808,7 @@ impl AsFixedBytes for BlindingFactor {
SECRET_KEY_SIZE
}
}
impl AsFixedBytes for ::keychain::Identifier {
impl AsFixedBytes for crate::keychain::Identifier {
fn len(&self) -> usize {
IDENTIFIER_SIZE
}

View file

@ -12,32 +12,28 @@
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate chrono;
extern crate grin_core;
extern crate grin_keychain as keychain;
extern crate grin_util as util;
pub mod common;
use crate::common::{new_block, tx1i2o, tx2i1o, txspend1i1o};
use crate::core::consensus::{BLOCK_OUTPUT_WEIGHT, MAX_BLOCK_WEIGHT};
use crate::core::core::block::Error;
use crate::core::core::hash::Hashed;
use crate::core::core::id::ShortIdentifiable;
use crate::core::core::verifier_cache::{LruVerifierCache, VerifierCache};
use crate::core::core::Committed;
use crate::core::core::{Block, BlockHeader, CompactBlock, KernelFeatures, OutputFeatures};
use crate::core::libtx::build::{self, input, output, with_fee};
use crate::core::{global, ser};
use crate::keychain::{BlindingFactor, ExtKeychain, Keychain};
use crate::util::secp;
use crate::util::RwLock;
use chrono::Duration;
use grin_core as core;
use grin_keychain as keychain;
use grin_util as util;
use std::sync::Arc;
use std::time::Instant;
use util::RwLock;
pub mod common;
use chrono::Duration;
use common::{new_block, tx1i2o, tx2i1o, txspend1i1o};
use grin_core::consensus::{BLOCK_OUTPUT_WEIGHT, MAX_BLOCK_WEIGHT};
use grin_core::core::block::Error;
use grin_core::core::hash::Hashed;
use grin_core::core::id::ShortIdentifiable;
use grin_core::core::verifier_cache::{LruVerifierCache, VerifierCache};
use grin_core::core::Committed;
use grin_core::core::{Block, BlockHeader, CompactBlock, KernelFeatures, OutputFeatures};
use grin_core::libtx::build::{self, input, output, with_fee};
use grin_core::{global, ser};
use keychain::{BlindingFactor, ExtKeychain, Keychain};
use util::secp;
fn verifier_cache() -> Arc<RwLock<VerifierCache>> {
fn verifier_cache() -> Arc<RwLock<dyn VerifierCache>> {
Arc::new(RwLock::new(LruVerifierCache::new()))
}
@ -66,10 +62,9 @@ fn too_large_block() {
let prev = BlockHeader::default();
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let b = new_block(vec![&tx], &keychain, &prev, &key_id);
assert!(
b.validate(&BlindingFactor::zero(), verifier_cache())
.is_err()
);
assert!(b
.validate(&BlindingFactor::zero(), verifier_cache())
.is_err());
}
#[test]
@ -96,7 +91,8 @@ fn block_with_cut_through() {
let mut btx2 = build::transaction(
vec![input(7, key_id1), output(5, key_id2.clone()), with_fee(2)],
&keychain,
).unwrap();
)
.unwrap();
// spending tx2 - reuse key_id2
@ -149,10 +145,9 @@ fn empty_block_with_coinbase_is_valid() {
// the block should be valid here (single coinbase output with corresponding
// txn kernel)
assert!(
b.validate(&BlindingFactor::zero(), verifier_cache())
.is_ok()
);
assert!(b
.validate(&BlindingFactor::zero(), verifier_cache())
.is_ok());
}
#[test]
@ -165,20 +160,17 @@ fn remove_coinbase_output_flag() {
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let mut b = new_block(vec![], &keychain, &prev, &key_id);
assert!(
b.outputs()[0]
.features
.contains(OutputFeatures::COINBASE_OUTPUT)
);
assert!(b.outputs()[0]
.features
.contains(OutputFeatures::COINBASE_OUTPUT));
b.outputs_mut()[0]
.features
.remove(OutputFeatures::COINBASE_OUTPUT);
assert_eq!(b.verify_coinbase(), Err(Error::CoinbaseSumMismatch));
assert!(
b.verify_kernel_sums(b.header.overage(), b.header.total_kernel_offset())
.is_ok()
);
assert!(b
.verify_kernel_sums(b.header.overage(), b.header.total_kernel_offset())
.is_ok());
assert_eq!(
b.validate(&BlindingFactor::zero(), verifier_cache()),
Err(Error::CoinbaseSumMismatch)
@ -194,11 +186,9 @@ fn remove_coinbase_kernel_flag() {
let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
let mut b = new_block(vec![], &keychain, &prev, &key_id);
assert!(
b.kernels()[0]
.features
.contains(KernelFeatures::COINBASE_KERNEL)
);
assert!(b.kernels()[0]
.features
.contains(KernelFeatures::COINBASE_KERNEL));
b.kernels_mut()[0]
.features
.remove(KernelFeatures::COINBASE_KERNEL);
View file
@ -14,16 +14,18 @@
//! Common test functions
extern crate grin_core;
extern crate grin_keychain as keychain;
extern crate grin_util as util;
use grin_core::core::block::{Block, BlockHeader};
use grin_core::core::Transaction;
use grin_core::libtx::build::{self, input, output, with_fee};
use grin_core::libtx::reward;
use grin_core::pow::Difficulty;
use keychain::{Identifier, Keychain};
use crate::core::core::{
block::{Block, BlockHeader},
Transaction,
};
use crate::core::libtx::{
build::{self, input, output, with_fee},
reward,
};
use crate::core::pow::Difficulty;
use crate::keychain::{Identifier, Keychain};
use grin_core as core;
use grin_keychain as keychain;
// utility producing a transaction with 2 inputs and a single output
pub fn tx2i1o() -> Transaction {
@ -40,7 +42,8 @@ pub fn tx2i1o() -> Transaction {
with_fee(2),
],
&keychain,
).unwrap()
)
.unwrap()
}
// utility producing a transaction with a single input and output
@ -52,7 +55,8 @@ pub fn tx1i1o() -> Transaction {
build::transaction(
vec![input(5, key_id1), output(3, key_id2), with_fee(2)],
&keychain,
).unwrap()
)
.unwrap()
}
// utility producing a transaction with a single input
@ -72,7 +76,8 @@ pub fn tx1i2o() -> Transaction {
with_fee(2),
],
&keychain,
).unwrap()
)
.unwrap()
}
// utility to create a block without worrying about the key or previous
@ -93,7 +98,8 @@ where
txs.into_iter().cloned().collect(),
Difficulty::min(),
reward_output,
).unwrap()
)
.unwrap()
}
// utility producing a transaction that spends an output with the provided
@ -105,5 +111,6 @@ where
build::transaction(
vec![input(v, key_id1), output(3, key_id2), with_fee(2)],
keychain,
).unwrap()
)
.unwrap()
}
View file
@ -12,14 +12,12 @@
// limitations under the License.
//! core consensus.rs tests (separated to de-clutter consensus.rs)
#[macro_use]
extern crate grin_core as core;
extern crate chrono;
use grin_core as core;
use self::core::consensus::*;
use self::core::global;
use self::core::pow::Difficulty;
use chrono::prelude::Utc;
use core::consensus::*;
use core::global;
use core::pow::Difficulty;
use std::fmt::{self, Display};
/// Last n blocks for difficulty calculation purposes
@ -63,7 +61,7 @@ pub struct DiffStats {
}
impl Display for DiffBlock {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let output = format!(
"Block Number: {} Difficulty: {}, Time: {}, Duration: {}",
self.block_number, self.difficulty, self.time, self.duration
@ -92,7 +90,8 @@ fn repeat(interval: u64, diff: HeaderInfo, len: u64, cur_time: Option<u64>) -> V
diff.secondary_scaling,
diff.is_secondary,
)
}).collect::<Vec<_>>()
})
.collect::<Vec<_>>()
}
// Creates a new chain with a genesis at a simulated difficulty
@ -146,7 +145,8 @@ fn get_diff_stats(chain_sim: &Vec<HeaderInfo>) -> DiffStats {
time: n.timestamp,
duration: dur,
}
}).collect();
})
.collect();
let block_time_sum = sum_entries.iter().fold(0, |sum, t| sum + t.duration);
let block_diff_sum = sum_entries.iter().fold(0, |sum, d| sum + d.difficulty);
@ -168,7 +168,8 @@ fn get_diff_stats(chain_sim: &Vec<HeaderInfo>) -> DiffStats {
time: n.timestamp,
duration: dur,
}
}).collect();
})
.collect();
DiffStats {
height: tip_height as u64,
View file
@ -13,27 +13,26 @@
// limitations under the License.
//! Core tests
extern crate grin_core;
extern crate grin_keychain as keychain;
extern crate grin_util as util;
use std::sync::Arc;
use util::RwLock;
pub mod common;
use common::{new_block, tx1i1o, tx1i2o, tx2i1o};
use grin_core::core::block::BlockHeader;
use grin_core::core::block::Error::KernelLockHeight;
use grin_core::core::hash::{Hashed, ZERO_HASH};
use grin_core::core::verifier_cache::{LruVerifierCache, VerifierCache};
use grin_core::core::{aggregate, deaggregate, KernelFeatures, Output, Transaction};
use grin_core::libtx::build::{
use self::core::core::block::BlockHeader;
use self::core::core::block::Error::KernelLockHeight;
use self::core::core::hash::{Hashed, ZERO_HASH};
use self::core::core::verifier_cache::{LruVerifierCache, VerifierCache};
use self::core::core::{aggregate, deaggregate, KernelFeatures, Output, Transaction};
use self::core::libtx::build::{
self, initial_tx, input, output, with_excess, with_fee, with_lock_height,
};
use grin_core::ser;
use keychain::{BlindingFactor, ExtKeychain, Keychain};
use util::static_secp_instance;
use self::core::ser;
use self::keychain::{BlindingFactor, ExtKeychain, Keychain};
use self::util::static_secp_instance;
use self::util::RwLock;
use crate::common::{new_block, tx1i1o, tx1i2o, tx2i1o};
use grin_core as core;
use grin_keychain as keychain;
use grin_util as util;
use std::sync::Arc;
#[test]
fn simple_tx_ser() {
@ -87,10 +86,11 @@ fn test_zero_commit_fails() {
with_fee(1),
],
&keychain,
).unwrap();
)
.unwrap();
}
fn verifier_cache() -> Arc<RwLock<VerifierCache>> {
fn verifier_cache() -> Arc<RwLock<dyn VerifierCache>> {
Arc::new(RwLock::new(LruVerifierCache::new()))
}
@ -110,7 +110,8 @@ fn build_tx_kernel() {
with_fee(2),
],
&keychain,
).unwrap();
)
.unwrap();
// check the tx is valid
tx.validate(verifier_cache()).unwrap();
@ -244,13 +245,15 @@ fn multi_kernel_transaction_deaggregation_4() {
tx3.clone(),
tx4.clone(),
tx5.clone(),
]).unwrap();
])
.unwrap();
assert!(tx12345.validate(vc.clone()).is_ok());
let deaggregated_tx5 = deaggregate(
tx12345.clone(),
vec![tx1.clone(), tx2.clone(), tx3.clone(), tx4.clone()],
).unwrap();
)
.unwrap();
assert!(deaggregated_tx5.validate(vc.clone()).is_ok());
assert_eq!(tx5, deaggregated_tx5);
}
@ -277,7 +280,8 @@ fn multi_kernel_transaction_deaggregation_5() {
tx3.clone(),
tx4.clone(),
tx5.clone(),
]).unwrap();
])
.unwrap();
let tx12 = aggregate(vec![tx1.clone(), tx2.clone()]).unwrap();
let tx34 = aggregate(vec![tx3.clone(), tx4.clone()]).unwrap();
@ -330,7 +334,8 @@ fn hash_output() {
with_fee(1),
],
&keychain,
).unwrap();
)
.unwrap();
let h = tx.outputs()[0].hash();
assert!(h != ZERO_HASH);
let h2 = tx.outputs()[1].hash();
@ -401,7 +406,8 @@ fn tx_build_exchange() {
output(4, key_id4),
],
&keychain,
).unwrap();
)
.unwrap();
tx_final.validate(verifier_cache()).unwrap();
}
@ -482,7 +488,8 @@ fn test_block_with_timelocked_tx() {
with_lock_height(1),
],
&keychain,
).unwrap();
)
.unwrap();
let previous_header = BlockHeader::default();
@ -499,7 +506,8 @@ fn test_block_with_timelocked_tx() {
with_lock_height(2),
],
&keychain,
).unwrap();
)
.unwrap();
let previous_header = BlockHeader::default();
let b = new_block(vec![&tx1], &keychain, &previous_header, &key_id3.clone());
View file
@ -12,17 +12,14 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#[macro_use]
extern crate grin_core as core;
extern crate croaring;
mod vec_backend;
use core::core::merkle_proof::MerkleProof;
use core::core::pmmr::PMMR;
use core::ser;
use core::ser::PMMRIndexHashable;
use vec_backend::{TestElem, VecBackend};
use self::core::core::merkle_proof::MerkleProof;
use self::core::core::pmmr::PMMR;
use self::core::ser;
use self::core::ser::PMMRIndexHashable;
use crate::vec_backend::{TestElem, VecBackend};
use grin_core as core;
#[test]
fn empty_merkle_proof() {
View file
@ -12,21 +12,16 @@
// See the License for the specific language governing permissions and
// limitations under the License.
//! PMMR tests
extern crate chrono;
extern crate croaring;
extern crate grin_core as core;
mod vec_backend;
use self::core::core::hash::Hash;
use self::core::core::pmmr::{self, PMMR};
use self::core::ser::PMMRIndexHashable;
use crate::vec_backend::{TestElem, VecBackend};
use chrono::prelude::Utc;
use grin_core as core;
use std::u64;
use core::core::hash::Hash;
use core::core::pmmr::{self, PMMR};
use core::ser::PMMRIndexHashable;
use vec_backend::{TestElem, VecBackend};
#[test]
fn some_peak_map() {
assert_eq!(pmmr::peak_map_height(0), (0b0, 0));
@ -442,7 +437,7 @@ fn pmmr_prune() {
// pruning a leaf with no parent should do nothing
{
let mut pmmr: PMMR<TestElem, _> = PMMR::at(&mut ba, sz);
let mut pmmr: PMMR<'_, TestElem, _> = PMMR::at(&mut ba, sz);
pmmr.prune(16).unwrap();
assert_eq!(orig_root, pmmr.root());
}
@ -451,7 +446,7 @@ fn pmmr_prune() {
// pruning leaves with no shared parent just removes 1 element
{
let mut pmmr: PMMR<TestElem, _> = PMMR::at(&mut ba, sz);
let mut pmmr: PMMR<'_, TestElem, _> = PMMR::at(&mut ba, sz);
pmmr.prune(2).unwrap();
assert_eq!(orig_root, pmmr.root());
}
@ -459,7 +454,7 @@ fn pmmr_prune() {
assert_eq!(ba.remove_list.len(), 2);
{
let mut pmmr: PMMR<TestElem, _> = PMMR::at(&mut ba, sz);
let mut pmmr: PMMR<'_, TestElem, _> = PMMR::at(&mut ba, sz);
pmmr.prune(4).unwrap();
assert_eq!(orig_root, pmmr.root());
}
@ -468,7 +463,7 @@ fn pmmr_prune() {
// pruning a non-leaf node has no effect
{
let mut pmmr: PMMR<TestElem, _> = PMMR::at(&mut ba, sz);
let mut pmmr: PMMR<'_, TestElem, _> = PMMR::at(&mut ba, sz);
pmmr.prune(3).unwrap_err();
assert_eq!(orig_root, pmmr.root());
}
@ -477,7 +472,7 @@ fn pmmr_prune() {
// TODO - no longer true (leaves only now) - pruning sibling removes subtree
{
let mut pmmr: PMMR<TestElem, _> = PMMR::at(&mut ba, sz);
let mut pmmr: PMMR<'_, TestElem, _> = PMMR::at(&mut ba, sz);
pmmr.prune(5).unwrap();
assert_eq!(orig_root, pmmr.root());
}
@ -487,7 +482,7 @@ fn pmmr_prune() {
// TODO - no longer true (leaves only now) - pruning all leaves under level >1
// removes all subtree
{
let mut pmmr: PMMR<TestElem, _> = PMMR::at(&mut ba, sz);
let mut pmmr: PMMR<'_, TestElem, _> = PMMR::at(&mut ba, sz);
pmmr.prune(1).unwrap();
assert_eq!(orig_root, pmmr.root());
}
@ -496,7 +491,7 @@ fn pmmr_prune() {
// pruning everything should only leave us with a single peak
{
let mut pmmr: PMMR<TestElem, _> = PMMR::at(&mut ba, sz);
let mut pmmr: PMMR<'_, TestElem, _> = PMMR::at(&mut ba, sz);
for n in 1..16 {
let _ = pmmr.prune(n);
}
View file
@ -13,16 +13,15 @@
// limitations under the License.
//! Transaction integration tests
extern crate grin_core;
extern crate grin_keychain as keychain;
extern crate grin_util as util;
pub mod common;
use grin_core::core::{Output, OutputFeatures};
use grin_core::libtx::proof;
use grin_core::ser;
use keychain::{ExtKeychain, Keychain};
use self::core::core::{Output, OutputFeatures};
use self::core::libtx::proof;
use self::core::ser;
use self::keychain::{ExtKeychain, Keychain};
use grin_core as core;
use grin_keychain as keychain;
#[test]
fn test_output_ser_deser() {
View file
@ -12,15 +12,14 @@
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate croaring;
use self::core::core::hash::Hash;
use self::core::core::pmmr::{self, Backend};
use self::core::core::BlockHeader;
use self::core::ser;
use self::core::ser::{FixedLength, PMMRable, Readable, Reader, Writeable, Writer};
use croaring;
use croaring::Bitmap;
use core::core::hash::Hash;
use core::core::pmmr::{self, Backend};
use core::core::BlockHeader;
use core::ser;
use core::ser::{FixedLength, PMMRable, Readable, Reader, Writeable, Writer};
use grin_core as core;
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct TestElem(pub [u32; 4]);
@ -39,15 +38,15 @@ impl PMMRable for TestElem {
impl Writeable for TestElem {
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ser::Error> {
try!(writer.write_u32(self.0[0]));
try!(writer.write_u32(self.0[1]));
try!(writer.write_u32(self.0[2]));
r#try!(writer.write_u32(self.0[0]));
r#try!(writer.write_u32(self.0[1]));
r#try!(writer.write_u32(self.0[2]));
writer.write_u32(self.0[3])
}
}
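
The `try!(...)` to `r#try!(...)` rewrites in this file (and in the keychain code further down) are another edition requirement: `try` is a reserved keyword in Rust 2018, so the old macro can only be invoked through the raw-identifier syntax, while the `?` operator expresses the same early return. A small sketch of the equivalence, with a hypothetical parse helper rather than the PMMR serialization code:

```rust
// Newer toolchains additionally deprecate `try!` in favour of `?`.
#![allow(deprecated)]

use std::num::ParseIntError;

// Raw-identifier form of the old macro: returns early on Err,
// otherwise evaluates to the Ok value. Plain `try!` no longer parses in 2018.
fn double_with_macro(s: &str) -> Result<i64, ParseIntError> {
    let n = r#try!(s.parse::<i64>());
    Ok(n * 2)
}

// The `?` operator is the modern spelling of exactly the same control flow.
fn double_with_question_mark(s: &str) -> Result<i64, ParseIntError> {
    let n = s.parse::<i64>()?;
    Ok(n * 2)
}

fn main() {
    assert_eq!(double_with_macro("21"), Ok(42));
    assert_eq!(double_with_question_mark("21"), Ok(42));
}
```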
impl Readable for TestElem {
fn read(reader: &mut Reader) -> Result<TestElem, ser::Error> {
fn read(reader: &mut dyn Reader) -> Result<TestElem, ser::Error> {
Ok(TestElem([
reader.read_u32()?,
reader.read_u32()?,
View file
@ -12,22 +12,19 @@
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate chrono;
extern crate grin_core;
extern crate grin_keychain as keychain;
extern crate grin_util as util;
use std::sync::Arc;
use util::RwLock;
pub mod common;
use grin_core::core::verifier_cache::{LruVerifierCache, VerifierCache};
use grin_core::core::{Output, OutputFeatures};
use grin_core::libtx::proof;
use keychain::{ExtKeychain, Keychain};
use self::core::core::verifier_cache::{LruVerifierCache, VerifierCache};
use self::core::core::{Output, OutputFeatures};
use self::core::libtx::proof;
use self::keychain::{ExtKeychain, Keychain};
use self::util::RwLock;
use grin_core as core;
use grin_keychain as keychain;
use grin_util as util;
use std::sync::Arc;
fn verifier_cache() -> Arc<RwLock<VerifierCache>> {
fn verifier_cache() -> Arc<RwLock<dyn VerifierCache>> {
Arc::new(RwLock::new(LruVerifierCache::new()))
}
View file
@ -12,7 +12,7 @@ What's working so far?
## Requirements
* rust 1.30+ (use [rustup]((https://www.rustup.rs/))- i.e. `curl https://sh.rustup.rs -sSf | sh; source $HOME/.cargo/env`)
* rust 1.31+ (use [rustup]((https://www.rustup.rs/))- i.e. `curl https://sh.rustup.rs -sSf | sh; source $HOME/.cargo/env`)
* if rust is already installed, you can simply update version with `rustup update`
* clang
* ncurses and libs (ncurses, ncursesw5)
View file
@ -12,7 +12,7 @@ Grin's programming language `rust` has compiled targets for most
## Requirements
* rust 1.30+ (use [rustup]((https://www.rustup.rs/))- for example, `curl https://sh.rustup.rs -sSf | sh; source $HOME/.cargo/env`)
* rust 1.31+ (use [rustup]((https://www.rustup.rs/))- for example, `curl https://sh.rustup.rs -sSf | sh; source $HOME/.cargo/env`)
* If rust is already installed, you can simply update the version with `rustup update`
* clang
* ncurses and libs (ncurses, ncursesw5)
View file
@ -1,7 +1,7 @@
# Multistage docker build, requires docker 17.05
# builder stage
FROM rust:1.30.1 as builder
FROM rust:1.31 as builder
RUN set -ex && \
apt-get update && \
View file
@ -7,6 +7,7 @@ license = "Apache-2.0"
repository = "https://github.com/mimblewimble/grin"
keywords = [ "crypto", "grin", "mimblewimble" ]
workspace = '..'
edition = "2018"
[dependencies]
byteorder = "1"
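
This `edition = "2018"` line (also added to the other crate manifests in this commit) is what enables the import changes above: `extern crate` declarations become unnecessary, external crates can be renamed with a plain `use`, and local modules are addressed through `crate::` paths. A minimal single-file sketch of those path conventions, with hypothetical module and function names rather than grin code:

```rust
// Hypothetical names for illustration only.
mod common {
    pub fn answer() -> u32 {
        42
    }
}

// 2015 edition: `extern crate grin_core as core;` at the crate root.
// 2018 edition: dependencies are in scope automatically and can be renamed
// with a plain `use`, e.g. `use grin_core as core;` (omitted here so the
// sketch builds without the dependency).
use crate::common::answer; // local items are reached via `crate::`

fn main() {
    println!("{}", answer());
}
```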
View file
@ -71,7 +71,7 @@ pub enum Error {
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
Error::BadByte(b) => write!(f, "invalid base58 character 0x{:x}", b),
Error::BadChecksum(exp, actual) => write!(
@ -90,7 +90,7 @@ impl fmt::Display for Error {
}
impl error::Error for Error {
fn cause(&self) -> Option<&error::Error> {
fn cause(&self) -> Option<&dyn error::Error> {
None
}
fn description(&self) -> &'static str {
@ -342,7 +342,7 @@ where
}
/// Directly encode a slice as base58 into a `Formatter`.
fn _encode_iter_to_fmt<I>(fmt: &mut fmt::Formatter, data: I) -> fmt::Result
fn _encode_iter_to_fmt<I>(fmt: &mut fmt::Formatter<'_>, data: I) -> fmt::Result
where
I: Iterator<Item = u8> + Clone,
{
@ -364,7 +364,7 @@ pub fn check_encode_slice(data: &[u8]) -> String {
/// Obtain a string with the base58check encoding of a slice
/// (Tack the first 4 256-digits of the object's Bitcoin hash onto the end.)
pub fn _check_encode_slice_to_fmt(fmt: &mut fmt::Formatter, data: &[u8]) -> fmt::Result {
pub fn _check_encode_slice_to_fmt(fmt: &mut fmt::Formatter<'_>, data: &[u8]) -> fmt::Result {
let checksum = sha256d_hash(&data);
let iter = data.iter().cloned().chain(checksum[0..4].iter().cloned());
_encode_iter_to_fmt(fmt, iter)
@ -373,7 +373,7 @@ pub fn _check_encode_slice_to_fmt(fmt: &mut fmt::Formatter, data: &[u8]) -> fmt:
#[cfg(test)]
mod tests {
use super::*;
use util::from_hex;
use crate::util::from_hex;
#[test]
fn test_base58_encode() {
View file
@ -37,10 +37,10 @@ use std::io::Cursor;
use std::str::FromStr;
use std::{error, fmt};
use crate::mnemonic;
use crate::util::secp::key::{PublicKey, SecretKey};
use crate::util::secp::{self, ContextFlag, Secp256k1};
use byteorder::{BigEndian, ByteOrder, ReadBytesExt};
use mnemonic;
use util::secp::key::{PublicKey, SecretKey};
use util::secp::{self, ContextFlag, Secp256k1};
use digest::generic_array::GenericArray;
use digest::Digest;
@ -48,7 +48,7 @@ use hmac::{Hmac, Mac};
use ripemd160::Ripemd160;
use sha2::{Sha256, Sha512};
use base58;
use crate::base58;
// Create alias for HMAC-SHA256
type HmacSha512 = Hmac<Sha512>;
@ -259,7 +259,7 @@ impl From<ChildNumber> for u32 {
}
impl fmt::Display for ChildNumber {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
ChildNumber::Hardened { index } => write!(f, "{}'", index),
ChildNumber::Normal { index } => write!(f, "{}", index),
@ -303,7 +303,7 @@ pub enum Error {
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
Error::CannotDeriveFromHardenedKey => {
f.write_str("cannot derive hardened key from public key")
@ -317,7 +317,7 @@ impl fmt::Display for Error {
}
impl error::Error for Error {
fn cause(&self) -> Option<&error::Error> {
fn cause(&self) -> Option<&dyn error::Error> {
if let Error::Ecdsa(ref e) = *self {
Some(e)
} else {
@ -377,7 +377,7 @@ impl ExtendedPrivKey {
Err(e) => return Err(Error::MnemonicError(e)),
};
let mut hasher = BIP32GrinHasher::new();
let key = try!(ExtendedPrivKey::new_master(secp, &mut hasher, &seed));
let key = r#try!(ExtendedPrivKey::new_master(secp, &mut hasher, &seed));
Ok(key)
}
@ -429,7 +429,8 @@ impl ExtendedPrivKey {
hasher.append_sha512(&be_n);
let result = hasher.result_sha512();
let mut sk = SecretKey::from_slice(secp, &result[..32]).map_err(Error::Ecdsa)?;
sk.add_assign(secp, &self.secret_key).map_err(Error::Ecdsa)?;
sk.add_assign(secp, &self.secret_key)
.map_err(Error::Ecdsa)?;
Ok(ExtendedPrivKey {
network: self.network,
@ -518,7 +519,7 @@ impl ExtendedPubKey {
BigEndian::write_u32(&mut be_n, n);
hasher.append_sha512(&be_n);
let mut result = hasher.result_sha512();
let result = hasher.result_sha512();
let secret_key = SecretKey::from_slice(secp, &result[..32])?;
let chain_code = ChainCode::from(&result[32..]);
@ -574,7 +575,7 @@ impl ExtendedPubKey {
}
impl fmt::Display for ExtendedPrivKey {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut ret = [0; 78];
ret[0..4].copy_from_slice(&self.network[0..4]);
ret[4] = self.depth as u8;
@ -619,7 +620,7 @@ impl FromStr for ExtendedPrivKey {
}
impl fmt::Display for ExtendedPubKey {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
let secp = Secp256k1::without_caps();
let mut ret = [0; 78];
ret[0..4].copy_from_slice(&self.network[0..4]);
@ -669,8 +670,8 @@ mod tests {
use std::str::FromStr;
use std::string::ToString;
use util::from_hex;
use util::secp::Secp256k1;
use crate::util::from_hex;
use crate::util::secp::Secp256k1;
use super::*;
View file
@ -17,13 +17,13 @@
use rand::distributions::Alphanumeric;
use rand::{thread_rng, Rng};
use blake2;
use crate::blake2;
use extkey_bip32::{BIP32GrinHasher, ExtendedPrivKey};
use types::{BlindSum, BlindingFactor, Error, ExtKeychainPath, Identifier, Keychain};
use util::secp::key::SecretKey;
use util::secp::pedersen::Commitment;
use util::secp::{self, Message, Secp256k1, Signature};
use crate::extkey_bip32::{BIP32GrinHasher, ExtendedPrivKey};
use crate::types::{BlindSum, BlindingFactor, Error, ExtKeychainPath, Identifier, Keychain};
use crate::util::secp::key::SecretKey;
use crate::util::secp::pedersen::Commitment;
use crate::util::secp::{self, Message, Secp256k1, Signature};
#[derive(Clone, Debug)]
pub struct ExtKeychain {
@ -95,7 +95,8 @@ impl Keychain for ExtKeychain {
} else {
None
}
}).collect();
})
.collect();
let mut neg_keys: Vec<SecretKey> = blind_sum
.negative_key_ids
@ -107,7 +108,8 @@ impl Keychain for ExtKeychain {
} else {
None
}
}).collect();
})
.collect();
pos_keys.extend(
&blind_sum
@ -152,10 +154,10 @@ impl Keychain for ExtKeychain {
#[cfg(test)]
mod test {
use keychain::ExtKeychain;
use types::{BlindSum, BlindingFactor, ExtKeychainPath, Keychain};
use util::secp;
use util::secp::key::SecretKey;
use crate::keychain::ExtKeychain;
use crate::types::{BlindSum, BlindingFactor, ExtKeychainPath, Keychain};
use crate::util::secp;
use crate::util::secp::key::SecretKey;
#[test]
fn test_key_derivation() {
@ -191,7 +193,8 @@ mod test {
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1,
],
).unwrap();
)
.unwrap();
let skey2 = SecretKey::from_slice(
&keychain.secp,
@ -199,7 +202,8 @@ mod test {
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 2,
],
).unwrap();
)
.unwrap();
// adding secret keys 1 and 2 to give secret key 3
let mut skey3 = skey1.clone();
@ -228,7 +232,8 @@ mod test {
&BlindSum::new()
.add_blinding_factor(BlindingFactor::from_secret_key(skey1))
.add_blinding_factor(BlindingFactor::from_secret_key(skey2))
).unwrap(),
)
.unwrap(),
BlindingFactor::from_secret_key(skey3),
);
}
View file
@ -14,22 +14,14 @@
//! Library module for the key holder functionalities provided by Grin.
extern crate blake2_rfc as blake2;
extern crate byteorder;
use blake2_rfc as blake2;
#[macro_use]
extern crate grin_util as util;
extern crate rand;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate digest;
extern crate hmac;
extern crate log;
extern crate pbkdf2;
extern crate ripemd160;
extern crate serde_json;
extern crate sha2;
extern crate uuid;
#[macro_use]
extern crate lazy_static;
@ -39,8 +31,8 @@ pub mod mnemonic;
mod types;
pub mod keychain;
pub use extkey_bip32::ChildNumber;
pub use keychain::ExtKeychain;
pub use types::{
pub use crate::extkey_bip32::ChildNumber;
pub use crate::keychain::ExtKeychain;
pub use crate::types::{
BlindSum, BlindingFactor, Error, ExtKeychainPath, Identifier, Keychain, IDENTIFIER_SIZE,
};
View file
@ -40,7 +40,7 @@ pub enum Error {
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
Error::BadWord(ref b) => write!(f, "invalid bip39 word {}", b),
Error::BadChecksum(exp, actual) => write!(
@ -72,7 +72,7 @@ pub fn to_entropy(mnemonic: &str) -> Result<Vec<u8>, Error> {
}
// u11 vector of indexes for each word
let mut indexes: Vec<u16> = try!(words.iter().map(|x| search(x)).collect());
let mut indexes: Vec<u16> = r#try!(words.iter().map(|x| search(x)).collect());
let checksum_bits = words.len() / 3;
let mask = ((1 << checksum_bits) - 1) as u8;
let last = indexes.pop().unwrap();
@ -155,7 +155,7 @@ where
Option<&'a str>: From<T>,
{
// make sure the mnemonic is valid
try!(to_entropy(mnemonic));
r#try!(to_entropy(mnemonic));
let salt = ("mnemonic".to_owned() + Option::from(passphrase).unwrap_or("")).into_bytes();
let data = mnemonic.as_bytes();
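
The hunk above also shows how the BIP39 seed derivation is set up: the mnemonic is validated, the PBKDF2 salt is the string "mnemonic" concatenated with the optional passphrase, and the mnemonic bytes are the password input. A tiny sketch of just the salt construction, simplified to take `Option<&str>` directly instead of the generic parameter used in the real function:

```rust
// Salt for the BIP39 PBKDF2 step: "mnemonic" + optional passphrase.
fn bip39_salt(passphrase: Option<&str>) -> Vec<u8> {
    ("mnemonic".to_owned() + passphrase.unwrap_or("")).into_bytes()
}

fn main() {
    assert_eq!(bip39_salt(None), b"mnemonic".to_vec());
    assert_eq!(bip39_salt(Some("pass")), b"mnemonicpass".to_vec());
}
```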
@ -169,8 +169,8 @@ where
#[cfg(test)]
mod tests {
use super::{from_entropy, to_entropy, to_seed};
use crate::util::{from_hex, to_hex};
use rand::{thread_rng, Rng};
use util::{from_hex, to_hex};
struct Test<'a> {
mnemonic: &'a str,
View file
@ -22,16 +22,16 @@ use std::ops::Add;
/// commitment generation.
use std::{error, fmt};
use blake2::blake2b::blake2b;
use extkey_bip32::{self, ChildNumber, ExtendedPrivKey};
use crate::blake2::blake2b::blake2b;
use crate::extkey_bip32::{self, ChildNumber, ExtendedPrivKey};
use serde::{de, ser}; //TODO: Convert errors to use ErrorKind
use util;
use util::secp::constants::SECRET_KEY_SIZE;
use util::secp::key::{PublicKey, SecretKey};
use util::secp::pedersen::Commitment;
use util::secp::{self, Message, Secp256k1, Signature};
use util::static_secp_instance;
use crate::util;
use crate::util::secp::constants::SECRET_KEY_SIZE;
use crate::util::secp::key::{PublicKey, SecretKey};
use crate::util::secp::pedersen::Commitment;
use crate::util::secp::{self, Message, Secp256k1, Signature};
use crate::util::static_secp_instance;
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
@ -67,7 +67,7 @@ impl error::Error for Error {
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
_ => write!(f, "some kind of keychain error"),
}
@ -100,7 +100,7 @@ struct IdentifierVisitor;
impl<'de> de::Visitor<'de> for IdentifierVisitor {
type Value = Identifier;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
formatter.write_str("an identifier")
}
@ -206,15 +206,15 @@ impl AsRef<[u8]> for Identifier {
}
impl ::std::fmt::Debug for Identifier {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
try!(write!(f, "{}(", stringify!(Identifier)));
try!(write!(f, "{}", self.to_hex()));
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
r#try!(write!(f, "{}(", stringify!(Identifier)));
r#try!(write!(f, "{}", self.to_hex()));
write!(f, ")")
}
}
impl fmt::Display for Identifier {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.to_hex())
}
}
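
The many `fmt::Formatter` to `fmt::Formatter<'_>` edits in this commit come from the `rust_2018_idioms` lints, which ask for elided lifetimes in type paths to be written explicitly as `'_`; behaviour is unchanged, only the signature is more explicit. A minimal sketch with a toy type rather than the keychain's `Identifier`:

```rust
use std::fmt;

struct Hex(u8);

impl fmt::Display for Hex {
    // `Formatter<'_>` spells out the lifetime that used to be silently elided;
    // with the 2018 idiom lints enabled the bare `Formatter` form warns.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "0x{:02x}", self.0)
    }
}

fn main() {
    assert_eq!(format!("{}", Hex(255)), "0xff");
}
```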
@ -223,7 +223,7 @@ impl fmt::Display for Identifier {
pub struct BlindingFactor([u8; SECRET_KEY_SIZE]);
impl fmt::Debug for BlindingFactor {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.to_hex())
}
}
@ -440,7 +440,7 @@ pub trait Keychain: Sync + Send + Clone {
fn commit(&self, amount: u64, id: &Identifier) -> Result<Commitment, Error>;
fn blind_sum(&self, blind_sum: &BlindSum) -> Result<BlindingFactor, Error>;
fn sign(&self, msg: &Message, id: &Identifier) -> Result<Signature, Error>;
fn sign_with_blinding(&self, &Message, &BlindingFactor) -> Result<Signature, Error>;
fn sign_with_blinding(&self, _: &Message, _: &BlindingFactor) -> Result<Signature, Error>;
fn secp(&self) -> &Secp256k1;
}
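
The `sign_with_blinding` signature change illustrates one more 2018 rule: trait method declarations may no longer use anonymous parameters, so every argument needs at least an `_` pattern before its type. A toy trait showing the accepted form, with hypothetical names:

```rust
trait Signer {
    // 2015 allowed `fn sign(&self, &[u8]) -> u64;`.
    // 2018 requires a pattern for each parameter; `_` is the minimal one.
    fn sign(&self, _: &[u8]) -> u64;
}

struct ChecksumSigner;

impl Signer for ChecksumSigner {
    fn sign(&self, msg: &[u8]) -> u64 {
        msg.iter().map(|b| u64::from(*b)).sum()
    }
}

fn main() {
    assert_eq!(ChecksumSigner.sign(&[1, 2, 3]), 6);
}
```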
@ -448,9 +448,9 @@ pub trait Keychain: Sync + Send + Clone {
mod test {
use rand::thread_rng;
use types::{BlindingFactor, ExtKeychainPath, Identifier};
use util::secp::key::{SecretKey, ZERO_KEY};
use util::secp::Secp256k1;
use crate::types::{BlindingFactor, ExtKeychainPath, Identifier};
use crate::util::secp::key::{SecretKey, ZERO_KEY};
use crate::util::secp::Secp256k1;
#[test]
fn split_blinding_factor() {
View file
@ -7,6 +7,7 @@ license = "Apache-2.0"
repository = "https://github.com/mimblewimble/grin"
keywords = [ "crypto", "grin", "mimblewimble" ]
workspace = ".."
edition = "2018"
[dependencies]
bitflags = "1"
View file
@ -26,12 +26,12 @@ use std::net::TcpStream;
use std::sync::{mpsc, Arc};
use std::{cmp, thread, time};
use core::ser;
use core::ser::FixedLength;
use msg::{read_body, read_header, read_item, write_to_buf, MsgHeader, Type};
use types::Error;
use util::read_write::{read_exact, write_all};
use util::{RateCounter, RwLock};
use crate::core::ser;
use crate::core::ser::FixedLength;
use crate::msg::{read_body, read_header, read_item, write_to_buf, MsgHeader, Type};
use crate::types::Error;
use crate::util::read_write::{read_exact, write_all};
use crate::util::{RateCounter, RwLock};
/// A trait to be implemented in order to receive messages from the
/// connection. Allows providing an optional response.
@ -39,7 +39,7 @@ pub trait MessageHandler: Send + 'static {
fn consume<'a>(
&self,
msg: Message<'a>,
writer: &'a mut Write,
writer: &'a mut dyn Write,
received_bytes: Arc<RwLock<RateCounter>>,
) -> Result<Option<Response<'a>>, Error>;
}
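
`MessageHandler::consume` now takes its writer as `&'a mut dyn Write`, the same explicit-`dyn` rule applied to a borrowed trait object in argument position. A self-contained sketch of passing a writer that way, using only `std::io::Write` rather than grin's connection types:

```rust
use std::io::Write;

// Any `Write` implementor can be handed in behind `&mut dyn Write`;
// the explicit `dyn` is the 2018-idioms spelling of the trait object.
fn write_greeting(writer: &mut dyn Write) -> std::io::Result<()> {
    writer.write_all(b"hello\n")
}

fn main() -> std::io::Result<()> {
    let mut buf: Vec<u8> = Vec::new(); // Vec<u8> implements Write
    write_greeting(&mut buf)?;
    assert_eq!(buf, b"hello\n".to_vec());
    Ok(())
}
```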
@ -63,11 +63,11 @@ macro_rules! try_break {
/// header lazily consumes the message body, handling its deserialization.
pub struct Message<'a> {
pub header: MsgHeader,
stream: &'a mut Read,
stream: &'a mut dyn Read,
}
impl<'a> Message<'a> {
fn from_header(header: MsgHeader, stream: &'a mut Read) -> Message<'a> {
fn from_header(header: MsgHeader, stream: &'a mut dyn Read) -> Message<'a> {
Message { header, stream }
}
@ -82,7 +82,7 @@ impl<'a> Message<'a> {
read_item(self.stream)
}
pub fn copy_attachment(&mut self, len: usize, writer: &mut Write) -> Result<usize, Error> {
pub fn copy_attachment(&mut self, len: usize, writer: &mut dyn Write) -> Result<usize, Error> {
let mut written = 0;
while written < len {
let read_len = cmp::min(8000, len - written);
@ -104,12 +104,16 @@ impl<'a> Message<'a> {
pub struct Response<'a> {
resp_type: Type,
body: Vec<u8>,
stream: &'a mut Write,
stream: &'a mut dyn Write,
attachment: Option<File>,
}
impl<'a> Response<'a> {
pub fn new<T: ser::Writeable>(resp_type: Type, body: T, stream: &'a mut Write) -> Response<'a> {
pub fn new<T: ser::Writeable>(
resp_type: Type,
body: T,
stream: &'a mut dyn Write,
) -> Response<'a> {
let body = ser::ser_vec(&body).unwrap();
Response {
resp_type,
Some files were not shown because too many files have changed in this diff.