Mirror of https://github.com/mimblewimble/grin.git (synced 2025-01-21 03:21:08 +03:00)
Reduce number of unwraps in chain crate (#2679)
Commit dc59f67c7b (parent 45d5686310)
12 changed files with 72 additions and 45 deletions
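The hunks below all follow one pattern: functions that used to call .unwrap() internally now return Result<_, Error> and propagate failures with the ? operator, while callers that cannot return a Result handle the error explicitly (tests keep .unwrap(), where a panic is acceptable). A minimal sketch of that pattern, using hypothetical Store/Tip types rather than the actual chain API:

use std::fmt;

// Hypothetical types for this sketch only; not the grin chain API.
struct Tip {
    height: u64,
}

struct Store {
    tip: Option<Tip>,
}

#[derive(Debug)]
struct Error(String);

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl Store {
    fn head(&self) -> Result<&Tip, Error> {
        self.tip
            .as_ref()
            .ok_or_else(|| Error("no chain head".to_owned()))
    }
}

// Before: `store.head().unwrap().height` would panic on any store failure.
// After: the failure is returned to the caller and `?` short-circuits.
fn head_height(store: &Store) -> Result<u64, Error> {
    let head = store.head()?;
    Ok(head.height)
}

fn main() {
    let store = Store { tip: Some(Tip { height: 42 }) };
    match head_height(&store) {
        Ok(h) => println!("height: {}", h),
        Err(e) => eprintln!("failed to read head: {}", e),
    }
}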
@@ -765,11 +765,15 @@ impl Chain {
     }

     /// Check chain status whether a txhashset downloading is needed
-    pub fn check_txhashset_needed(&self, caller: String, hashes: &mut Option<Vec<Hash>>) -> bool {
+    pub fn check_txhashset_needed(
+        &self,
+        caller: String,
+        hashes: &mut Option<Vec<Hash>>,
+    ) -> Result<bool, Error> {
         let horizon = global::cut_through_horizon() as u64;
-        let body_head = self.head().unwrap();
-        let header_head = self.header_head().unwrap();
-        let sync_head = self.get_sync_head().unwrap();
+        let body_head = self.head()?;
+        let header_head = self.header_head()?;
+        let sync_head = self.get_sync_head()?;

         debug!(
             "{}: body_head - {}, {}, header_head - {}, {}, sync_head - {}, {}",
@@ -787,7 +791,7 @@ impl Chain {
                 "{}: no need txhashset. header_head.total_difficulty: {} <= body_head.total_difficulty: {}",
                 caller, header_head.total_difficulty, body_head.total_difficulty,
             );
-            return false;
+            return Ok(false);
         }

         let mut oldest_height = 0;
@@ -828,13 +832,14 @@ impl Chain {
                     "{}: need a state sync for txhashset. oldest block which is not on local chain: {} at {}",
                     caller, oldest_hash, oldest_height,
                 );
-                return true;
+                Ok(true)
             } else {
                 error!("{}: something is wrong! oldest_height is 0", caller);
-                return false;
-            };
+                Ok(false)
+            }
+        } else {
+            Ok(false)
         }
-        return false;
     }

     /// Writes a reading view on a txhashset state that's been provided to us.
@@ -851,7 +856,7 @@ impl Chain {

         // Initial check whether this txhashset is needed or not
         let mut hashes: Option<Vec<Hash>> = None;
-        if !self.check_txhashset_needed("txhashset_write".to_owned(), &mut hashes) {
+        if !self.check_txhashset_needed("txhashset_write".to_owned(), &mut hashes)? {
             warn!("txhashset_write: txhashset received but it's not needed! ignored.");
             return Err(ErrorKind::InvalidTxHashSet("not needed".to_owned()).into());
         }
@@ -1230,10 +1235,10 @@ impl Chain {
     /// Builds an iterator on blocks starting from the current chain head and
     /// running backward. Specialized to return information pertaining to block
     /// difficulty calculation (timestamp and previous difficulties).
-    pub fn difficulty_iter(&self) -> store::DifficultyIter<'_> {
-        let head = self.head().unwrap();
+    pub fn difficulty_iter(&self) -> Result<store::DifficultyIter<'_>, Error> {
+        let head = self.head()?;
         let store = self.store.clone();
-        store::DifficultyIter::from(head.last_block_h, store)
+        Ok(store::DifficultyIter::from(head.last_block_h, store))
     }

     /// Check whether we have a block without reading it
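For a line like `let body_head = self.head()?;` to compile inside a function returning `Result<bool, Error>`, the error produced by the inner call has to convert into the function's error type; the ? operator does this through From. A compact illustration with invented error types (the real crate has its own Error/ErrorKind machinery):

// Placeholder error types for illustration only.
#[derive(Debug)]
struct StoreError(String);

#[derive(Debug)]
enum ChainError {
    Store(StoreError),
}

// `?` calls `From::from` on the error value, so a StoreError produced inside a
// function returning Result<_, ChainError> converts automatically.
impl From<StoreError> for ChainError {
    fn from(e: StoreError) -> Self {
        ChainError::Store(e)
    }
}

fn head_height() -> Result<u64, StoreError> {
    Ok(100)
}

fn check_needed() -> Result<bool, ChainError> {
    let height = head_height()?; // StoreError -> ChainError via From
    Ok(height > 0)
}

fn main() {
    println!("{:?}", check_needed());
}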
@@ -183,16 +183,21 @@ pub fn sync_block_headers(
     headers: &[BlockHeader],
     ctx: &mut BlockContext<'_>,
 ) -> Result<Option<Tip>, Error> {
-    if let Some(header) = headers.first() {
-        debug!(
-            "pipe: sync_block_headers: {} headers from {} at {}",
-            headers.len(),
-            header.hash(),
-            header.height,
-        );
-    } else {
-        return Ok(None);
-    }
+    let first_header = match headers.first() {
+        Some(header) => {
+            debug!(
+                "pipe: sync_block_headers: {} headers from {} at {}",
+                headers.len(),
+                header.hash(),
+                header.height,
+            );
+            header
+        }
+        None => {
+            error!("failed to get the first header");
+            return Ok(None);
+        }
+    };

     let all_known = if let Some(last_header) = headers.last() {
         ctx.batch.get_block_header(&last_header.hash()).is_ok()
@@ -201,7 +206,6 @@ pub fn sync_block_headers(
     };

     if !all_known {
-        let first_header = headers.first().unwrap();
         let prev_header = ctx.batch.get_previous_header(&first_header)?;
         txhashset::sync_extending(&mut ctx.txhashset, &mut ctx.batch, |extension| {
             extension.rewind(&prev_header)?;
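The sync_block_headers change binds the first header once through a match on headers.first(), which is what lets the later headers.first().unwrap() be deleted. Roughly the same shape, rebuilt with stand-in types:

// Sketch with stand-in types; `Header` here is not the grin BlockHeader.
#[derive(Debug)]
struct Header {
    height: u64,
}

fn sync_headers(headers: &[Header]) -> Result<Option<u64>, String> {
    // Bind the first header once; the empty-slice case is handled here, so no
    // later `headers.first().unwrap()` is needed further down.
    let first_header = match headers.first() {
        Some(header) => {
            println!(
                "syncing {} headers starting at height {}",
                headers.len(),
                header.height
            );
            header
        }
        None => {
            eprintln!("failed to get the first header");
            return Ok(None);
        }
    };

    // Later code can use `first_header` directly.
    Ok(Some(first_header.height))
}

fn main() {
    assert_eq!(sync_headers(&[]).unwrap(), None);
    assert_eq!(sync_headers(&[Header { height: 7 }]).unwrap(), Some(7));
}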
@@ -272,7 +272,7 @@ impl<'a> Batch<'a> {
     /// Clear all entries from the output_pos index (must be rebuilt after).
     pub fn clear_output_pos(&self) -> Result<(), Error> {
         let key = to_key(COMMIT_POS_PREFIX, &mut "".to_string().into_bytes());
-        for (k, _) in self.db.iter::<u64>(&key).unwrap() {
+        for (k, _) in self.db.iter::<u64>(&key)? {
             self.db.delete(&k)?;
         }
         Ok(())
@@ -66,7 +66,10 @@ impl<T: PMMRable> PMMRHandle<T> {
     ) -> Result<PMMRHandle<T>, Error> {
         let path = Path::new(root_dir).join(sub_dir).join(file_name);
         fs::create_dir_all(path.clone())?;
-        let backend = PMMRBackend::new(path.to_str().unwrap().to_string(), prunable, header)?;
+        let path_str = path.to_str().ok_or(Error::from(ErrorKind::Other(
+            "invalid file path".to_owned(),
+        )))?;
+        let backend = PMMRBackend::new(path_str.to_string(), prunable, header)?;
         let last_pos = backend.unpruned_size();
         Ok(PMMRHandle { backend, last_pos })
     }
@@ -1470,7 +1473,7 @@ fn expected_file(path: &Path) -> bool {
             )
             .as_str()
         )
-        .unwrap();
+        .expect("invalid txhashset regular expression");
     }
     RE.is_match(&s_path)
 }
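Two smaller idioms show up in these hunks: path.to_str() returns an Option, so ok_or turns the None case into a proper error that ? can propagate, and the remaining unwrap on the lazily built regex becomes expect, which still panics but with a descriptive message. A standalone sketch of both, assuming the regex crate (already used by the original code) is on the dependency list and using an invented Error enum; the pattern string is illustrative, not the one from the crate:

use std::path::Path;

// Invented error type standing in for the crate's Error/ErrorKind.
#[derive(Debug)]
enum Error {
    Other(String),
}

// Option -> Result so `?` applies: a non-UTF-8 path becomes an error value
// instead of a panicking unwrap.
fn path_to_string(path: &Path) -> Result<String, Error> {
    let path_str = path
        .to_str()
        .ok_or(Error::Other("invalid file path".to_owned()))?;
    Ok(path_str.to_string())
}

fn main() -> Result<(), Error> {
    println!("{}", path_to_string(Path::new("/tmp/txhashset"))?);

    // `expect` instead of `unwrap`: still panics on failure, but the message
    // says what actually went wrong.
    let re = regex::Regex::new(r"^pmmr_(hash|data)\.bin$")
        .expect("invalid txhashset regular expression");
    println!("matches: {}", re.is_match("pmmr_hash.bin"));
    Ok(())
}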
@@ -81,7 +81,7 @@ fn data_files() {

     for n in 1..4 {
         let prev = chain.head_header().unwrap();
-        let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter());
+        let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter().unwrap());
         let pk = ExtKeychainPath::new(1, n as u32, 0, 0, 0).to_identifier();
         let reward = libtx::reward::output(&keychain, &pk, 0).unwrap();
         let mut b =
@@ -101,7 +101,7 @@ where

     for n in 1..4 {
         let prev = chain.head_header().unwrap();
-        let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter());
+        let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter().unwrap());
         let pk = ExtKeychainPath::new(1, n as u32, 0, 0, 0).to_identifier();
         let reward = libtx::reward::output(keychain, &pk, 0).unwrap();
         let mut b =
@@ -409,7 +409,7 @@ fn output_header_mappings() {

     for n in 1..15 {
         let prev = chain.head_header().unwrap();
-        let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter());
+        let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter().unwrap());
         let pk = ExtKeychainPath::new(1, n as u32, 0, 0, 0).to_identifier();
         let reward = libtx::reward::output(&keychain, &pk, 0).unwrap();
         reward_outputs.push(reward.0.clone());
@@ -545,7 +545,7 @@ fn actual_diff_iter_output() {
         Arc::new(Mutex::new(StopState::new())),
     )
     .unwrap();
-    let iter = chain.difficulty_iter();
+    let iter = chain.difficulty_iter().unwrap();
     let mut last_time = 0;
     let mut first = true;
     for elem in iter.into_iter() {
@@ -66,7 +66,7 @@ fn test_coinbase_maturity() {
     let key_id3 = ExtKeychainPath::new(1, 3, 0, 0, 0).to_identifier();
     let key_id4 = ExtKeychainPath::new(1, 4, 0, 0, 0).to_identifier();

-    let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter());
+    let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter().unwrap());
     let reward = libtx::reward::output(&keychain, &key_id1, 0).unwrap();
     let mut block = core::core::Block::new(&prev, vec![], Difficulty::min(), reward).unwrap();
     block.header.timestamp = prev.timestamp + Duration::seconds(60);
@@ -113,7 +113,7 @@ fn test_coinbase_maturity() {
     let fees = txs.iter().map(|tx| tx.fee()).sum();
     let reward = libtx::reward::output(&keychain, &key_id3, fees).unwrap();
     let mut block = core::core::Block::new(&prev, txs, Difficulty::min(), reward).unwrap();
-    let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter());
+    let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter().unwrap());
     block.header.timestamp = prev.timestamp + Duration::seconds(60);
     block.header.pow.secondary_scaling = next_header_info.secondary_scaling;

@@ -147,7 +147,7 @@ fn test_coinbase_maturity() {

     let reward = libtx::reward::output(&keychain, &pk, 0).unwrap();
     let mut block = core::core::Block::new(&prev, vec![], Difficulty::min(), reward).unwrap();
-    let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter());
+    let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter().unwrap());
     block.header.timestamp = prev.timestamp + Duration::seconds(60);
     block.header.pow.secondary_scaling = next_header_info.secondary_scaling;

@@ -172,7 +172,7 @@ fn test_coinbase_maturity() {

     let txs = vec![coinbase_txn];
     let fees = txs.iter().map(|tx| tx.fee()).sum();
-    let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter());
+    let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter().unwrap());
     let reward = libtx::reward::output(&keychain, &key_id4, fees).unwrap();
     let mut block = core::core::Block::new(&prev, txs, Difficulty::min(), reward).unwrap();

@@ -413,7 +413,7 @@ impl Server {
         // for release
         let diff_stats = {
             let last_blocks: Vec<consensus::HeaderInfo> =
-                global::difficulty_data_to_vector(self.chain.difficulty_iter())
+                global::difficulty_data_to_vector(self.chain.difficulty_iter()?)
                     .into_iter()
                     .collect();

@@ -69,10 +69,17 @@ impl BodySync {
     /// Return true if txhashset download is needed (when requested block is under the horizon).
     fn body_sync(&mut self) -> bool {
         let mut hashes: Option<Vec<Hash>> = Some(vec![]);
-        if self
+        let txhashset_needed = match self
             .chain
             .check_txhashset_needed("body_sync".to_owned(), &mut hashes)
         {
+            Ok(v) => v,
+            Err(e) => {
+                error!("body_sync: failed to call txhashset_needed: {:?}", e);
+                return false;
+            }
+        };
+        if txhashset_needed {
             debug!(
                 "body_sync: cannot sync full blocks earlier than horizon. will request txhashset",
             );
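body_sync returns a plain bool (and the sync-threshold code below returns a tuple), so the new Result cannot be propagated with ?; instead it is matched, the error is logged, and a safe default is returned. A self-contained sketch of that boundary handling, with invented names:

// Stand-in for a fallible chain call; not the actual grin API.
fn check_txhashset_needed() -> Result<bool, String> {
    Err("store unavailable".to_owned())
}

// The caller must return a plain bool, so it cannot use `?`; the error is
// logged and a conservative default is returned instead of panicking.
fn body_sync() -> bool {
    let txhashset_needed = match check_txhashset_needed() {
        Ok(v) => v,
        Err(e) => {
            eprintln!("body_sync: failed to call txhashset_needed: {:?}", e);
            return false;
        }
    };
    txhashset_needed
}

fn main() {
    assert!(!body_sync());
}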
@@ -209,12 +209,20 @@ impl SyncRunner {
             }
         } else {
             // sum the last 5 difficulties to give us the threshold
-            let threshold = self
-                .chain
-                .difficulty_iter()
-                .map(|x| x.difficulty)
-                .take(5)
-                .fold(Difficulty::zero(), |sum, val| sum + val);
+            let threshold = {
+                let diff_iter = match self.chain.difficulty_iter() {
+                    Ok(v) => v,
+                    Err(e) => {
+                        error!("failed to get difficulty iterator: {:?}", e);
+                        // we handle 0 height in the caller
+                        return (false, 0);
+                    }
+                };
+                diff_iter
+                    .map(|x| x.difficulty)
+                    .take(5)
+                    .fold(Difficulty::zero(), |sum, val| sum + val)
+            };

             let peer_diff = peer_info.total_difficulty();
             if peer_diff > local_diff.clone() + threshold.clone() {
@@ -105,7 +105,7 @@ fn build_block(

     // Determine the difficulty our block should be at.
     // Note: do not keep the difficulty_iter in scope (it has an active batch).
-    let difficulty = consensus::next_difficulty(head.height + 1, chain.difficulty_iter());
+    let difficulty = consensus::next_difficulty(head.height + 1, chain.difficulty_iter()?);

     // Extract current "mineable" transactions from the pool.
     // If this fails for *any* reason then fallback to an empty vec of txs.
@@ -83,7 +83,7 @@ fn get_outputs_by_pmmr_index_local(
 /// Adds a block with a given reward to the chain and mines it
 pub fn add_block_with_reward(chain: &Chain, txs: Vec<&Transaction>, reward: CbData) {
     let prev = chain.head_header().unwrap();
-    let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter());
+    let next_header_info = consensus::next_difficulty(1, chain.difficulty_iter().unwrap());
     let out_bin = util::from_hex(reward.output).unwrap();
     let kern_bin = util::from_hex(reward.kernel).unwrap();
     let output = ser::deserialize(&mut &out_bin[..]).unwrap();