Mirror of https://github.com/mimblewimble/grin.git (synced 2025-05-07 01:31:15 +03:00)

Commit 0a0704540f (parent 5dcbe96d5c): process restart

6 changed files with 76 additions and 22 deletions (changed paths: chain, servers/src/grin/sync).

@@ -158,7 +158,7 @@ pub struct Chain {
     pow_verifier: fn(&BlockHeader) -> Result<(), pow::Error>,
     denylist: Arc<RwLock<Vec<Hash>>>,
     archive_mode: bool,
-    genesis: BlockHeader,
+    genesis: Block,
 }
 
 impl Chain {
@@ -184,7 +184,7 @@ impl Chain {
             None,
         )?;
 
-        setup_head(&genesis, &store, &mut header_pmmr, &mut txhashset)?;
+        setup_head(&genesis, &store, &mut header_pmmr, &mut txhashset, false)?;
 
         // Initialize the output_pos index based on UTXO set
         // and NRD kernel_pos index based recent kernel history.
@@ -207,7 +207,7 @@ impl Chain {
             pow_verifier,
             denylist: Arc::new(RwLock::new(vec![])),
             archive_mode,
-            genesis: genesis.header,
+            genesis: genesis,
         };
 
         chain.log_heads()?;
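
A note on the recurring self.genesis to self.genesis.header edits in the hunks below: the genesis field on Chain is widened from the genesis BlockHeader to the full genesis Block (first hunk above), so header-only call sites now project .header out of it, while setup_head can be handed the whole block again later. A minimal stand-in sketch of that shape, with toy types rather than grin's:

// Toy types standing in for grin's BlockHeader/Block; illustration only.
#[derive(Clone)]
struct Header {
    height: u64,
}

#[derive(Clone)]
struct Block {
    header: Header,
}

struct Chain {
    genesis: Block, // previously only the genesis header was kept here
}

impl Chain {
    // Header-only callers read the header out of the stored block.
    fn genesis_header(&self) -> Header {
        self.genesis.header.clone()
    }
}

fn main() {
    let chain = Chain {
        genesis: Block { header: Header { height: 0 } },
    };
    assert_eq!(chain.genesis_header().height, 0);
}
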
@@ -242,6 +242,7 @@ impl Chain {
 
         let header = batch.get_block_header(&head.hash())?;
 
+        error!("RESET CHAIN HEAD");
         // Rewind and reapply blocks to reset the output/rangeproof/kernel MMR.
         txhashset::extending(
             &mut header_pmmr,
@@ -269,6 +270,34 @@ impl Chain {
         Ok(())
     }
 
+    /// wipes the chain head down to genesis, without attempting to rewind
+    /// Used upon PIBD failure, where you want to keep the header chain but
+    /// restart the MMRs from scratch
+    pub fn reset_chain_head_to_genesis(&self) -> Result<(), Error> {
+        error!("RESET CHAIN HEAD TO GENESIS");
+        let mut header_pmmr = self.header_pmmr.write();
+        let mut txhashset = self.txhashset.write();
+        let batch = self.store.batch()?;
+
+        // Change head back to genesis
+        {
+            let head = Tip::from_header(&self.genesis.header);
+            batch.save_body_head(&head)?;
+            batch.commit()?;
+        }
+
+        // Reinit
+        setup_head(
+            &self.genesis,
+            &self.store,
+            &mut header_pmmr,
+            &mut txhashset,
+            true,
+        )?;
+
+        Ok(())
+    }
+
     /// Reset prune lists (when PIBD resets and rolls back the
     /// entire chain, the prune list needs to be manually wiped
     /// as it's currently not included as part of rewind)
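
Illustrative usage, quoted from the servers/src/grin/sync hunk at the end of this diff rather than an extra change: the intended caller of reset_chain_head_to_genesis is the PIBD restart path, where it is paired with reset_prune_lists and error handling is log-and-continue:

if let Err(e) = self.chain.reset_chain_head_to_genesis() {
    error!("pibd_sync restart: chain reset to genesis error = {}", e);
}
if let Err(e) = self.chain.reset_prune_lists() {
    error!("pibd_sync restart: reset prune lists error = {}", e);
}
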
@@ -309,7 +338,7 @@ impl Chain {
 
     /// return genesis header
     pub fn genesis(&self) -> BlockHeader {
-        self.genesis.clone()
+        self.genesis.header.clone()
     }
 
     /// Shared store instance.
@@ -702,8 +731,9 @@ impl Chain {
         // ensure the view is consistent.
         txhashset::extending_readonly(&mut header_pmmr, &mut txhashset, |ext, batch| {
             self.rewind_and_apply_fork(&header, ext, batch)?;
+            error!("VALIDATE");
             ext.extension.validate(
-                &self.genesis,
+                &self.genesis.header,
                 fast_validation,
                 &NoStatus,
                 None,
@@ -739,6 +769,7 @@ impl Chain {
 
         let (prev_root, roots, sizes) =
             txhashset::extending_readonly(&mut header_pmmr, &mut txhashset, |ext, batch| {
+                error!("SETTING ROOTS");
                 let previous_header = batch.get_previous_header(&b.header)?;
                 self.rewind_and_apply_fork(&previous_header, ext, batch)?;
 
@@ -783,6 +814,7 @@ impl Chain {
     ) -> Result<MerkleProof, Error> {
         let mut header_pmmr = self.header_pmmr.write();
         let mut txhashset = self.txhashset.write();
+        error!("GET MERKLE PROOF");
         let merkle_proof =
             txhashset::extending_readonly(&mut header_pmmr, &mut txhashset, |ext, batch| {
                 self.rewind_and_apply_fork(&header, ext, batch)?;
@@ -808,6 +840,7 @@ impl Chain {
         batch: &Batch,
     ) -> Result<BlockHeader, Error> {
         let denylist = self.denylist.read().clone();
+        error!("REWIND INNER");
         pipe::rewind_and_apply_fork(header, ext, batch, &|header| {
             pipe::validate_header_denylist(header, &denylist)
         })
@@ -842,6 +875,7 @@ impl Chain {
         let mut txhashset = self.txhashset.write();
 
         txhashset::extending_readonly(&mut header_pmmr, &mut txhashset, |ext, batch| {
+            error!("HASHSET READ");
             self.rewind_and_apply_fork(&header, ext, batch)?;
             ext.extension.snapshot(batch)?;
 
@@ -943,7 +977,7 @@ impl Chain {
             self.txhashset(),
             self.header_pmmr.clone(),
             header.clone(),
-            self.genesis.clone(),
+            self.genesis.header.clone(),
             self.store.clone(),
         ))
     }
@@ -1125,7 +1159,13 @@ impl Chain {
 
             let header_pmmr = self.header_pmmr.read();
             let batch = self.store.batch()?;
-            txhashset.verify_kernel_pos_index(&self.genesis, &header_pmmr, &batch, None, None)?;
+            txhashset.verify_kernel_pos_index(
+                &self.genesis.header,
+                &header_pmmr,
+                &batch,
+                None,
+                None,
+            )?;
         }
 
         // all good, prepare a new batch and update all the required records
@@ -1143,8 +1183,15 @@ impl Chain {
 
         // Validate the extension, generating the utxo_sum and kernel_sum.
         // Full validation, including rangeproofs and kernel signature verification.
-        let (utxo_sum, kernel_sum) =
-            extension.validate(&self.genesis, false, status, None, None, &header, None)?;
+        let (utxo_sum, kernel_sum) = extension.validate(
+            &self.genesis.header,
+            false,
+            status,
+            None,
+            None,
+            &header,
+            None,
+        )?;
 
         // Save the block_sums (utxo_sum, kernel_sum) to the db for use later.
         batch.save_block_sums(
@@ -1231,7 +1278,7 @@ impl Chain {
 
         let tail = match batch.tail() {
             Ok(tail) => tail,
-            Err(_) => Tip::from_header(&self.genesis),
+            Err(_) => Tip::from_header(&self.genesis.header),
         };
 
         let mut cutoff = head.height.saturating_sub(horizon);
@@ -1643,6 +1690,7 @@ fn setup_head(
     store: &store::ChainStore,
     header_pmmr: &mut txhashset::PMMRHandle<BlockHeader>,
     txhashset: &mut txhashset::TxHashSet,
+    resetting_pibd: bool,
 ) -> Result<(), Error> {
     let mut batch = store.batch()?;
 
@@ -1689,7 +1737,7 @@ fn setup_head(
             let head = batch.get_block_header(&head.last_block_h)?;
             let pibd_tip = store.pibd_head()?;
             let pibd_head = batch.get_block_header(&pibd_tip.last_block_h)?;
-            if pibd_head.height > head.height {
+            if pibd_head.height > head.height && !resetting_pibd {
                 pibd_in_progress = true;
                 pibd_head
             } else {
@@ -1709,6 +1757,8 @@ fn setup_head(
                 return Ok(());
             }
 
+            debug!("SETTING UP HEAD 1");
+
             pipe::rewind_and_apply_fork(&header, ext, batch, &|_| Ok(()))?;
 
             let extension = &mut ext.extension;
@@ -1756,6 +1806,7 @@ fn setup_head(
             // delete the "bad" block and try again.
             let prev_header = batch.get_block_header(&head.prev_block_h)?;
 
+            error!("SETTING UP HEAD 2");
             txhashset::extending(header_pmmr, txhashset, &mut batch, |ext, batch| {
                 pipe::rewind_and_apply_fork(&prev_header, ext, batch, &|_| Ok(()))
             })?;
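
Toy sketch, not part of the commit: the resetting_pibd flag threaded through setup_head in the @@ -1643 and @@ -1689 hunks above only changes which head the node resumes from. Plain heights stand in for grin's headers here:

// Stand-in for the head-selection check in setup_head: prefer the PIBD header
// tip only when it is ahead of the body head AND we are not deliberately
// resetting a failed PIBD run.
fn use_pibd_head(body_head_height: u64, pibd_head_height: u64, resetting_pibd: bool) -> bool {
    pibd_head_height > body_head_height && !resetting_pibd
}

fn main() {
    // Normal startup mid-PIBD: resume from the taller PIBD tip.
    assert!(use_pibd_head(10, 500, false));
    // reset_chain_head_to_genesis calls setup_head with resetting_pibd = true,
    // so a taller but abandoned PIBD tip is ignored and the body head wins.
    assert!(!use_pibd_head(10, 500, true));
}
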

---- next changed file ----

@@ -122,6 +122,7 @@ pub fn process_block(
     let txhashset = &mut ctx.txhashset;
     let batch = &mut ctx.batch;
     let ctx_specific_validation = &ctx.header_allowed;
+    error!("PROCESSING BLOCK?");
     let fork_point = txhashset::extending(header_pmmr, txhashset, batch, |ext, batch| {
         let fork_point = rewind_and_apply_fork(&prev, ext, batch, ctx_specific_validation)?;
 
@@ -630,6 +631,7 @@ pub fn rewind_and_apply_fork(
     while current.height > 0 && !header_extension.is_on_current_chain(&current, batch)? {
         current = batch.get_previous_header(&current)?;
     }
+    error!("REWIND AND APPLY FORK!");
     let fork_point = current;
     extension.rewind(&fork_point, batch)?;
 
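
For illustration only (toy types, not grin's): the loop instrumented above finds the fork point by walking back from the target header until it reaches a header that lies on the current chain; everything past that point is then rewound and re-applied. A self-contained version of that walk:

#[derive(Clone)]
struct ToyHeader {
    height: u64,
}

// Walk back until a header on the current chain is found; that header is the
// fork point the extension is rewound to before the fork blocks are re-applied.
fn fork_point(
    mut current: ToyHeader,
    get_previous: impl Fn(&ToyHeader) -> ToyHeader,
    on_current_chain: impl Fn(&ToyHeader) -> bool,
) -> ToyHeader {
    while current.height > 0 && !on_current_chain(&current) {
        current = get_previous(&current);
    }
    current
}

fn main() {
    // Headers at heights 0..=5 exist, but only heights <= 3 are on the current
    // chain, so the fork point for a header at height 5 is at height 3.
    let prev = |h: &ToyHeader| ToyHeader { height: h.height - 1 };
    let on_chain = |h: &ToyHeader| h.height <= 3;
    assert_eq!(fork_point(ToyHeader { height: 5 }, prev, on_chain).height, 3);
}
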

---- next changed file ----

@@ -237,7 +237,7 @@ impl Desegmenter {
         // Quick root check first:
         {
             let txhashset = self.txhashset.read();
-            txhashset.roots().validate(&self.archive_header)?;
+            txhashset.roots()?.validate(&self.archive_header)?;
         }
 
         // TODO: Possibly Keep track of this in the DB so we can pick up where we left off if needed

---- next changed file ----

@@ -479,19 +479,19 @@ impl TxHashSet {
     }
 
     /// Get MMR roots.
-    pub fn roots(&self) -> TxHashSetRoots {
+    pub fn roots(&self) -> Result<TxHashSetRoots, Error> {
         let output_pmmr = ReadonlyPMMR::at(&self.output_pmmr_h.backend, self.output_pmmr_h.size);
         let rproof_pmmr = ReadonlyPMMR::at(&self.rproof_pmmr_h.backend, self.rproof_pmmr_h.size);
         let kernel_pmmr = ReadonlyPMMR::at(&self.kernel_pmmr_h.backend, self.kernel_pmmr_h.size);
 
-        TxHashSetRoots {
+        Ok(TxHashSetRoots {
             output_roots: OutputRoots {
-                pmmr_root: output_pmmr.root().expect("no root, invalid tree"),
+                pmmr_root: output_pmmr.root().map_err(|_| Error::InvalidRoot)?,
                 bitmap_root: self.bitmap_accumulator.root(),
             },
-            rproof_root: rproof_pmmr.root().expect("no root, invalid tree"),
-            kernel_root: kernel_pmmr.root().expect("no root, invalid tree"),
-        }
+            rproof_root: rproof_pmmr.root().map_err(|_| Error::InvalidRoot)?,
+            kernel_root: kernel_pmmr.root().map_err(|_| Error::InvalidRoot)?,
+        })
     }
 
     /// Return Commit's MMR position
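
A self-contained sketch of the pattern in the roots() change above (toy types, not grin's): a panicking .expect("no root, invalid tree") becomes error propagation, so a corrupt or empty PMMR surfaces as an Err that callers decide how to handle (the Desegmenter hunk above uses ?, the DesegmenterRequestor hunk further down unwraps):

#[derive(Debug)]
enum ToyError {
    InvalidRoot,
}

struct ToyPmmr {
    root: Option<u64>,
}

impl ToyPmmr {
    fn root(&self) -> Result<u64, ()> {
        self.root.ok_or(())
    }
}

#[derive(Debug)]
struct ToyRoots {
    output_root: u64,
}

// Before: pmmr.root().expect("no root, invalid tree") panics on a corrupt tree.
// After: the failure is mapped into a domain error and propagated with `?`.
fn roots(pmmr: &ToyPmmr) -> Result<ToyRoots, ToyError> {
    Ok(ToyRoots {
        output_root: pmmr.root().map_err(|_| ToyError::InvalidRoot)?,
    })
}

fn main() {
    assert!(roots(&ToyPmmr { root: Some(42) }).is_ok());
    assert!(roots(&ToyPmmr { root: None }).is_err());
}
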
@@ -1606,6 +1606,7 @@ impl<'a> Extension<'a> {
         } else {
             let mut affected_pos = vec![];
             let mut current = head_header;
+            error!("REWINDING TWO");
             while header.height < current.height {
                 let block = batch.get_block(&current.hash())?;
                 let mut affected_pos_single_block = self.rewind_single_block(&block, batch)?;

---- next changed file ----

@@ -238,7 +238,7 @@ impl DesegmenterRequestor {
     }
 
     pub fn check_roots(&self) {
-        let roots = self.chain.txhashset().read().roots();
+        let roots = self.chain.txhashset().read().roots().unwrap();
         let archive_header = self.chain.txhashset_archive_header_header_only().unwrap();
         debug!("Archive Header is {:?}", archive_header);
         debug!("TXHashset output root is {:?}", roots);

---- next changed file ----

@@ -111,12 +111,12 @@ impl StateSync {
             if let Some(d) = desegmenter.write().as_mut() {
                 d.reset();
             };
-            if let Err(e) = self.chain.reset_chain_head(self.chain.genesis(), false) {
-                error!("pibd_sync restart: chain reset error = {}", e);
-            }
-            if let Err(e) = self.chain.reset_pibd_head() {
-                error!("pibd_sync restart: reset pibd_head error = {}", e);
-            }
+            if let Err(e) = self.chain.reset_chain_head_to_genesis() {
+                error!("pibd_sync restart: chain reset to genesis error = {}", e);
+            }
+            if let Err(e) = self.chain.reset_prune_lists() {
+                error!("pibd_sync restart: reset prune lists error = {}", e);
+            }