go back to storing header_head in the db (#3268)

* go back to storing header_head in the db

* simplify and cleanup
Antioch Peverell 2020-04-10 16:12:53 +01:00 committed by GitHub
parent 34ff103bb0
commit bab9f41343
8 changed files with 85 additions and 79 deletions
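
For context before the per-file diff: the commit returns to persisting the header chain tip as a serialized Tip under a dedicated single-byte db prefix, read back directly instead of being derived from the header MMR under a read lock. The sketch below illustrates that pattern only; HEADER_HEAD_PREFIX matches the prefix added in the store changes, while the Tip and Store types, the in-memory map, and main are simplified stand-ins for the real lmdb-backed ChainStore and Batch.

use std::collections::HashMap;

const HEADER_HEAD_PREFIX: u8 = b'G';

/// Simplified stand-in for the chain Tip (height plus hash of the tip header).
#[derive(Clone, Debug, PartialEq)]
struct Tip {
    height: u64,
    last_block_h: [u8; 32],
}

/// Illustrative stand-in for the lmdb-backed ChainStore/Batch in this diff.
struct Store {
    db: HashMap<Vec<u8>, Tip>,
}

impl Store {
    /// Save the header chain tip under its dedicated prefix key
    /// (mirrors Batch::save_header_head in the diff below).
    fn save_header_head(&mut self, t: &Tip) {
        self.db.insert(vec![HEADER_HEAD_PREFIX], t.clone());
    }

    /// Read the header chain tip straight from the db, with no header MMR
    /// access and no read lock (mirrors ChainStore::header_head below).
    fn header_head(&self) -> Result<Tip, String> {
        self.db
            .get([HEADER_HEAD_PREFIX].as_slice())
            .cloned()
            .ok_or_else(|| "HEADER_HEAD".to_owned())
    }
}

fn main() {
    let mut store = Store { db: HashMap::new() };
    let tip = Tip { height: 1_000, last_block_h: [0u8; 32] };
    store.save_header_head(&tip);
    assert_eq!(store.header_head(), Ok(tip));
}

In the actual change, a missing entry surfaces as the store's "not found" error via option_to_not_found, and setup_head migrates older nodes by deriving header_head once from the header MMR and saving it to the db.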


@@ -1212,25 +1212,11 @@ impl Chain {
.map_err(|e| ErrorKind::StoreErr(e, "chain tail".to_owned()).into())
}
/// Tip (head) of the header chain if read lock can be acquired reasonably quickly.
/// Used by the TUI when updating stats to avoid locking the TUI up.
pub fn try_header_head(&self, timeout: Duration) -> Result<Option<Tip>, Error> {
self.header_pmmr
.try_read_for(timeout)
.map(|ref pmmr| self.read_header_head(pmmr).map(Some))
.unwrap_or(Ok(None))
}
/// Tip (head) of the header chain.
pub fn header_head(&self) -> Result<Tip, Error> {
self.read_header_head(&self.header_pmmr.read())
}
/// Read head from the provided PMMR handle.
fn read_header_head(&self, pmmr: &txhashset::PMMRHandle<BlockHeader>) -> Result<Tip, Error> {
let hash = pmmr.head_hash()?;
let header = self.store.get_block_header(&hash)?;
Ok(Tip::from_header(&header))
self.store
.header_head()
.map_err(|e| ErrorKind::StoreErr(e, "header head".to_owned()).into())
}
/// Block header for the chain head
@@ -1422,9 +1408,12 @@ fn setup_head(
) -> Result<(), Error> {
let mut batch = store.batch()?;
// Apply the genesis header to header and sync MMRs to ensure they are non-empty.
// We read header_head and sync_head directly from the MMR and assume they are non-empty.
// Apply the genesis header to header and sync MMRs.
{
if batch.get_block_header(&genesis.hash()).is_err() {
batch.save_block_header(&genesis.header)?;
}
if header_pmmr.last_pos == 0 {
txhashset::header_extending(header_pmmr, &mut batch, |ext, _| {
ext.apply_header(&genesis.header)
@@ -1438,6 +1427,14 @@ fn setup_head(
}
}
// Setup our header_head if we do not already have one.
// Migrating back to header_head in db and some nodes may not have one.
if batch.header_head().is_err() {
let hash = header_pmmr.head_hash()?;
let header = batch.get_block_header(&hash)?;
batch.save_header_head(&Tip::from_header(&header))?;
}
// check if we have a head in store, otherwise the genesis block is it
let head_res = batch.head();
let mut head: Tip;
@@ -1518,7 +1515,6 @@ fn setup_head(
// Save the genesis header with a "zero" header_root.
// We will update this later once we have the correct header_root.
batch.save_block_header(&genesis.header)?;
batch.save_block(&genesis)?;
batch.save_spent_index(&genesis.hash(), &vec![])?;
batch.save_body_head(&Tip::from_header(&genesis.header))?;


@@ -215,6 +215,11 @@ pub fn sync_block_headers(
Ok(())
})?;
let header_head = ctx.batch.header_head()?;
if has_more_work(last_header, &header_head) {
update_header_head(&Tip::from_header(last_header), &mut ctx.batch)?;
}
Ok(())
}
@@ -235,12 +240,7 @@ pub fn process_block_header(header: &BlockHeader, ctx: &mut BlockContext<'_>) ->
// If it does not increase total_difficulty beyond our current header_head
// then we can (re)accept this header and process the full block (or request it).
// This header is on a fork and we should still accept it as the fork may eventually win.
let header_head = {
let hash = ctx.header_pmmr.head_hash()?;
let header = ctx.batch.get_block_header(&hash)?;
Tip::from_header(&header)
};
let header_head = ctx.batch.header_head()?;
if let Ok(existing) = ctx.batch.get_block_header(&header.hash()) {
if !has_more_work(&existing, &header_head) {
return Ok(());
@@ -260,6 +260,10 @@ pub fn process_block_header(header: &BlockHeader, ctx: &mut BlockContext<'_>) ->
validate_header(header, ctx)?;
add_block_header(header, &ctx.batch)?;
if has_more_work(header, &header_head) {
update_header_head(&Tip::from_header(header), &mut ctx.batch)?;
}
Ok(())
}
@@ -469,6 +473,19 @@ fn add_block_header(bh: &BlockHeader, batch: &store::Batch<'_>) -> Result<(), Er
Ok(())
}
fn update_header_head(head: &Tip, batch: &mut store::Batch<'_>) -> Result<(), Error> {
batch
.save_header_head(&head)
.map_err(|e| ErrorKind::StoreErr(e, "pipe save header head".to_owned()))?;
debug!(
"header head updated to {} at {}",
head.last_block_h, head.height
);
Ok(())
}
fn update_head(head: &Tip, batch: &mut store::Batch<'_>) -> Result<(), Error> {
batch
.save_body_head(&head)


@@ -33,6 +33,7 @@ const BLOCK_HEADER_PREFIX: u8 = b'h';
const BLOCK_PREFIX: u8 = b'b';
const HEAD_PREFIX: u8 = b'H';
const TAIL_PREFIX: u8 = b'T';
const HEADER_HEAD_PREFIX: u8 = b'G';
const OUTPUT_POS_PREFIX: u8 = b'p';
const BLOCK_INPUT_BITMAP_PREFIX: u8 = b'B';
const BLOCK_SUMS_PREFIX: u8 = b'M';
@@ -68,6 +69,13 @@ impl ChainStore {
option_to_not_found(self.db.get_ser(&[HEAD_PREFIX]), || "HEAD".to_owned())
}
/// The current header head (may differ from chain head).
pub fn header_head(&self) -> Result<Tip, Error> {
option_to_not_found(self.db.get_ser(&[HEADER_HEAD_PREFIX]), || {
"HEADER_HEAD".to_owned()
})
}
/// The current chain "tail" (earliest block in the store).
pub fn tail(&self) -> Result<Tip, Error> {
option_to_not_found(self.db.get_ser(&[TAIL_PREFIX]), || "TAIL".to_owned())
@@ -155,6 +163,13 @@ impl<'a> Batch<'a> {
option_to_not_found(self.db.get_ser(&[TAIL_PREFIX]), || "TAIL".to_owned())
}
/// The current header head (may differ from chain head).
pub fn header_head(&self) -> Result<Tip, Error> {
option_to_not_found(self.db.get_ser(&[HEADER_HEAD_PREFIX]), || {
"HEADER_HEAD".to_owned()
})
}
/// Header of the block at the head of the block chain (not the same thing as header_head).
pub fn head_header(&self) -> Result<BlockHeader, Error> {
self.get_block_header(&self.head()?.last_block_h)
@@ -170,6 +185,11 @@ impl<'a> Batch<'a> {
self.db.put_ser(&[TAIL_PREFIX], t)
}
/// Save header head to db.
pub fn save_header_head(&self, t: &Tip) -> Result<(), Error> {
self.db.put_ser(&[HEADER_HEAD_PREFIX], t)
}
/// get block
pub fn get_block(&self, h: &Hash) -> Result<Block, Error> {
option_to_not_found(


@@ -481,13 +481,7 @@ where
trace!("Starting new txhashset (readonly) extension.");
let head = batch.head()?;
// Find header head based on current header MMR (the rightmost leaf node in the MMR).
let header_head = {
let hash = handle.head_hash()?;
let header = batch.get_block_header(&hash)?;
Tip::from_header(&header)
};
let header_head = batch.header_head()?;
let res = {
let header_pmmr = PMMR::at(&mut handle.backend, handle.last_pos);
@@ -586,13 +580,7 @@ where
let bitmap_accumulator: BitmapAccumulator;
let head = batch.head()?;
// Find header head based on current header MMR (the rightmost leaf node in the MMR).
let header_head = {
let hash = header_pmmr.head_hash()?;
let header = batch.get_block_header(&hash)?;
Tip::from_header(&header)
};
let header_head = batch.header_head()?;
// create a child transaction so if the state is rolled back by itself, all
// index saving can be undone
@@ -671,7 +659,8 @@ where
// index saving can be undone
let child_batch = batch.child()?;
// Find chain head based on current MMR (the rightmost leaf node in the MMR).
// Note: Extending either the sync_head or header_head MMR here.
// Use underlying MMR to determine the "head".
let head = match handle.head_hash() {
Ok(hash) => {
let header = child_batch.get_block_header(&hash)?;


@@ -52,8 +52,8 @@ pub struct ServerStats {
pub peer_count: u32,
/// Chain head
pub chain_stats: ChainStats,
/// sync header head
pub header_stats: Option<ChainStats>,
/// Header head (may differ from chain head)
pub header_stats: ChainStats,
/// Whether we're currently syncing
pub sync_status: SyncStatus,
/// Handle to current stratum server stats


@@ -502,16 +502,13 @@ impl Server {
total_difficulty: head.total_difficulty(),
};
let header_stats = match self.chain.try_header_head(read_timeout)? {
Some(head) => self.chain.get_block_header(&head.hash()).map(|header| {
Some(ChainStats {
let header_head = self.chain.header_head()?;
let header = self.chain.get_block_header(&header_head.hash())?;
let header_stats = ChainStats {
latest_timestamp: header.timestamp,
height: header.height,
last_block_h: header.prev_hash,
total_difficulty: header.total_difficulty(),
})
})?,
_ => None,
};
let disk_usage_bytes = WalkDir::new(&self.config.db_root)


@@ -182,18 +182,7 @@ impl SyncRunner {
// if syncing is needed
let head = unwrap_or_restart_loop!(self.chain.head());
let tail = self.chain.tail().unwrap_or_else(|_| head.clone());
// We still do not fully understand what is blocking this but if this blocks here after
// we download and validate the txhashset we do not reliably proceed to block_sync,
// potentially blocking for an extended period of time (> 10 mins).
// Does not appear to be deadlock as it does resolve itself eventually.
// So as a workaround we try_header_head with a relatively short timeout and simply
// retry the syncer loop.
let maybe_header_head =
unwrap_or_restart_loop!(self.chain.try_header_head(time::Duration::from_secs(1)));
let header_head = unwrap_or_restart_loop!(
maybe_header_head.ok_or("failed to obtain lock for try_header_head")
);
let header_head = unwrap_or_restart_loop!(self.chain.header_head());
// run each sync stage, each of them deciding whether they're needed
// except for state sync that only runs if body sync return true (means txhashset is needed)


@@ -270,20 +270,18 @@ impl TUIStatusListener for TUIStatusView {
c.call_on_id("chain_timestamp", |t: &mut TextView| {
t.set_content(stats.chain_stats.latest_timestamp.to_string());
});
if let Some(header_stats) = &stats.header_stats {
c.call_on_id("basic_header_tip_hash", |t: &mut TextView| {
t.set_content(header_stats.last_block_h.to_string() + "...");
t.set_content(stats.header_stats.last_block_h.to_string() + "...");
});
c.call_on_id("basic_header_chain_height", |t: &mut TextView| {
t.set_content(header_stats.height.to_string());
t.set_content(stats.header_stats.height.to_string());
});
c.call_on_id("basic_header_total_difficulty", |t: &mut TextView| {
t.set_content(header_stats.total_difficulty.to_string());
t.set_content(stats.header_stats.total_difficulty.to_string());
});
c.call_on_id("basic_header_timestamp", |t: &mut TextView| {
t.set_content(header_stats.latest_timestamp.to_string());
t.set_content(stats.header_stats.latest_timestamp.to_string());
});
}
if let Some(tx_stats) = &stats.tx_stats {
c.call_on_id("tx_pool_size", |t: &mut TextView| {
t.set_content(tx_stats.tx_pool_size.to_string());