feat: moved optimism commands to crate and removed from bin (#9242)

Co-authored-by: Matthias Seitz <matthias.seitz@outlook.de>
Authored by Luca Provini on 2024-07-03 16:39:31 +02:00, committed by GitHub
parent 1998f44b1b
commit 08fc298e55
10 changed files with 192 additions and 6 deletions


@@ -67,6 +67,8 @@ reth-consensus.workspace = true
reth-optimism-primitives.workspace = true
reth-engine-util.workspace = true
reth-prune.workspace = true
+reth-stages-api.workspace = true
+reth-optimism-cli = { workspace = true, optional = true }
# crypto
alloy-rlp.workspace = true
@@ -135,6 +137,8 @@ min-debug-logs = ["tracing/release_max_level_debug"]
min-trace-logs = ["tracing/release_max_level_trace"]
optimism = [
"dep:reth-optimism-cli",
"reth-optimism-cli?/optimism",
"reth-primitives/optimism",
"reth-rpc/optimism",
"reth-provider/optimism",


@@ -197,11 +197,11 @@ pub enum Commands<Ext: clap::Args + fmt::Debug = NoArgs> {
/// This syncs RLP encoded OP blocks below Bedrock from a file, without executing.
#[cfg(feature = "optimism")]
#[command(name = "import-op")]
-ImportOp(crate::commands::import_op::ImportOpCommand),
+ImportOp(reth_optimism_cli::ImportOpCommand),
/// This imports RLP encoded receipts from a file.
#[cfg(feature = "optimism")]
#[command(name = "import-receipts-op")]
-ImportReceiptsOp(crate::commands::import_receipts_op::ImportReceiptsOpCommand),
+ImportReceiptsOp(reth_optimism_cli::ImportReceiptsOpCommand),
/// Dumps genesis block JSON configuration to stdout.
DumpGenesis(dump_genesis::DumpGenesisCommand),
/// Database debugging utilities
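
For context, a minimal, self-contained sketch of the pattern this hunk switches to: the Optimism subcommand types now live in a separate crate, and the binary's Commands enum references them through that crate's public re-exports, gated behind the optimism feature. Everything named op_cli, DumpGenesis, and the synchronous execute below is a hypothetical stand-in for illustration, not the real reth-optimism-cli API; the sketch assumes clap (with the derive feature) and eyre as dependencies and an optimism feature declared in Cargo.toml.

use clap::{Parser, Subcommand};

mod op_cli {
    use clap::Args;

    /// Hypothetical stand-in for `reth_optimism_cli::ImportOpCommand`.
    #[derive(Debug, Args)]
    pub struct ImportOpCommand {
        /// The path to a block file for import.
        pub path: std::path::PathBuf,
    }

    impl ImportOpCommand {
        /// Stand-in for the real async `execute`; kept synchronous for brevity.
        pub fn execute(self) -> eyre::Result<()> {
            println!("importing OP blocks from {}", self.path.display());
            Ok(())
        }
    }
}

#[derive(Debug, Parser)]
struct Cli {
    #[command(subcommand)]
    command: Commands,
}

#[derive(Debug, Subcommand)]
enum Commands {
    /// Syncs RLP encoded OP blocks below Bedrock from a file, without executing.
    #[cfg(feature = "optimism")]
    #[command(name = "import-op")]
    ImportOp(op_cli::ImportOpCommand),
    /// Placeholder variant so the sketch also builds without the feature.
    DumpGenesis,
}

fn main() -> eyre::Result<()> {
    match Cli::parse().command {
        #[cfg(feature = "optimism")]
        Commands::ImportOp(cmd) => cmd.execute(),
        Commands::DumpGenesis => {
            println!("(genesis JSON would be dumped here)");
            Ok(())
        }
    }
}

With this layout, disabling the optimism feature compiles out both the dependency and the subcommand, which is what the "dep:" and "?/" entries in the Cargo.toml hunk above arrange.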


@@ -1,148 +0,0 @@
//! Command that initializes the node by importing OP Mainnet chain segment below Bedrock, from a
//! file.
use crate::{commands::import::build_import_pipeline, version::SHORT_VERSION};
use clap::Parser;
use reth_cli_commands::common::{AccessRights, Environment, EnvironmentArgs};
use reth_consensus::noop::NoopConsensus;
use reth_db::tables;
use reth_db_api::transaction::DbTx;
use reth_downloaders::file_client::{
ChunkedFileReader, FileClient, DEFAULT_BYTE_LEN_CHUNK_CHAIN_FILE,
};
use reth_optimism_primitives::bedrock_import::is_dup_tx;
use reth_provider::StageCheckpointReader;
use reth_prune::PruneModes;
use reth_stages::StageId;
use reth_static_file::StaticFileProducer;
use std::{path::PathBuf, sync::Arc};
use tracing::{debug, error, info};
/// Syncs RLP encoded blocks from a file.
#[derive(Debug, Parser)]
pub struct ImportOpCommand {
#[command(flatten)]
env: EnvironmentArgs,
/// Chunk byte length to read from file.
#[arg(long, value_name = "CHUNK_LEN", verbatim_doc_comment)]
chunk_len: Option<u64>,
/// The path to a block file for import.
///
/// The online stages (headers and bodies) are replaced by a file import, after which the
/// remaining stages are executed.
#[arg(value_name = "IMPORT_PATH", verbatim_doc_comment)]
path: PathBuf,
}
impl ImportOpCommand {
/// Execute `import` command
pub async fn execute(self) -> eyre::Result<()> {
info!(target: "reth::cli", "reth {} starting", SHORT_VERSION);
info!(target: "reth::cli",
"Disabled stages requiring state, since cannot execute OVM state changes"
);
debug!(target: "reth::cli",
chunk_byte_len=self.chunk_len.unwrap_or(DEFAULT_BYTE_LEN_CHUNK_CHAIN_FILE),
"Chunking chain import"
);
let Environment { provider_factory, config, .. } = self.env.init(AccessRights::RW)?;
// we use noop here because we expect the inputs to be valid
let consensus = Arc::new(NoopConsensus::default());
// open file
let mut reader = ChunkedFileReader::new(&self.path, self.chunk_len).await?;
let mut total_decoded_blocks = 0;
let mut total_decoded_txns = 0;
let mut total_filtered_out_dup_txns = 0;
while let Some(mut file_client) = reader.next_chunk::<FileClient>().await? {
// create a new FileClient from chunk read from file
info!(target: "reth::cli",
"Importing chain file chunk"
);
let tip = file_client.tip().ok_or(eyre::eyre!("file client has no tip"))?;
info!(target: "reth::cli", "Chain file chunk read");
total_decoded_blocks += file_client.headers_len();
total_decoded_txns += file_client.total_transactions();
for (block_number, body) in file_client.bodies_iter_mut() {
body.transactions.retain(|_| {
if is_dup_tx(block_number) {
total_filtered_out_dup_txns += 1;
return false
}
true
})
}
let (mut pipeline, events) = build_import_pipeline(
&config,
provider_factory.clone(),
&consensus,
Arc::new(file_client),
StaticFileProducer::new(provider_factory.clone(), PruneModes::default()),
true,
)
.await?;
// override the tip
pipeline.set_tip(tip);
debug!(target: "reth::cli", ?tip, "Tip manually set");
let provider = provider_factory.provider()?;
let latest_block_number =
provider.get_stage_checkpoint(StageId::Finish)?.map(|ch| ch.block_number);
tokio::spawn(reth_node_events::node::handle_events(
None,
latest_block_number,
events,
provider_factory.db_ref().clone(),
));
// Run pipeline
info!(target: "reth::cli", "Starting sync pipeline");
tokio::select! {
res = pipeline.run() => res?,
_ = tokio::signal::ctrl_c() => {},
}
}
let provider = provider_factory.provider()?;
let total_imported_blocks = provider.tx_ref().entries::<tables::HeaderNumbers>()?;
let total_imported_txns = provider.tx_ref().entries::<tables::TransactionHashNumbers>()?;
if total_decoded_blocks != total_imported_blocks ||
total_decoded_txns != total_imported_txns + total_filtered_out_dup_txns
{
error!(target: "reth::cli",
total_decoded_blocks,
total_imported_blocks,
total_decoded_txns,
total_filtered_out_dup_txns,
total_imported_txns,
"Chain was partially imported"
);
}
info!(target: "reth::cli",
total_imported_blocks,
total_imported_txns,
total_decoded_blocks,
total_decoded_txns,
total_filtered_out_dup_txns,
"Chain file imported"
);
Ok(())
}
}
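
As a reference for the filtering step above, here is a minimal, self-contained sketch of the same retain-with-counter pattern; BlockBody, the block numbers, and this local is_dup_tx are hypothetical placeholders for reth's real types and reth_optimism_primitives::bedrock_import::is_dup_tx.

/// Stand-in body type holding only what the sketch needs.
struct BlockBody {
    transactions: Vec<String>, // stand-in for real transaction objects
}

/// Hypothetical stand-in for `bedrock_import::is_dup_tx`: true for block
/// numbers whose transactions are known duplicates.
fn is_dup_tx(block_number: u64) -> bool {
    const DUP_BLOCKS: &[u64] = &[1_000_002, 1_000_005]; // hypothetical values
    DUP_BLOCKS.contains(&block_number)
}

/// Drops transactions of duplicate blocks and returns how many were removed.
fn filter_dup_txns(bodies: &mut [(u64, BlockBody)]) -> usize {
    let mut total_filtered_out_dup_txns = 0;
    for (block_number, body) in bodies.iter_mut() {
        let block_number = *block_number; // copy the number for use inside the closure
        body.transactions.retain(|_| {
            if is_dup_tx(block_number) {
                total_filtered_out_dup_txns += 1;
                return false;
            }
            true
        });
    }
    total_filtered_out_dup_txns
}

fn main() {
    let mut bodies = vec![
        (1_000_001, BlockBody { transactions: vec!["tx-a".into()] }),
        (1_000_002, BlockBody { transactions: vec!["tx-b".into(), "tx-c".into()] }),
    ];
    let filtered = filter_dup_txns(&mut bodies);
    // The block at 1_000_002 is flagged as a duplicate, so both its transactions are dropped.
    assert_eq!(filtered, 2);
    assert!(bodies[1].1.transactions.is_empty());
}

The command above applies the same idea per file chunk, then compares the total decoded counts against what actually landed in the database to detect a partial import.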


@@ -1,210 +0,0 @@
//! Command that imports OP mainnet receipts from Bedrock datadir, exported via
//! <https://github.com/testinprod-io/op-geth/pull/1>.
use clap::Parser;
use reth_cli_commands::common::{AccessRights, Environment, EnvironmentArgs};
use reth_db::tables;
use reth_db_api::{database::Database, transaction::DbTx};
use reth_downloaders::{
file_client::{ChunkedFileReader, DEFAULT_BYTE_LEN_CHUNK_CHAIN_FILE},
file_codec_ovm_receipt::HackReceiptFileCodec,
receipt_file_client::ReceiptFileClient,
};
use reth_execution_types::ExecutionOutcome;
use reth_node_core::version::SHORT_VERSION;
use reth_optimism_primitives::bedrock_import::is_dup_tx;
use reth_primitives::Receipts;
use reth_provider::{
OriginalValuesKnown, ProviderFactory, StageCheckpointReader, StateWriter,
StaticFileProviderFactory, StaticFileWriter, StatsReader,
};
use reth_stages::StageId;
use reth_static_file_types::StaticFileSegment;
use std::path::{Path, PathBuf};
use tracing::{debug, error, info, trace};
/// Imports OP mainnet receipts from a Bedrock datadir export.
#[derive(Debug, Parser)]
pub struct ImportReceiptsOpCommand {
#[command(flatten)]
env: EnvironmentArgs,
/// Chunk byte length to read from file.
#[arg(long, value_name = "CHUNK_LEN", verbatim_doc_comment)]
chunk_len: Option<u64>,
/// The path to a receipts file for import. File must use `HackReceiptFileCodec` (used for
/// exporting OP chain segment below Bedrock block via testinprod/op-geth).
///
/// <https://github.com/testinprod-io/op-geth/pull/1>
#[arg(value_name = "IMPORT_PATH", verbatim_doc_comment)]
path: PathBuf,
}
impl ImportReceiptsOpCommand {
/// Execute `import` command
pub async fn execute(self) -> eyre::Result<()> {
info!(target: "reth::cli", "reth {} starting", SHORT_VERSION);
debug!(target: "reth::cli",
chunk_byte_len=self.chunk_len.unwrap_or(DEFAULT_BYTE_LEN_CHUNK_CHAIN_FILE),
"Chunking receipts import"
);
let Environment { provider_factory, .. } = self.env.init(AccessRights::RW)?;
import_receipts_from_file(
provider_factory,
self.path,
self.chunk_len,
|first_block, receipts: &mut Receipts| {
let mut total_filtered_out_dup_txns = 0;
for (index, receipts_for_block) in receipts.iter_mut().enumerate() {
if is_dup_tx(first_block + index as u64) {
receipts_for_block.clear();
total_filtered_out_dup_txns += 1;
}
}
total_filtered_out_dup_txns
},
)
.await
}
}
/// Imports receipts into static files. Takes a filter callback as a parameter that returns the
/// total number of filtered-out receipts.
///
/// Caution! The filter callback must replace completely filtered-out receipts for a block with
/// empty vectors, rather than `vec![None]`, because the code that writes to static files expects
/// indices in the [`Receipts`] list to map to sequential block numbers.
pub async fn import_receipts_from_file<DB, P, F>(
provider_factory: ProviderFactory<DB>,
path: P,
chunk_len: Option<u64>,
mut filter: F,
) -> eyre::Result<()>
where
DB: Database,
P: AsRef<Path>,
F: FnMut(u64, &mut Receipts) -> usize,
{
let provider = provider_factory.provider_rw()?;
let static_file_provider = provider_factory.static_file_provider();
let total_imported_txns = static_file_provider
.count_entries::<tables::Transactions>()
.expect("transaction static files must exist before importing receipts");
let highest_block_transactions = static_file_provider
.get_highest_static_file_block(StaticFileSegment::Transactions)
.expect("transaction static files must exist before importing receipts");
for stage in StageId::ALL {
let checkpoint = provider.get_stage_checkpoint(stage)?;
trace!(target: "reth::cli",
?stage,
?checkpoint,
"Read stage checkpoints from db"
);
}
// prepare the tx for `write_to_storage`
let tx = provider.into_tx();
let mut total_decoded_receipts = 0;
let mut total_filtered_out_dup_txns = 0;
// open file
let mut reader = ChunkedFileReader::new(path, chunk_len).await?;
while let Some(file_client) =
reader.next_chunk::<ReceiptFileClient<HackReceiptFileCodec>>().await?
{
// create a new file client from chunk read from file
let ReceiptFileClient {
mut receipts,
first_block,
total_receipts: total_receipts_chunk,
..
} = file_client;
// mark these as decoded
total_decoded_receipts += total_receipts_chunk;
total_filtered_out_dup_txns += filter(first_block, &mut receipts);
info!(target: "reth::cli",
first_receipts_block=?first_block,
total_receipts_chunk,
"Importing receipt file chunk"
);
// We're reusing receipt writing code internal to
// `ExecutionOutcome::write_to_storage`, so we just use a default empty
// `BundleState`.
let execution_outcome =
ExecutionOutcome::new(Default::default(), receipts, first_block, Default::default());
let static_file_producer =
static_file_provider.get_writer(first_block, StaticFileSegment::Receipts)?;
// finally, write the receipts
execution_outcome.write_to_storage::<DB::TXMut>(
&tx,
Some(static_file_producer),
OriginalValuesKnown::Yes,
)?;
}
tx.commit()?;
// Static files work in file ranges: internally, a commit already happens whenever the next
// file range is created, so we only need to commit explicitly once at the end.
static_file_provider.commit()?;
if total_decoded_receipts == 0 {
error!(target: "reth::cli", "No receipts were imported, ensure the receipt file is valid and not empty");
return Ok(())
}
let total_imported_receipts = static_file_provider
.count_entries::<tables::Receipts>()
.expect("static files must exist after ensuring we decoded more than zero");
if total_imported_receipts + total_filtered_out_dup_txns != total_decoded_receipts {
error!(target: "reth::cli",
total_decoded_receipts,
total_imported_receipts,
total_filtered_out_dup_txns,
"Receipts were partially imported"
);
}
if total_imported_receipts != total_imported_txns {
error!(target: "reth::cli",
total_imported_receipts,
total_imported_txns,
"Receipts inconsistent with transactions"
);
}
let highest_block_receipts = static_file_provider
.get_highest_static_file_block(StaticFileSegment::Receipts)
.expect("static files must exist after ensuring we decoded more than zero");
if highest_block_receipts != highest_block_transactions {
error!(target: "reth::cli",
highest_block_receipts,
highest_block_transactions,
"Height of receipts inconsistent with transactions"
);
}
info!(target: "reth::cli",
total_imported_receipts,
total_decoded_receipts,
total_filtered_out_dup_txns,
"Receipt file imported"
);
Ok(())
}
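
To make the filter-callback contract above concrete, a minimal, self-contained sketch follows; the Receipt and Receipts aliases and the duplicate predicate are hypothetical stand-ins rather than reth's actual definitions, but the shape of the callback and the "clear, don't replace with vec![None]" rule mirror the doc comment on import_receipts_from_file.

/// Hypothetical stand-in for a real receipt type.
type Receipt = String;

/// Stand-in for the per-block receipts handed to the filter callback:
/// outer index `i` corresponds to block `first_block + i`.
type Receipts = Vec<Vec<Option<Receipt>>>;

/// Hypothetical duplicate-block predicate, mirroring `is_dup_tx` above.
fn is_dup_tx(block_number: u64) -> bool {
    block_number == 4_000_001 // hypothetical value
}

/// A filter in the shape `import_receipts_from_file` expects: clear the
/// receipts of duplicate blocks and return the number of filtered-out entries.
fn filter(first_block: u64, receipts: &mut Receipts) -> usize {
    let mut total_filtered_out_dup_txns = 0;
    for (index, receipts_for_block) in receipts.iter_mut().enumerate() {
        if is_dup_tx(first_block + index as u64) {
            // Correct: leave an empty vec so outer indices still map to
            // sequential block numbers. Replacing with `vec![None]` instead
            // would violate the contract documented above.
            receipts_for_block.clear();
            total_filtered_out_dup_txns += 1;
        }
    }
    total_filtered_out_dup_txns
}

fn main() {
    let mut receipts: Receipts = vec![
        vec![Some("r0".to_string())],       // block 4_000_000
        vec![Some("r1".to_string()), None], // block 4_000_001 (duplicate)
    ];
    let filtered = filter(4_000_000, &mut receipts);
    assert_eq!(filtered, 1);
    assert!(receipts[1].is_empty()); // cleared, not replaced with vec![None]
}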


@@ -4,8 +4,6 @@ pub mod config_cmd;
pub mod debug_cmd;
pub mod dump_genesis;
pub mod import;
-pub mod import_op;
-pub mod import_receipts_op;
pub mod init_cmd;
pub mod init_state;
pub mod node;