From e42fb32c7fb6fc570b043686622e82dfaedd7005 Mon Sep 17 00:00:00 2001
From: DaniPopes <57450786+DaniPopes@users.noreply.github.com>
Date: Wed, 6 Mar 2024 14:30:18 +0100
Subject: [PATCH] ci/test: speed up tests (#6987)

---
 .github/workflows/hive.yml                   |  6 +--
 .github/workflows/integration.yml            | 10 ++--
 .github/workflows/unit.yml                   |  6 +--
 Cargo.toml                                   |  7 +++
 Dockerfile                                   |  8 ++-
 .../storage/db/src/implementation/mdbx/tx.rs | 18 ++++---
 crates/trie/src/trie.rs                      | 51 +++++++++----------
 testing/ef-tests/Cargo.toml                  |  1 +
 8 files changed, 59 insertions(+), 48 deletions(-)

diff --git a/.github/workflows/hive.yml b/.github/workflows/hive.yml
index 589e1f2ba..2b8b59003 100644
--- a/.github/workflows/hive.yml
+++ b/.github/workflows/hive.yml
@@ -29,7 +29,7 @@ jobs:
         with:
           context: .
           tags: ghcr.io/paradigmxyz/reth:latest
-          build-args: BUILD_PROFILE=hivetests
+          build-args: BUILD_PROFILE=hivetests,FEATURES=asm-keccak
           outputs: type=docker,dest=./artifacts/reth_image.tar
           cache-from: type=gha
           cache-to: type=gha,mode=max
@@ -198,7 +198,7 @@ jobs:
           echo "Simulator failed, creating issue"
           # Check if issue already exists
           # get all issues with the label C-hivetest, loop over each page and check if the issue already exists
-          
+
           existing_issues=$(gh api /repos/paradigmxyz/reth/issues -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" -F "labels=C-hivetest" --method GET | jq '.[].title')
           if [[ $existing_issues == *"Hive Test Failure: ${{ matrix.sim }}"* ]]; then
             echo "Issue already exists"
@@ -211,7 +211,7 @@ jobs:
             /repos/${{ github.repository }}/issues \
             -f title='Hive Test Failure: ${{ matrix.sim }}' \
             -f body="!!!!!!! This is an automated issue created by the hive test failure !!!!!!!

           The hive test for ${{ matrix.sim }} failed. Please investigate and fix the issue.

           [Link to the failed run](https://github.com/paradigmxyz/reth/actions/runs/${{ github.run_id }})" \
-            -f "labels[]=C-hivetest" 
+            -f "labels[]=C-hivetest"
       - name: Print simulator output
         if: ${{ failure() }}
         run: |
diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml
index 8d1ee765e..cff3ec64e 100644
--- a/.github/workflows/integration.yml
+++ b/.github/workflows/integration.yml
@@ -18,14 +18,13 @@ concurrency:

 jobs:
   test:
-    name: test / ${{ matrix.network }} (${{ matrix.partition }}/2)
+    name: test / ${{ matrix.network }}
     runs-on:
       group: Reth
     env:
       RUST_BACKTRACE: 1
     strategy:
       matrix:
-        partition: [1, 2]
         network: ["ethereum", "optimism"]
     timeout-minutes: 60
     steps:
@@ -40,10 +39,9 @@ jobs:
       - name: Run tests
         run: |
           cargo nextest run \
-            --locked --features "${{ matrix.network }}" \
+            --locked --features "asm-keccak ${{ matrix.network }}" \
             --workspace --exclude examples --exclude ef-tests \
-            --partition hash:${{ matrix.partition }}/2 \
-            -E 'kind(test)'
+            -E "kind(test)"

   sync:
     name: sync / 100k blocks
@@ -63,7 +61,7 @@ jobs:
           cache-on-failure: true
       - name: Run sync
         run: |
-          cargo run --release --features jemalloc,min-error-logs --bin reth \
+          cargo run --release --features asm-keccak,jemalloc,min-error-logs --bin reth \
             -- node \
             --debug.tip 0x91c90676cab257a59cd956d7cb0bceb9b1a71d79755c23c7277a0697ccfaf8c4 \
             --debug.max-block 100000 \
diff --git a/.github/workflows/unit.yml b/.github/workflows/unit.yml
index 8a3b45c6d..91a247fac 100644
--- a/.github/workflows/unit.yml
+++ b/.github/workflows/unit.yml
@@ -38,10 +38,10 @@ jobs:
      - name: Run tests
        run: |
          cargo nextest run \
-            --locked --features "${{ matrix.network }}" \
+            --locked --features "asm-keccak ${{ matrix.network }}" \
            --workspace --exclude examples --exclude ef-tests \
            --partition hash:${{ matrix.partition }}/2 \
-            -E "kind(lib) | kind(bin) | kind(proc-macro)"
+            -E "!kind(test)"

   state:
     name: Ethereum state tests
@@ -65,7 +65,7 @@ jobs:
       - uses: Swatinem/rust-cache@v2
         with:
           cache-on-failure: true
-      - run: cargo nextest run --release -p ef-tests --features ef-tests
+      - run: cargo nextest run --release -p ef-tests --features "asm-keccak ef-tests"

   doc:
     name: doc tests (${{ matrix.network }})
diff --git a/Cargo.toml b/Cargo.toml
index c8c8f84e9..ca400b97e 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -96,6 +96,13 @@ homepage = "https://paradigmxyz.github.io/reth"
 repository = "https://github.com/paradigmxyz/reth"
 exclude = [".github/"]

+# Speed up tests.
+[profile.dev.package]
+proptest.opt-level = 3
+rand_xorshift.opt-level = 3
+rand_chacha.opt-level = 3
+unarray.opt-level = 3
+
 # Meant for testing - all optimizations, but with debug assertions and overflow checks.
 [profile.hivetests]
 inherits = "test"
diff --git a/Dockerfile b/Dockerfile
index 7c0e5d987..ab192886b 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -12,10 +12,14 @@ RUN cargo chef prepare --recipe-path recipe.json
 FROM chef AS builder
 COPY --from=planner /app/recipe.json recipe.json

-# Set the build profile to be release
+# Build profile, release by default
 ARG BUILD_PROFILE=release
 ENV BUILD_PROFILE $BUILD_PROFILE

+# Extra Cargo features
+ARG FEATURES=""
+ENV FEATURES $FEATURES
+
 # Install system dependencies
 RUN apt-get update && apt-get -y upgrade && apt-get install -y libclang-dev pkg-config
@@ -24,7 +28,7 @@ RUN cargo chef cook --profile $BUILD_PROFILE --recipe-path recipe.json

 # Build application
 COPY . .
-RUN cargo build --profile $BUILD_PROFILE --no-default-features --locked --bin reth
+RUN cargo build --profile $BUILD_PROFILE --features "$FEATURES" --no-default-features --locked --bin reth

 # ARG is not resolved in COPY so we have to hack around it by copying the
 # binary to a temporary location
diff --git a/crates/storage/db/src/implementation/mdbx/tx.rs b/crates/storage/db/src/implementation/mdbx/tx.rs
index ee8b02eff..5c0a15abe 100644
--- a/crates/storage/db/src/implementation/mdbx/tx.rs
+++ b/crates/storage/db/src/implementation/mdbx/tx.rs
@@ -415,12 +415,13 @@ mod tests {
         // Give the `TxnManager` some time to time out the transaction.
         sleep(MAX_DURATION + Duration::from_millis(100));

+        // Transaction has not timed out.
         assert_eq!(
-            tx.get::(0).err(),
-            Some(DatabaseError::Open(reth_libmdbx::Error::NotFound.into()))
-        ); // Transaction is not timeout-ed
+            tx.get::(0),
+            Err(DatabaseError::Open(reth_libmdbx::Error::NotFound.into()))
+        );
+        // Backtrace is not recorded.
         assert!(!tx.metrics_handler.unwrap().backtrace_recorded.load(Ordering::Relaxed));
-        // Backtrace is not recorded
     }

     #[test]
@@ -437,11 +438,12 @@ mod tests {
         // Give the `TxnManager` some time to time out the transaction.
         sleep(MAX_DURATION + Duration::from_millis(100));

+        // Transaction has timed out.
         assert_eq!(
-            tx.get::(0).err(),
-            Some(DatabaseError::Open(reth_libmdbx::Error::ReadTransactionTimeout.into()))
-        ); // Transaction is timeout-ed
+            tx.get::(0),
+            Err(DatabaseError::Open(reth_libmdbx::Error::ReadTransactionTimeout.into()))
+        );
+        // Backtrace is recorded.
         assert!(tx.metrics_handler.unwrap().backtrace_recorded.load(Ordering::Relaxed));
-        // Backtrace is recorded
     }
 }
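Aside (not part of the patch): the tx.rs hunks above switch the assertions from comparing `.err()` against `Some(..)` to comparing the whole `Result` against `Err(..)`, so an unexpected `Ok` value shows up in the failure message instead of collapsing to `None`. A minimal, self-contained sketch of that assertion style; `parse_port` is an illustrative helper, not reth code:

    // Hypothetical helper used only to demonstrate the assertion style.
    fn parse_port(s: &str) -> Result<u16, String> {
        s.parse::<u16>().map_err(|e| e.to_string())
    }

    fn main() {
        // Old style: an unexpected `Ok(..)` collapses to `None`, hiding the value on failure.
        assert_eq!(parse_port("not-a-port").err(), Some("invalid digit found in string".to_string()));
        // New style, as in the hunks above: the full `Result` is printed on mismatch.
        assert_eq!(parse_port("not-a-port"), Err("invalid digit found in string".to_string()));
        println!("assertions passed");
    }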
diff --git a/crates/trie/src/trie.rs b/crates/trie/src/trie.rs
index e7e1be6df..08742122d 100644
--- a/crates/trie/src/trie.rs
+++ b/crates/trie/src/trie.rs
@@ -1249,37 +1249,36 @@ mod tests {
         #![proptest_config(ProptestConfig { cases: 128, ..ProptestConfig::default() })]

+        #[test]
         fn fuzz_state_root_incremental(account_changes: [BTreeMap; 5]) {
-            tokio::runtime::Runtime::new().unwrap().block_on(async {
-                let factory = create_test_provider_factory();
-                let tx = factory.provider_rw().unwrap();
-                let mut hashed_account_cursor = tx.tx_ref().cursor_write::().unwrap();
+            let factory = create_test_provider_factory();
+            let tx = factory.provider_rw().unwrap();
+            let mut hashed_account_cursor = tx.tx_ref().cursor_write::().unwrap();

-                let mut state = BTreeMap::default();
-                for accounts in account_changes {
-                    let should_generate_changeset = !state.is_empty();
-                    let mut changes = PrefixSetMut::default();
-                    for (hashed_address, balance) in accounts.clone() {
-                        hashed_account_cursor.upsert(hashed_address, Account { balance, ..Default::default() }).unwrap();
-                        if should_generate_changeset {
-                            changes.insert(Nibbles::unpack(hashed_address));
-                        }
+            let mut state = BTreeMap::default();
+            for accounts in account_changes {
+                let should_generate_changeset = !state.is_empty();
+                let mut changes = PrefixSetMut::default();
+                for (hashed_address, balance) in accounts.clone() {
+                    hashed_account_cursor.upsert(hashed_address, Account { balance, ..Default::default() }).unwrap();
+                    if should_generate_changeset {
+                        changes.insert(Nibbles::unpack(hashed_address));
                     }
-
-                    let (state_root, trie_updates) = StateRoot::from_tx(tx.tx_ref())
-                        .with_prefix_sets(TriePrefixSets { account_prefix_set: changes.freeze(), ..Default::default() })
-                        .root_with_updates()
-                        .unwrap();
-
-                    state.append(&mut accounts.clone());
-                    let expected_root = state_root_prehashed(
-                        state.clone().into_iter().map(|(key, balance)| (key, (Account { balance, ..Default::default() }, std::iter::empty())))
-                    );
-                    assert_eq!(expected_root, state_root);
-                    trie_updates.flush(tx.tx_ref()).unwrap();
                 }
-            });
+
+                let (state_root, trie_updates) = StateRoot::from_tx(tx.tx_ref())
+                    .with_prefix_sets(TriePrefixSets { account_prefix_set: changes.freeze(), ..Default::default() })
+                    .root_with_updates()
+                    .unwrap();
+
+                state.append(&mut accounts.clone());
+                let expected_root = state_root_prehashed(
+                    state.iter().map(|(&key, &balance)| (key, (Account { balance, ..Default::default() }, std::iter::empty())))
+                );
+                assert_eq!(expected_root, state_root);
+                trie_updates.flush(tx.tx_ref()).unwrap();
+            }
         }
     }
 }
diff --git a/testing/ef-tests/Cargo.toml b/testing/ef-tests/Cargo.toml
index c41886377..3f2193227 100644
--- a/testing/ef-tests/Cargo.toml
+++ b/testing/ef-tests/Cargo.toml
@@ -13,6 +13,7 @@ workspace = true

 [features]
 ef-tests = []
+asm-keccak = ["reth-primitives/asm-keccak"]

 [dependencies]
 reth-primitives.workspace = true
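Aside (not part of the patch): the trie.rs hunk above stops constructing a Tokio runtime inside the proptest body and runs each case synchronously, so `Runtime::new().unwrap().block_on(async { .. })` is no longer paid once per generated case. A minimal, self-contained sketch of that synchronous shape; the property and types below are illustrative, and only the `proptest_config` / typed-argument form mirrors the patch:

    // Illustrative property only; this is not reth's state-root check.
    use std::collections::BTreeMap;

    use proptest::prelude::*;

    proptest! {
        #![proptest_config(ProptestConfig { cases: 128, ..ProptestConfig::default() })]

        #[test]
        fn fuzz_roundtrip(entries: BTreeMap<u64, u64>) {
            // Runs directly on the test thread; no per-case async runtime.
            let rebuilt: BTreeMap<u64, u64> = entries.clone().into_iter().collect();
            prop_assert_eq!(rebuilt, entries);
        }
    }

Under the old shape, the runtime was built inside the function body, i.e. once for every proptest case; the new shape avoids that entirely.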