36 Commits

Author SHA1 Message Date
24f2460337 Merge pull request #80 from hl-archive-node/chore/v1.8.2
chore: Upgrade to reth v1.8.2
2025-10-05 04:38:54 -04:00
b55ddc54ad chore: clippy 2025-10-05 04:04:30 -04:00
aa73fab281 chore: Now cargo fmt sorts imports and trait methods 2025-10-05 03:56:23 -04:00
ae0cb0da6d chore: Move sprites0/reth to hl-archive-node/reth 2025-10-05 03:56:23 -04:00
8605be9864 chore: Upgrade to reth v1.8.2 2025-10-05 03:56:23 -04:00
c93ff90f94 Merge pull request #79 from hl-archive-node/fix/issue-78
fix: Do not filter out logs based on bloom (which is for perf optimization)
2025-10-05 00:43:20 -04:00
ce64e00e2f fix: Do not filter out logs based on bloom (which is for perf optimization)
Resolves #78
2025-10-05 00:33:44 -04:00
8d8da57d3a Merge pull request #77 from hl-archive-node/feat/cutoff-latest
feat: Add debug CLI flag to enforce latest blocks (--debug-cutoff-height)
2025-10-02 10:57:04 -04:00
875304f891 feat: Add debug CLI flag to enforce latest blocks (--debug-cutoff-height)
This is useful when syncing to specific testnet blocks
2025-10-02 14:53:47 +00:00
b37ba15765 Merge pull request #74 from Quertyy/feat/block-precompila-data-rpc-method
feat(rpc): add HlBlockPrecompile rpc API
2025-09-19 02:42:21 -04:00
3080665702 style: pass clippy check 2025-09-19 13:23:49 +07:00
4896e4f0ea refactor: use BlockId as block type 2025-09-19 12:41:14 +07:00
458f506ad2 refactor: use BlockHashOrNumber as block type 2025-09-19 12:33:32 +07:00
1c7136bfab feat(rpc): add HlBlockPrecompile rpc API 2025-09-18 04:57:49 +07:00
491e902904 Merge pull request #69 from hl-archive-node/fix/call-and-estimate
fix: Apply precompiles for eth_call and eth_estimateGas
2025-09-15 02:22:21 -04:00
45648a7a98 fix: Apply precompiles for eth_call and eth_estimateGas 2025-09-15 02:21:45 -04:00
c87c5a055a Merge pull request #68 from hl-archive-node/fix/testnet-token
fix: Add a manual mapping for testnet
2025-09-14 23:31:19 -04:00
c9416a3948 fix: Add a manual mapping for testnet 2025-09-14 23:24:00 -04:00
db10c23c56 Merge pull request #66 from hl-archive-node/feat/nb-release
fix: Fix tag format
2025-09-13 16:48:00 -04:00
fc395123f3 fix: Fix tag format 2025-09-13 16:47:05 -04:00
84ea1af682 Merge pull request #64 from sentioxyz/node-builder
fix docker build args
2025-09-13 16:43:35 -04:00
bd3e0626ed fix docker build args 2025-09-13 15:28:36 +08:00
7d223a464e Merge pull request #63 from hl-archive-node/feat/nb-release
feat: Add nb tag to docker releases
2025-09-11 19:36:43 -04:00
afcc551f67 feat: Add nb tag to docker releases 2025-09-11 19:35:50 -04:00
0dfd7a4c7f Merge pull request #62 from hl-archive-node/doc/testnet
doc: Update testnet instruction, add support channel
2025-09-11 19:33:50 -04:00
8faac526b7 doc: Add support channel 2025-09-11 19:32:55 -04:00
acfabf969c doc: Update testnet block number 2025-09-11 19:31:37 -04:00
fccf877a3a Merge pull request #61 from hl-archive-node/chore/v1.7.0
chore: Upgrade to reth v1.7.0
2025-09-11 19:26:47 -04:00
9e3f0c722e chore: Upgrade to reth v1.7.0 2025-09-11 19:25:48 -04:00
cd5bcc4cb0 chore: Add issue templates from reth 2025-09-11 19:00:09 -04:00
d831a459bb Merge pull request #60 from hl-archive-node/feat/block-metrics
feat: Add block source metrics
2025-09-11 18:56:18 -04:00
66c2ee654c feat: Add block source metrics 2025-09-11 18:50:22 -04:00
701e6a25e6 refactor: Remove duplications 2025-09-11 18:47:58 -04:00
ab11ce513f Merge pull request #57 from Quertyy/chore/reth-hl-version
chore(build): add reth-hl version output
2025-09-09 09:43:12 -04:00
37b852e810 chore(build): add reth-hl version output 2025-09-09 20:19:52 +07:00
51c43d6dbd Create a docker release github action (#54)
* create docker release action

2025-09-08 10:26:20 -04:00
66 changed files with 1616 additions and 745 deletions

.github/ISSUE_TEMPLATE/bug.yml (new file, 127 lines)

@ -0,0 +1,127 @@
name: Bug Report
description: Create a bug report
labels: ["C-bug", "S-needs-triage"]
body:
- type: markdown
attributes:
value: |
Thanks for taking the time to fill out this bug report! Please provide as much detail as possible.
If you believe you have found a vulnerability, please provide details [here](mailto:georgios@paradigm.xyz) instead.
- type: textarea
id: what-happened
attributes:
label: Describe the bug
description: |
A clear and concise description of what the bug is.
If the bug is in a crate you are using (i.e. you are not running the standard `reth` binary) please mention that as well.
validations:
required: true
- type: textarea
id: reproduction-steps
attributes:
label: Steps to reproduce
description: Please provide any steps you think might be relevant to reproduce the bug.
placeholder: |
Steps to reproduce:
1. Start '...'
2. Then '...'
3. Check '...'
4. See error
validations:
required: true
- type: textarea
id: logs
attributes:
label: Node logs
description: |
If applicable, please provide the node logs leading up to the bug.
**Please also provide debug logs.** By default, these can be found in:
- `~/.cache/reth/logs` on Linux
- `~/Library/Caches/reth/logs` on macOS
- `%localAppData%/reth/logs` on Windows
render: text
validations:
required: false
- type: dropdown
id: platform
attributes:
label: Platform(s)
description: What platform(s) did this occur on?
multiple: true
options:
- Linux (x86)
- Linux (ARM)
- Mac (Intel)
- Mac (Apple Silicon)
- Windows (x86)
- Windows (ARM)
- type: dropdown
id: container_type
attributes:
label: Container Type
description: Were you running it in a container?
multiple: true
options:
- Not running in a container
- Docker
- Kubernetes
- LXC/LXD
- Other
validations:
required: true
- type: textarea
id: client-version
attributes:
label: What version/commit are you on?
description: This can be obtained with `reth --version`
validations:
required: true
- type: textarea
id: database-version
attributes:
label: What database version are you on?
description: This can be obtained with `reth db version`
validations:
required: true
- type: textarea
id: network
attributes:
label: Which chain / network are you on?
description: This is the argument you pass to `reth --chain`. If you are using `--dev`, type in 'dev' here. If you are not running with `--chain` or `--dev` then it is mainnet.
validations:
required: true
- type: dropdown
id: node-type
attributes:
label: What type of node are you running?
options:
- Archive (default)
- Full via --full flag
- Pruned with custom reth.toml config
validations:
required: true
- type: textarea
id: prune-config
attributes:
label: What prune config do you use, if any?
description: The `[prune]` section in `reth.toml` file
validations:
required: false
- type: input
attributes:
label: If you've built Reth from source, provide the full command you used
validations:
required: false
- type: checkboxes
id: terms
attributes:
label: Code of Conduct
description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/paradigmxyz/reth/blob/main/CONTRIBUTING.md#code-of-conduct)
options:
- label: I agree to follow the Code of Conduct
required: true

.github/ISSUE_TEMPLATE/config.yml (new file, 5 lines)

@ -0,0 +1,5 @@
blank_issues_enabled: false
contact_links:
- name: GitHub Discussions
url: https://github.com/paradigmxyz/reth/discussions
about: Please ask and answer questions here to keep the issue tracker clean.

.github/ISSUE_TEMPLATE/docs.yml (new file, 19 lines)

@ -0,0 +1,19 @@
name: Documentation
description: Suggest a change to our documentation
labels: ["C-docs", "S-needs-triage"]
body:
- type: markdown
attributes:
value: |
If you are unsure if the docs are relevant or needed, please open up a discussion first.
- type: textarea
attributes:
label: Describe the change
description: |
Please describe the documentation you want to change or add, and if it is for end-users or contributors.
validations:
required: true
- type: textarea
attributes:
label: Additional context
description: Add any other context to the feature (like screenshots, resources)

.github/ISSUE_TEMPLATE/feature.yml (new file, 21 lines)

@ -0,0 +1,21 @@
name: Feature request
description: Suggest a feature
labels: ["C-enhancement", "S-needs-triage"]
body:
- type: markdown
attributes:
value: |
Please ensure that the feature has not already been requested in the issue tracker.
- type: textarea
attributes:
label: Describe the feature
description: |
Please describe the feature and what it is aiming to solve, if relevant.
If the feature is for a crate, please include a proposed API surface.
validations:
required: true
- type: textarea
attributes:
label: Additional context
description: Add any other context to the feature (like screenshots, resources)

.github/workflows/docker.yml (new file, 38 lines)

@ -0,0 +1,38 @@
# Publishes the Docker image.
name: docker
on:
push:
tags:
- v*
- nb-*
env:
IMAGE_NAME: ${{ github.repository_owner }}/nanoreth
CARGO_TERM_COLOR: always
DOCKER_IMAGE_NAME: ghcr.io/${{ github.repository_owner }}/nanoreth
DOCKER_USERNAME: ${{ github.actor }}
jobs:
build:
name: build and push as latest
runs-on: ubuntu-24.04
permissions:
packages: write
contents: read
steps:
- uses: actions/checkout@v5
- uses: rui314/setup-mold@v1
- uses: dtolnay/rust-toolchain@stable
- uses: Swatinem/rust-cache@v2
with:
cache-on-failure: true
- name: Log in to Docker
run: |
echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io --username ${DOCKER_USERNAME} --password-stdin
- name: Set up Docker builder
run: |
docker buildx create --use --name builder
- name: Build and push nanoreth image
run: make IMAGE_NAME=$IMAGE_NAME DOCKER_IMAGE_NAME=$DOCKER_IMAGE_NAME PROFILE=maxperf docker-build-push-latest

Cargo.lock (generated, 838 lines)

File diff suppressed because it is too large.

Cargo.toml

@ -1,7 +1,8 @@
[package]
name = "reth_hl"
version = "0.1.0"
edition = "2021"
edition = "2024"
build = "build.rs"
[lib]
name = "reth_hl"
@ -25,67 +26,73 @@ lto = "fat"
codegen-units = 1
[dependencies]
reth = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-cli = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-cli-commands = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-basic-payload-builder = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-db = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-db-api = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-chainspec = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-cli-util = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-discv4 = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-engine-primitives = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-ethereum-forks = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-ethereum-payload-builder = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-ethereum-primitives = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-eth-wire = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-eth-wire-types = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-evm = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-evm-ethereum = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-node-core = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-revm = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-network = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-network-p2p = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-network-api = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-node-ethereum = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-network-peers = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-payload-primitives = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-primitives = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-primitives-traits = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-provider = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb", features = ["test-utils"] }
reth-rpc = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-rpc-eth-api = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-rpc-engine-api = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-tracing = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-trie-common = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-trie-db = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-codecs = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-transaction-pool = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
reth-stages-types = { git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb" }
revm = { version = "28.0.1", default-features = false }
reth = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-cli = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-cli-commands = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-basic-payload-builder = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-db = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-db-api = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-chainspec = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-cli-util = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-discv4 = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-engine-primitives = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-ethereum-forks = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-ethereum-payload-builder = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-ethereum-primitives = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-eth-wire = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-eth-wire-types = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-evm = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-evm-ethereum = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-node-core = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-revm = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-network = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-network-p2p = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-network-api = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-node-ethereum = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-network-peers = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-payload-primitives = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-primitives = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-primitives-traits = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-provider = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88", features = ["test-utils"] }
reth-rpc = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-rpc-eth-api = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-rpc-engine-api = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-tracing = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-trie-common = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-trie-db = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-codecs = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-transaction-pool = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-stages-types = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-storage-api = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-errors = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-rpc-convert = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-rpc-eth-types = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-rpc-server-types = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
reth-metrics = { git = "https://github.com/hl-archive-node/reth", rev = "83baf84bcb6d88081fc1b39f97733b8ec345cb88" }
revm = { version = "29.0.1", default-features = false }
# alloy dependencies
alloy-genesis = { version = "1.0.23", default-features = false }
alloy-consensus = { version = "1.0.23", default-features = false }
alloy-genesis = { version = "1.0.37", default-features = false }
alloy-consensus = { version = "1.0.37", default-features = false }
alloy-chains = { version = "0.2.5", default-features = false }
alloy-eips = { version = "1.0.23", default-features = false }
alloy-evm = { version = "0.18.2", default-features = false }
alloy-eips = { version = "1.0.37", default-features = false }
alloy-evm = { version = "0.21.0", default-features = false }
alloy-json-abi = { version = "1.3.1", default-features = false }
alloy-json-rpc = { version = "1.0.23", default-features = false }
alloy-json-rpc = { version = "1.0.37", default-features = false }
alloy-dyn-abi = "1.3.1"
alloy-network = { version = "1.0.23", default-features = false }
alloy-network = { version = "1.0.37", default-features = false }
alloy-primitives = { version = "1.3.1", default-features = false, features = ["map-foldhash"] }
alloy-rlp = { version = "0.3.10", default-features = false, features = ["core-net"] }
alloy-rpc-types = { version = "1.0.23", features = ["eth"], default-features = false }
alloy-rpc-types-eth = { version = "1.0.23", default-features = false }
alloy-rpc-types-engine = { version = "1.0.23", default-features = false }
alloy-signer = { version = "1.0.23", default-features = false }
alloy-rpc-types = { version = "1.0.37", features = ["eth"], default-features = false }
alloy-rpc-types-eth = { version = "1.0.37", default-features = false }
alloy-rpc-types-engine = { version = "1.0.37", default-features = false }
alloy-signer = { version = "1.0.37", default-features = false }
alloy-sol-macro = "1.3.1"
alloy-sol-types = { version = "1.3.1", default-features = false }
jsonrpsee = "0.25.1"
jsonrpsee-core = "0.25.1"
jsonrpsee-types = "0.25.1"
jsonrpsee = "0.26.0"
jsonrpsee-core = "0.26.0"
jsonrpsee-types = "0.26.0"
# misc dependencies
auto_impl = "1"
@ -166,3 +173,7 @@ client = [
[dev-dependencies]
tempfile = "3.20.0"
[build-dependencies]
vergen = { version = "9.0.4", features = ["build", "cargo", "emit_and_set"] }
vergen-git2 = "1.0.5"

Makefile

@ -1,6 +1,8 @@
# Modified from reth Makefile
.DEFAULT_GOAL := help
GIT_SHA ?= $(shell git rev-parse HEAD)
GIT_TAG ?= $(shell git describe --tags --abbrev=0 2>/dev/null)
BIN_DIR = "dist/bin"
# List of features to use when building. Can be overridden via the environment.
@ -17,6 +19,9 @@ PROFILE ?= release
# Extra flags for Cargo
CARGO_INSTALL_EXTRA_FLAGS ?=
# The docker image name
DOCKER_IMAGE_NAME ?= ghcr.io/hl-archive-node/nanoreth
##@ Help
.PHONY: help
@ -207,3 +212,49 @@ check-features:
--package reth-primitives-traits \
--package reth-primitives \
--feature-powerset
##@ Docker
# Note: This requires a buildx builder with emulation support. For example:
#
# `docker run --privileged --rm tonistiigi/binfmt --install amd64,arm64`
# `docker buildx create --use --driver docker-container --name cross-builder`
.PHONY: docker-build-push
docker-build-push: ## Build and push a cross-arch Docker image tagged with the latest git tag.
$(call docker_build_push,$(GIT_TAG),$(GIT_TAG))
# Note: This requires a buildx builder with emulation support. For example:
#
# `docker run --privileged --rm tonistiigi/binfmt --install amd64,arm64`
# `docker buildx create --use --driver docker-container --name cross-builder`
.PHONY: docker-build-push-git-sha
docker-build-push-git-sha: ## Build and push a cross-arch Docker image tagged with the latest git sha.
$(call docker_build_push,$(GIT_SHA),$(GIT_SHA))
# Note: This requires a buildx builder with emulation support. For example:
#
# `docker run --privileged --rm tonistiigi/binfmt --install amd64,arm64`
# `docker buildx create --use --driver docker-container --name cross-builder`
.PHONY: docker-build-push-latest
docker-build-push-latest: ## Build and push a cross-arch Docker image tagged with the latest git tag and `latest`.
$(call docker_build_push,$(GIT_TAG),latest)
# Note: This requires a buildx builder with emulation support. For example:
#
# `docker run --privileged --rm tonistiigi/binfmt --install amd64,arm64`
# `docker buildx create --use --name cross-builder`
.PHONY: docker-build-push-nightly
docker-build-push-nightly: ## Build and push cross-arch Docker image tagged with the latest git tag with a `-nightly` suffix, and `latest-nightly`.
$(call docker_build_push,nightly,nightly)
# Create a Docker image using the main Dockerfile
define docker_build_push
docker buildx build --file ./Dockerfile . \
--platform linux/amd64 \
--tag $(DOCKER_IMAGE_NAME):$(1) \
--tag $(DOCKER_IMAGE_NAME):$(2) \
--build-arg BUILD_PROFILE="$(PROFILE)" \
--build-arg FEATURES="jemalloc,asm-keccak" \
--provenance=false \
--push
endef

README.md

@ -3,6 +3,8 @@
HyperEVM archive node implementation based on [reth](https://github.com/paradigmxyz/reth).
NodeBuilder API version is heavily inspired by [reth-bsc](https://github.com/loocapro/reth-bsc).
Got questions? Drop by the [Hyperliquid Discord](https://discord.gg/hyperliquid) #node-operators channel.
## ⚠️ IMPORTANT: System Transactions Appear as Pseudo Transactions
Deposit transactions from [System Addresses](https://hyperliquid.gitbook.io/hyperliquid-docs/for-developers/hyperevm/hypercore-less-than-greater-than-hyperevm-transfers#system-addresses) like `0x222..22` / `0x200..xx` to user addresses are intentionally recorded as pseudo transactions.
@ -58,19 +60,19 @@ $ reth-hl node --http --http.addr 0.0.0.0 --http.api eth,ots,net,web3 \
## How to run (testnet)
Testnet is supported since block 21304281.
Testnet is supported since block 30281484.
```sh
# Get testnet genesis at block 21304281
# Get testnet genesis at block 30281484
$ cd ~
$ git clone https://github.com/sprites0/hl-testnet-genesis
$ zstd --rm -d ~/hl-testnet-genesis/*.zst
# Init node
$ make install
$ reth-hl init-state --without-evm --chain testnet --header ~/hl-testnet-genesis/21304281.rlp \
--header-hash 0x5b10856d2b1ad241c9bd6136bcc60ef7e8553560ca53995a590db65f809269b4 \
~/hl-testnet-genesis/21304281.jsonl --total-difficulty 0
$ reth-hl init-state --without-evm --chain testnet --header ~/hl-testnet-genesis/30281484.rlp \
--header-hash 0x147cc3c09e9ddbb11799c826758db284f77099478ab5f528d3a57a6105516c21 \
~/hl-testnet-genesis/30281484.jsonl --total-difficulty 0
# Run node
$ reth-hl node --chain testnet --http --http.addr 0.0.0.0 --http.api eth,ots,net,web3 \

build.rs (new file, 91 lines)

@ -0,0 +1,91 @@
use std::{env, error::Error};
use vergen::{BuildBuilder, CargoBuilder, Emitter};
use vergen_git2::Git2Builder;
fn main() -> Result<(), Box<dyn Error>> {
let mut emitter = Emitter::default();
let build_builder = BuildBuilder::default().build_timestamp(true).build()?;
emitter.add_instructions(&build_builder)?;
let cargo_builder = CargoBuilder::default().features(true).target_triple(true).build()?;
emitter.add_instructions(&cargo_builder)?;
let git_builder =
Git2Builder::default().describe(false, true, None).dirty(true).sha(false).build()?;
emitter.add_instructions(&git_builder)?;
emitter.emit_and_set()?;
let sha = env::var("VERGEN_GIT_SHA")?;
let sha_short = &sha[0..7];
let is_dirty = env::var("VERGEN_GIT_DIRTY")? == "true";
// > git describe --always --tags
// if not on a tag: v0.2.0-beta.3-82-g1939939b
// if on a tag: v0.2.0-beta.3
let not_on_tag = env::var("VERGEN_GIT_DESCRIBE")?.ends_with(&format!("-g{sha_short}"));
let version_suffix = if is_dirty || not_on_tag { "-dev" } else { "" };
println!("cargo:rustc-env=RETH_HL_VERSION_SUFFIX={version_suffix}");
// Set short SHA
println!("cargo:rustc-env=VERGEN_GIT_SHA_SHORT={}", &sha[..8]);
// Set the build profile
let out_dir = env::var("OUT_DIR").unwrap();
let profile = out_dir.rsplit(std::path::MAIN_SEPARATOR).nth(3).unwrap();
println!("cargo:rustc-env=RETH_HL_BUILD_PROFILE={profile}");
// Set formatted version strings
let pkg_version = env!("CARGO_PKG_VERSION");
// The short version information for reth.
// - The latest version from Cargo.toml
// - The short SHA of the latest commit.
// Example: 0.1.0 (defa64b2)
println!("cargo:rustc-env=RETH_HL_SHORT_VERSION={pkg_version}{version_suffix} ({sha_short})");
// LONG_VERSION
// The long version information for reth.
//
// - The latest version from Cargo.toml + version suffix (if any)
// - The full SHA of the latest commit
// - The build datetime
// - The build features
// - The build profile
//
// Example:
//
// ```text
// Version: 0.1.0
// Commit SHA: defa64b2
// Build Timestamp: 2023-05-19T01:47:19.815651705Z
// Build Features: jemalloc
// Build Profile: maxperf
// ```
println!("cargo:rustc-env=RETH_HL_LONG_VERSION_0=Version: {pkg_version}{version_suffix}");
println!("cargo:rustc-env=RETH_HL_LONG_VERSION_1=Commit SHA: {sha}");
println!(
"cargo:rustc-env=RETH_HL_LONG_VERSION_2=Build Timestamp: {}",
env::var("VERGEN_BUILD_TIMESTAMP")?
);
println!(
"cargo:rustc-env=RETH_HL_LONG_VERSION_3=Build Features: {}",
env::var("VERGEN_CARGO_FEATURES")?
);
println!("cargo:rustc-env=RETH_HL_LONG_VERSION_4=Build Profile: {profile}");
// The version information for reth formatted for P2P (devp2p).
// - The latest version from Cargo.toml
// - The target triple
//
// Example: reth/v0.1.0-alpha.1-428a6dc2f/aarch64-apple-darwin
println!(
"cargo:rustc-env=RETH_HL_P2P_CLIENT_VERSION={}",
format_args!("reth/v{pkg_version}-{sha_short}/{}", env::var("VERGEN_CARGO_TARGET_TRIPLE")?)
);
Ok(())
}
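For reference, here is a minimal sketch of how the `cargo:rustc-env` values emitted above are typically consumed at compile time by the `version` module this diff adds (`pub mod version;`). The constant names are illustrative assumptions, not the repository's actual identifiers:

```rust
// Illustrative sketch only: reads the env vars set by build.rs via
// `cargo:rustc-env`. Constant names are assumptions, not the actual
// contents of reth_hl::version.
pub const SHORT_VERSION: &str = env!("RETH_HL_SHORT_VERSION");

pub const LONG_VERSION: &str = concat!(
    env!("RETH_HL_LONG_VERSION_0"), "\n",
    env!("RETH_HL_LONG_VERSION_1"), "\n",
    env!("RETH_HL_LONG_VERSION_2"), "\n",
    env!("RETH_HL_LONG_VERSION_3"), "\n",
    env!("RETH_HL_LONG_VERSION_4"),
);

pub const P2P_CLIENT_VERSION: &str = env!("RETH_HL_P2P_CLIENT_VERSION");
```

These `env!` lookups only compile inside the crate that runs this build script, since the variables exist only during its build.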

View File

@ -2,18 +2,18 @@ use alloy_eips::BlockId;
use alloy_json_rpc::RpcObject;
use alloy_primitives::{Bytes, U256};
use alloy_rpc_types_eth::{
state::{EvmOverrides, StateOverride},
BlockOverrides,
state::{EvmOverrides, StateOverride},
};
use jsonrpsee::{
http_client::{HttpClient, HttpClientBuilder},
proc_macros::rpc,
rpc_params,
types::{error::INTERNAL_ERROR_CODE, ErrorObject},
types::{ErrorObject, error::INTERNAL_ERROR_CODE},
};
use jsonrpsee_core::{async_trait, client::ClientT, ClientError, RpcResult};
use jsonrpsee_core::{ClientError, RpcResult, async_trait, client::ClientT};
use reth_rpc::eth::EthApiTypes;
use reth_rpc_eth_api::{helpers::EthCall, RpcTxReq};
use reth_rpc_eth_api::{RpcTxReq, helpers::EthCall};
#[rpc(server, namespace = "eth")]
pub(crate) trait CallForwarderApi<TxReq: RpcObject> {

View File

@ -7,34 +7,37 @@
//! For non-system transactions, we can just return the log as is, and the client will
//! adjust the transaction index accordingly.
use alloy_consensus::{transaction::TransactionMeta, BlockHeader, TxReceipt};
use alloy_consensus::{
BlockHeader, TxReceipt,
transaction::{TransactionMeta, TxHashRef},
};
use alloy_eips::{BlockId, BlockNumberOrTag};
use alloy_json_rpc::RpcObject;
use alloy_primitives::{B256, U256};
use alloy_rpc_types::{
pubsub::{Params, SubscriptionKind},
BlockTransactions, Filter, FilterChanges, FilterId, Log, PendingTransactionFilterKind,
TransactionInfo,
pubsub::{Params, SubscriptionKind},
};
use jsonrpsee::{proc_macros::rpc, PendingSubscriptionSink, SubscriptionMessage, SubscriptionSink};
use jsonrpsee_core::{async_trait, RpcResult};
use jsonrpsee_types::{error::INTERNAL_ERROR_CODE, ErrorObject};
use jsonrpsee::{PendingSubscriptionSink, SubscriptionMessage, SubscriptionSink, proc_macros::rpc};
use jsonrpsee_core::{RpcResult, async_trait};
use jsonrpsee_types::{ErrorObject, error::INTERNAL_ERROR_CODE};
use reth::{api::FullNodeComponents, builder::rpc::RpcContext, tasks::TaskSpawner};
use reth_primitives_traits::{BlockBody as _, SignedTransaction};
use reth_provider::{BlockIdReader, BlockReader, BlockReaderIdExt, ReceiptProvider};
use reth_rpc::{eth::pubsub::SubscriptionSerializeError, EthFilter, EthPubSub, RpcTypes};
use reth_rpc::{EthFilter, EthPubSub, RpcTypes, eth::pubsub::SubscriptionSerializeError};
use reth_rpc_eth_api::{
helpers::{EthBlocks, EthTransactions, LoadReceipt},
transaction::ConvertReceiptInput,
EthApiServer, EthApiTypes, EthFilterApiServer, EthPubSubApiServer, FullEthApiTypes, RpcBlock,
RpcConvert, RpcHeader, RpcNodeCoreExt, RpcReceipt, RpcTransaction, RpcTxReq,
helpers::{EthBlocks, EthTransactions, LoadReceipt},
transaction::ConvertReceiptInput,
};
use serde::Serialize;
use std::{borrow::Cow, marker::PhantomData, sync::Arc};
use std::{marker::PhantomData, sync::Arc};
use tokio_stream::{Stream, StreamExt};
use tracing::{trace, Instrument};
use tracing::{Instrument, trace};
use crate::{node::primitives::HlPrimitives, HlBlock};
use crate::{HlBlock, node::primitives::HlPrimitives};
pub trait EthWrapper:
EthApiServer<
@ -182,7 +185,7 @@ impl<Eth: EthWrapper> HlSystemTransactionExt<Eth> {
};
let input = ConvertReceiptInput {
receipt: Cow::Borrowed(receipt),
receipt: receipt.clone(),
tx,
gas_used: receipt.cumulative_gas_used() - gas_used,
next_log_index,
@ -530,7 +533,7 @@ async fn adjust_block_receipts<Eth: EthWrapper>(
};
let input = ConvertReceiptInput {
receipt: Cow::Borrowed(receipt),
receipt: receipt.clone(),
tx,
gas_used: receipt.cumulative_gas_used() - gas_used,
next_log_index,
@ -576,9 +579,8 @@ async fn adjust_transaction_receipt<Eth: EthWrapper>(
fn system_tx_count_for_block<Eth: EthWrapper>(eth_api: &Eth, block_id: BlockId) -> usize {
let provider = eth_api.provider();
let block = provider.block_by_id(block_id).unwrap().unwrap();
let system_tx_count =
block.body.transactions().iter().filter(|tx| tx.is_system_transaction()).count();
system_tx_count
block.body.transactions().iter().filter(|tx| tx.is_system_transaction()).count()
}
#[async_trait]

View File

@ -2,14 +2,14 @@ use std::time::Duration;
use alloy_json_rpc::RpcObject;
use alloy_network::Ethereum;
use alloy_primitives::{Bytes, B256};
use alloy_primitives::{B256, Bytes};
use alloy_rpc_types::TransactionRequest;
use jsonrpsee::{
http_client::{HttpClient, HttpClientBuilder},
proc_macros::rpc,
types::{error::INTERNAL_ERROR_CODE, ErrorObject},
types::{ErrorObject, error::INTERNAL_ERROR_CODE},
};
use jsonrpsee_core::{async_trait, client::ClientT, ClientError, RpcResult};
use jsonrpsee_core::{ClientError, RpcResult, async_trait, client::ClientT};
use reth::rpc::{result::internal_rpc_err, server_types::eth::EthApiError};
use reth_rpc_eth_api::RpcReceipt;

View File

@ -1,5 +1,5 @@
use alloy_chains::{Chain, NamedChain};
use alloy_primitives::{b256, Address, Bytes, B256, B64, U256};
use alloy_primitives::{Address, B64, B256, Bytes, U256, b256};
use reth_chainspec::{ChainHardforks, ChainSpec, EthereumHardfork, ForkCondition, Hardfork};
use reth_primitives::{Header, SealedHeader};
use std::sync::LazyLock;

View File

@ -37,10 +37,6 @@ impl EthChainSpec for HlChainSpec {
self.inner.chain()
}
fn base_fee_params_at_block(&self, block_number: u64) -> BaseFeeParams {
self.inner.base_fee_params_at_block(block_number)
}
fn base_fee_params_at_timestamp(&self, timestamp: u64) -> BaseFeeParams {
self.inner.base_fee_params_at_timestamp(timestamp)
}

View File

@ -1,4 +1,4 @@
use crate::chainspec::{hl::hl_testnet, HlChainSpec};
use crate::chainspec::{HlChainSpec, hl::hl_testnet};
use super::hl::hl_mainnet;
use reth_cli::chainspec::ChainSpecParser;

View File

@ -1,4 +1,4 @@
use alloy_primitives::{BlockNumber, B256};
use alloy_primitives::{B256, BlockNumber};
use reth_provider::{BlockNumReader, ProviderError};
use std::cmp::Ordering;

View File

@ -2,8 +2,8 @@ use super::HlEvmInner;
use crate::evm::{spec::HlSpecId, transaction::HlTxTr};
use reth_revm::context::ContextTr;
use revm::{
context::Cfg, context_interface::Block, handler::instructions::EthInstructions,
interpreter::interpreter::EthInterpreter, Context, Database,
Context, Database, context::Cfg, context_interface::Block,
handler::instructions::EthInstructions, interpreter::interpreter::EthInterpreter,
};
/// Trait that allows for hl HlEvm to be built.

View File

@ -1,8 +1,8 @@
use crate::evm::{spec::HlSpecId, transaction::HlTxEnv};
use revm::{
Context, Journal, MainContext,
context::{BlockEnv, CfgEnv, TxEnv},
database_interface::EmptyDB,
Context, Journal, MainContext,
};
/// Type alias for the default context type of the HlEvm.

View File

@ -1,16 +1,16 @@
use super::HlEvmInner;
use crate::evm::{spec::HlSpecId, transaction::HlTxTr};
use revm::{
context::{result::HaltReason, ContextSetters},
context_interface::{
result::{EVMError, ExecutionResult, ResultAndState},
Cfg, ContextTr, Database, JournalTr,
},
handler::{instructions::EthInstructions, PrecompileProvider},
inspector::{InspectCommitEvm, InspectEvm, Inspector, JournalExt},
interpreter::{interpreter::EthInterpreter, InterpreterResult},
state::EvmState,
DatabaseCommit, ExecuteCommitEvm, ExecuteEvm,
context::{ContextSetters, result::HaltReason},
context_interface::{
Cfg, ContextTr, Database, JournalTr,
result::{EVMError, ExecutionResult, ResultAndState},
},
handler::{PrecompileProvider, instructions::EthInstructions},
inspector::{InspectCommitEvm, InspectEvm, Inspector, JournalExt},
interpreter::{InterpreterResult, interpreter::EthInterpreter},
state::EvmState,
};
// Type alias for HL context

View File

@ -1,15 +1,15 @@
use revm::{
Inspector,
bytecode::opcode::BLOCKHASH,
context::{ContextSetters, Evm, FrameStack},
context_interface::ContextTr,
handler::{
EthFrame, EthPrecompiles, EvmTr, FrameInitOrResult, FrameTr, PrecompileProvider,
evm::{ContextDbError, FrameInitResult},
instructions::{EthInstructions, InstructionProvider},
EthFrame, EthPrecompiles, EvmTr, FrameInitOrResult, FrameTr, PrecompileProvider,
},
inspector::{InspectorEvmTr, JournalExt},
interpreter::{interpreter::EthInterpreter, Instruction, InterpreterResult},
Inspector,
interpreter::{Instruction, InterpreterResult, interpreter::EthInterpreter},
};
use crate::chainspec::MAINNET_CHAIN_ID;

View File

@ -7,8 +7,8 @@ use alloy_primitives::keccak256;
use revm::{
context::Host,
interpreter::{
as_u64_saturated, interpreter_types::StackTr, popn_top, InstructionContext,
InterpreterTypes,
InstructionContext, InterpreterTypes, as_u64_saturated, interpreter_types::StackTr,
popn_top,
},
primitives::{BLOCK_HASH_HISTORY, U256},
};

View File

@ -7,7 +7,7 @@ use reth_primitives_traits::SignerRecoverable;
use revm::{
context::TxEnv,
context_interface::transaction::Transaction,
primitives::{Address, Bytes, TxKind, B256, U256},
primitives::{Address, B256, Bytes, TxKind, U256},
};
#[auto_impl(&, &mut, Box, Arc)]

View File

@ -2,7 +2,7 @@
use alloy_chains::{Chain, NamedChain};
use core::any::Any;
use reth_chainspec::ForkCondition;
use reth_ethereum_forks::{hardfork, ChainHardforks, EthereumHardfork, Hardfork};
use reth_ethereum_forks::{ChainHardforks, EthereumHardfork, Hardfork, hardfork};
hardfork!(
/// The name of a hl hardfork.

View File

@ -5,5 +5,6 @@ mod evm;
mod hardforks;
pub mod node;
pub mod pseudo_peer;
pub mod version;
pub use node::primitives::{HlBlock, HlBlockBody, HlPrimitives};

View File

@ -9,11 +9,12 @@ use reth_hl::{
hl_node_compliance::install_hl_node_compliance,
tx_forwarder::{self, EthForwarderApiServer},
},
chainspec::{parser::HlChainSpecParser, HlChainSpec},
chainspec::{HlChainSpec, parser::HlChainSpecParser},
node::{
cli::{Cli, HlNodeArgs},
storage::tables::Tables,
HlNode,
cli::{Cli, HlNodeArgs},
rpc::precompile::{HlBlockPrecompileApiServer, HlBlockPrecompileExt},
storage::tables::Tables,
},
};
use tracing::info;
@ -26,17 +27,16 @@ static ALLOC: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
fn main() -> eyre::Result<()> {
reth_cli_util::sigsegv_handler::install();
// Enable backtraces unless a RUST_BACKTRACE value has already been explicitly provided.
if std::env::var_os("RUST_BACKTRACE").is_none() {
std::env::set_var("RUST_BACKTRACE", "1");
}
// Initialize custom version metadata before parsing CLI so --version uses reth-hl values
reth_hl::version::init_reth_hl_version();
Cli::<HlChainSpecParser, HlNodeArgs>::parse().run(
|builder: WithLaunchContext<NodeBuilder<Arc<DatabaseEnv>, HlChainSpec>>,
ext: HlNodeArgs| async move {
let default_upstream_rpc_url = builder.config().chain.official_rpc_url();
let (node, engine_handle_tx) = HlNode::new(ext.block_source_args.parse().await?);
let (node, engine_handle_tx) =
HlNode::new(ext.block_source_args.parse().await?, ext.debug_cutoff_height);
let NodeHandle { node, node_exit_future: exit_future } = builder
.node(node)
.extend_rpc_modules(move |mut ctx| {
@ -69,10 +69,14 @@ fn main() -> eyre::Result<()> {
info!("eth_getProof is disabled by default");
}
ctx.modules.merge_configured(
HlBlockPrecompileExt::new(ctx.registry.eth_api().clone()).into_rpc(),
)?;
Ok(())
})
.apply(|builder| {
builder.db().create_tables_for::<Tables>().expect("create tables");
.apply(|mut builder| {
builder.db_mut().create_tables_for::<Tables>().expect("create tables");
builder
})
.launch()

View File

@ -1,21 +1,21 @@
use crate::{
chainspec::{parser::HlChainSpecParser, HlChainSpec},
node::{consensus::HlConsensus, evm::config::HlEvmConfig, storage::tables::Tables, HlNode},
chainspec::{HlChainSpec, parser::HlChainSpecParser},
node::{HlNode, consensus::HlConsensus, evm::config::HlEvmConfig, storage::tables::Tables},
pseudo_peer::BlockSourceArgs,
};
use clap::{Args, Parser};
use reth::{
CliRunner,
args::LogArgs,
builder::{NodeBuilder, WithLaunchContext},
cli::Commands,
prometheus_exporter::install_prometheus_recorder,
version::version_metadata,
CliRunner,
};
use reth_chainspec::EthChainSpec;
use reth_cli::chainspec::ChainSpecParser;
use reth_cli_commands::{common::EnvironmentArgs, launcher::FnLauncher};
use reth_db::{init_db, mdbx::init_db_for, DatabaseEnv};
use reth_db::{DatabaseEnv, init_db, mdbx::init_db_for};
use reth_tracing::FileWorkerGuard;
use std::{
fmt::{self},
@ -35,6 +35,12 @@ pub struct HlNodeArgs {
#[command(flatten)]
pub block_source_args: BlockSourceArgs,
/// Debug cutoff height.
///
/// This option is used to cut off the block import at a specific height.
#[arg(long, env = "DEBUG_CUTOFF_HEIGHT")]
pub debug_cutoff_height: Option<u64>,
/// Upstream RPC URL to forward incoming transactions.
///
/// Defaults to Hyperliquid's RPC URL when not provided (https://rpc.hyperliquid.xyz/evm).
@ -130,8 +136,9 @@ where
// Install the prometheus recorder to be sure to record all metrics
let _ = install_prometheus_recorder();
let components =
|spec: Arc<C::ChainSpec>| (HlEvmConfig::new(spec.clone()), HlConsensus::new(spec));
let components = |spec: Arc<C::ChainSpec>| {
(HlEvmConfig::new(spec.clone()), Arc::new(HlConsensus::new(spec)))
};
match self.command {
Commands::Node(command) => runner.run_command_until_exit(|ctx| {
@ -151,9 +158,6 @@ where
runner.run_command_until_exit(|ctx| command.execute::<HlNode, _>(ctx, components))
}
Commands::Config(command) => runner.run_until_ctrl_c(command.execute()),
Commands::Recover(command) => {
runner.run_command_until_exit(|ctx| command.execute::<HlNode>(ctx))
}
Commands::Prune(command) => runner.run_until_ctrl_c(command.execute::<HlNode>()),
Commands::Import(command) => {
runner.run_blocking_until_ctrl_c(command.execute::<HlNode, _>(components))

View File

@ -1,9 +1,9 @@
use crate::{hardforks::HlHardforks, node::HlNode, HlBlock, HlBlockBody, HlPrimitives};
use crate::{HlBlock, HlBlockBody, HlPrimitives, hardforks::HlHardforks, node::HlNode};
use alloy_consensus::Header;
use reth::{
api::FullNodeTypes,
api::{FullNodeTypes, NodeTypes},
beacon_consensus::EthBeaconConsensus,
builder::{components::ConsensusBuilder, BuilderContext},
builder::{BuilderContext, components::ConsensusBuilder},
consensus::{Consensus, ConsensusError, FullConsensus, HeaderValidator},
consensus_common::validation::{
validate_against_parent_4844, validate_against_parent_hash_number,
@ -24,7 +24,7 @@ impl<Node> ConsensusBuilder<Node> for HlConsensusBuilder
where
Node: FullNodeTypes<Types = HlNode>,
{
type Consensus = Arc<dyn FullConsensus<HlPrimitives, Error = ConsensusError>>;
type Consensus = Arc<HlConsensus<<Node::Types as NodeTypes>::ChainSpec>>;
async fn build_consensus(self, ctx: &BuilderContext<Node>) -> eyre::Result<Self::Consensus> {
Ok(Arc::new(HlConsensus::new(ctx.chain_spec())))

View File

@ -1,11 +1,11 @@
//! Copy of reth codebase.
use alloy_consensus::{proofs::calculate_receipt_root, BlockHeader, TxReceipt};
use alloy_consensus::{BlockHeader, TxReceipt, proofs::calculate_receipt_root};
use alloy_eips::eip7685::Requests;
use alloy_primitives::{Bloom, B256};
use alloy_primitives::{B256, Bloom};
use reth::consensus::ConsensusError;
use reth_chainspec::EthereumHardforks;
use reth_primitives::{gas_spent_by_transactions, GotExpected, RecoveredBlock};
use reth_primitives::{GotExpected, RecoveredBlock, gas_spent_by_transactions};
use reth_primitives_traits::{Block, Receipt as ReceiptTrait};
pub fn validate_block_post_execution<B, R, ChainSpec>(

View File

@ -1,6 +1,6 @@
use crate::{
node::evm::config::{HlBlockExecutorFactory, HlEvmConfig},
HlBlock,
node::evm::config::{HlBlockExecutorFactory, HlEvmConfig},
};
use alloy_consensus::Header;
use reth_evm::{

View File

@ -1,5 +1,6 @@
use super::{executor::HlBlockExecutor, factory::HlEvmFactory};
use crate::{
HlBlock, HlBlockBody, HlPrimitives,
chainspec::HlChainSpec,
evm::{spec::HlSpecId, transaction::HlTxEnv},
hardforks::HlHardforks,
@ -9,31 +10,30 @@ use crate::{
rpc::engine_api::validator::HlExecutionData,
types::HlExtras,
},
HlBlock, HlBlockBody, HlPrimitives,
};
use alloy_consensus::{BlockHeader, Header, Transaction as _, TxReceipt, EMPTY_OMMER_ROOT_HASH};
use alloy_eips::{merge::BEACON_NONCE, Encodable2718};
use alloy_consensus::{BlockHeader, EMPTY_OMMER_ROOT_HASH, Header, Transaction as _, TxReceipt};
use alloy_eips::{Encodable2718, merge::BEACON_NONCE};
use alloy_primitives::{Log, U256};
use reth_chainspec::{EthChainSpec, EthereumHardforks, Hardforks};
use reth_evm::{
block::{BlockExecutionError, BlockExecutorFactory, BlockExecutorFor},
eth::{receipt_builder::ReceiptBuilder, EthBlockExecutionCtx},
execute::{BlockAssembler, BlockAssemblerInput},
precompiles::PrecompilesMap,
ConfigureEngineEvm, ConfigureEvm, EvmEnv, EvmEnvFor, EvmFactory, ExecutableTxIterator,
ExecutionCtxFor, FromRecoveredTx, FromTxWithEncoded, IntoTxEnv, NextBlockEnvAttributes,
block::{BlockExecutionError, BlockExecutorFactory, BlockExecutorFor},
eth::{EthBlockExecutionCtx, receipt_builder::ReceiptBuilder},
execute::{BlockAssembler, BlockAssemblerInput},
precompiles::PrecompilesMap,
};
use reth_evm_ethereum::EthBlockAssembler;
use reth_payload_primitives::NewPayloadError;
use reth_primitives::{logs_bloom, BlockTy, HeaderTy, Receipt, SealedBlock, SealedHeader};
use reth_primitives_traits::{proofs, SignerRecoverable, WithEncoded};
use reth_primitives::{BlockTy, HeaderTy, Receipt, SealedBlock, SealedHeader, logs_bloom};
use reth_primitives_traits::{SignerRecoverable, WithEncoded, proofs};
use reth_provider::BlockExecutionResult;
use reth_revm::State;
use revm::{
Inspector,
context::{BlockEnv, CfgEnv, TxEnv},
context_interface::block::BlobExcessGasAndPrice,
primitives::hardfork::SpecId,
Inspector,
};
use std::{borrow::Cow, convert::Infallible, sync::Arc};
@ -106,7 +106,10 @@ where
} else {
// for the first post-fork block, both parent.blob_gas_used and
// parent.excess_blob_gas are evaluated as 0
Some(alloy_eips::eip7840::BlobParams::cancun().next_block_excess_blob_gas(0, 0))
Some(
alloy_eips::eip7840::BlobParams::cancun()
.next_block_excess_blob_gas_osaka(0, 0, 0),
)
};
}
@ -284,7 +287,7 @@ where
self
}
fn evm_env(&self, header: &Header) -> EvmEnv<HlSpecId> {
fn evm_env(&self, header: &Header) -> Result<EvmEnv<HlSpecId>, Self::Error> {
let blob_params = self.chain_spec().blob_params_at_timestamp(header.timestamp);
let spec = revm_spec_by_timestamp_and_block_number(
self.chain_spec().clone(),
@ -324,7 +327,7 @@ where
blob_excess_gas_and_price,
};
EvmEnv { cfg_env, block_env }
Ok(EvmEnv { cfg_env, block_env })
}
fn next_evm_env(
@ -373,9 +376,9 @@ where
fn context_for_block<'a>(
&self,
block: &'a SealedBlock<BlockTy<Self::Primitives>>,
) -> ExecutionCtxFor<'a, Self> {
) -> Result<ExecutionCtxFor<'a, Self>, Self::Error> {
let block_body = block.body();
HlBlockExecutionCtx {
Ok(HlBlockExecutionCtx {
ctx: EthBlockExecutionCtx {
parent_hash: block.header().parent_hash,
parent_beacon_block_root: block.header().parent_beacon_block_root,
@ -386,15 +389,15 @@ where
read_precompile_calls: block_body.read_precompile_calls.clone(),
highest_precompile_address: block_body.highest_precompile_address,
},
}
})
}
fn context_for_next_block(
&self,
parent: &SealedHeader<HeaderTy<Self::Primitives>>,
attributes: Self::NextBlockEnvCtx,
) -> ExecutionCtxFor<'_, Self> {
HlBlockExecutionCtx {
) -> Result<ExecutionCtxFor<'_, Self>, Self::Error> {
Ok(HlBlockExecutionCtx {
ctx: EthBlockExecutionCtx {
parent_hash: parent.hash(),
parent_beacon_block_root: attributes.parent_beacon_block_root,
@ -402,13 +405,13 @@ where
withdrawals: attributes.withdrawals.map(Cow::Owned),
},
extras: HlExtras::default(), // TODO: hacky, double check if this is correct
}
})
}
}
impl ConfigureEngineEvm<HlExecutionData> for HlEvmConfig {
fn evm_env_for_payload(&self, payload: &HlExecutionData) -> EvmEnvFor<Self> {
self.evm_env(&payload.0.header)
self.evm_env(&payload.0.header).unwrap()
}
fn context_for_payload<'a>(&self, payload: &'a HlExecutionData) -> ExecutionCtxFor<'a, Self> {

View File

@ -8,29 +8,26 @@ use crate::{
},
};
use alloy_consensus::{Transaction, TxReceipt};
use alloy_eips::{eip7685::Requests, Encodable2718};
use alloy_eips::{Encodable2718, eip7685::Requests};
use alloy_evm::{block::ExecutableTx, eth::receipt_builder::ReceiptBuilderCtx};
use alloy_primitives::{address, hex, Address, Bytes, U160, U256};
use alloy_primitives::{Address, Bytes, U160, U256, address, hex};
use reth_chainspec::{EthChainSpec, EthereumHardforks, Hardforks};
use reth_evm::{
block::{BlockValidationError, CommitChanges},
Database, Evm, FromRecoveredTx, FromTxWithEncoded, IntoTxEnv, OnStateHook,
block::BlockValidationError,
eth::receipt_builder::ReceiptBuilder,
execute::{BlockExecutionError, BlockExecutor},
precompiles::{DynPrecompile, PrecompileInput, PrecompilesMap},
Database, Evm, FromRecoveredTx, FromTxWithEncoded, IntoTxEnv, OnStateHook,
};
use reth_provider::BlockExecutionResult;
use reth_revm::State;
use revm::{
context::{
result::{ExecutionResult, ResultAndState},
TxEnv,
},
DatabaseCommit,
context::{TxEnv, result::ResultAndState},
interpreter::instructions::utility::IntoU256,
precompile::{PrecompileError, PrecompileOutput, PrecompileResult},
primitives::HashMap,
state::Bytecode,
DatabaseCommit,
};
pub fn is_system_transaction(tx: &TransactionSigned) -> bool {
@ -110,7 +107,9 @@ where
const COREWRITER_ENABLED_BLOCK_NUMBER: u64 = 7578300;
const COREWRITER_CONTRACT_ADDRESS: Address =
address!("0x3333333333333333333333333333333333333333");
const COREWRITER_CODE: &[u8] = &hex!("608060405234801561000f575f5ffd5b5060043610610029575f3560e01c806317938e131461002d575b5f5ffd5b61004760048036038101906100429190610123565b610049565b005b5f5f90505b61019081101561006557808060010191505061004e565b503373ffffffffffffffffffffffffffffffffffffffff167f8c7f585fb295f7eb1e6aeb8fba61b23a4fe60beda405f0045073b185c74412e383836040516100ae9291906101c8565b60405180910390a25050565b5f5ffd5b5f5ffd5b5f5ffd5b5f5ffd5b5f5ffd5b5f5f83601f8401126100e3576100e26100c2565b5b8235905067ffffffffffffffff811115610100576100ff6100c6565b5b60208301915083600182028301111561011c5761011b6100ca565b5b9250929050565b5f5f60208385031215610139576101386100ba565b5b5f83013567ffffffffffffffff811115610156576101556100be565b5b610162858286016100ce565b92509250509250929050565b5f82825260208201905092915050565b828183375f83830152505050565b5f601f19601f8301169050919050565b5f6101a7838561016e565b93506101b483858461017e565b6101bd8361018c565b840190509392505050565b5f6020820190508181035f8301526101e181848661019c565b9050939250505056fea2646970667358221220f01517e1fbaff8af4bd72cb063cccecbacbb00b07354eea7dd52265d355474fb64736f6c634300081c0033");
const COREWRITER_CODE: &[u8] = &hex!(
"608060405234801561000f575f5ffd5b5060043610610029575f3560e01c806317938e131461002d575b5f5ffd5b61004760048036038101906100429190610123565b610049565b005b5f5f90505b61019081101561006557808060010191505061004e565b503373ffffffffffffffffffffffffffffffffffffffff167f8c7f585fb295f7eb1e6aeb8fba61b23a4fe60beda405f0045073b185c74412e383836040516100ae9291906101c8565b60405180910390a25050565b5f5ffd5b5f5ffd5b5f5ffd5b5f5ffd5b5f5ffd5b5f5f83601f8401126100e3576100e26100c2565b5b8235905067ffffffffffffffff811115610100576100ff6100c6565b5b60208301915083600182028301111561011c5761011b6100ca565b5b9250929050565b5f5f60208385031215610139576101386100ba565b5b5f83013567ffffffffffffffff811115610156576101556100be565b5b610162858286016100ce565b92509250509250929050565b5f82825260208201905092915050565b828183375f83830152505050565b5f601f19601f8301169050919050565b5f6101a7838561016e565b93506101b483858461017e565b6101bd8361018c565b840190509392505050565b5f6020820190508181035f8301526101e181848661019c565b9050939250505056fea2646970667358221220f01517e1fbaff8af4bd72cb063cccecbacbb00b07354eea7dd52265d355474fb64736f6c634300081c0033"
);
if self.evm.block().number != U256::from(COREWRITER_ENABLED_BLOCK_NUMBER) {
return Ok(());
@ -161,11 +160,10 @@ where
Ok(())
}
fn execute_transaction_with_commit_condition(
fn execute_transaction_without_commit(
&mut self,
tx: impl ExecutableTx<Self>,
f: impl FnOnce(&ExecutionResult<<Self::Evm as Evm>::HaltReason>) -> CommitChanges,
) -> Result<Option<u64>, BlockExecutionError> {
) -> Result<ResultAndState<<Self::Evm as Evm>::HaltReason>, BlockExecutionError> {
// The sum of the transaction's gas limit, Tg, and the gas utilized in this block prior,
// must be no greater than the block's gasLimit.
let block_available_gas = self.evm.block().gas_limit - self.gas_used;
@ -178,16 +176,20 @@ where
.into());
}
// Execute transaction.
let ResultAndState { result, mut state } = self
.evm
.transact(&tx)
.map_err(|err| BlockExecutionError::evm(err, tx.tx().trie_hash()))?;
if !f(&result).should_commit() {
return Ok(None);
// Execute transaction and return the result
self.evm.transact(&tx).map_err(|err| {
let hash = tx.tx().trie_hash();
BlockExecutionError::evm(err, hash)
})
}
fn commit_transaction(
&mut self,
output: ResultAndState<<Self::Evm as Evm>::HaltReason>,
tx: impl ExecutableTx<Self>,
) -> Result<u64, BlockExecutionError> {
let ResultAndState { result, mut state } = output;
let gas_used = result.gas_used();
// append gas used
@ -215,7 +217,7 @@ where
// Commit the state changes.
self.evm.db_mut().commit(state);
Ok(Some(gas_used))
Ok(gas_used)
}
fn finish(self) -> Result<(Self::Evm, BlockExecutionResult<R::Receipt>), BlockExecutionError> {

View File

@ -7,16 +7,16 @@ use crate::evm::{
spec::HlSpecId,
transaction::HlTxEnv,
};
use reth_evm::{precompiles::PrecompilesMap, Database, EvmEnv, EvmFactory};
use reth_evm::{Database, EvmEnv, EvmFactory, precompiles::PrecompilesMap};
use reth_revm::Context;
use revm::{
Inspector,
context::{
result::{EVMError, HaltReason},
TxEnv,
result::{EVMError, HaltReason},
},
inspector::NoOpInspector,
precompile::{PrecompileSpecId, Precompiles},
Inspector,
};
/// Factory producing [`HlEvm`].

View File

@ -1,6 +1,6 @@
use crate::{
evm::{
api::{ctx::HlContext, HlEvmInner},
api::{HlEvmInner, ctx::HlContext},
spec::HlSpecId,
transaction::HlTxEnv,
},
@ -10,18 +10,18 @@ use alloy_primitives::{Address, Bytes};
use config::HlEvmConfig;
use reth::{
api::FullNodeTypes,
builder::{components::ExecutorBuilder, BuilderContext},
builder::{BuilderContext, components::ExecutorBuilder},
};
use reth_evm::{Database, Evm, EvmEnv};
use revm::{
context::{
result::{EVMError, ExecutionResult, HaltReason, Output, ResultAndState, SuccessReason},
BlockEnv, TxEnv,
},
handler::{instructions::EthInstructions, EthPrecompiles, PrecompileProvider},
interpreter::{interpreter::EthInterpreter, InterpreterResult},
state::EvmState,
Context, ExecuteEvm, InspectEvm, Inspector,
context::{
BlockEnv, TxEnv,
result::{EVMError, ExecutionResult, HaltReason, Output, ResultAndState, SuccessReason},
},
handler::{EthPrecompiles, PrecompileProvider, instructions::EthInstructions},
interpreter::{InterpreterResult, interpreter::EthInterpreter},
state::EvmState,
};
use std::ops::{Deref, DerefMut};
@ -98,11 +98,7 @@ where
&mut self,
tx: Self::Tx,
) -> Result<ResultAndState<Self::HaltReason>, Self::Error> {
if self.inspect {
self.inner.inspect_tx(tx)
} else {
self.inner.transact(tx)
}
if self.inspect { self.inner.inspect_tx(tx) } else { self.inner.transact(tx) }
}
fn transact_system_call(

View File

@ -1,4 +1,4 @@
use alloy_primitives::{address, Address};
use alloy_primitives::{Address, address};
use reth_evm::block::BlockExecutionError;
use revm::{primitives::HashMap, state::Account};

View File

@ -1,5 +1,6 @@
use crate::node::primitives::TransactionSigned;
use alloy_evm::eth::receipt_builder::{ReceiptBuilder, ReceiptBuilderCtx};
use reth_codecs::alloy::transaction::Envelope;
use reth_evm::Evm;
use reth_primitives::Receipt;

View File

@ -4,11 +4,11 @@ use crate::{
pool::HlPoolBuilder,
primitives::{HlBlock, HlPrimitives},
rpc::{
HlEthApiBuilder,
engine_api::{
builder::HlEngineApiBuilder, payload::HlPayloadTypes,
validator::HlPayloadValidatorBuilder,
},
HlEthApiBuilder,
},
storage::HlStorage,
},
@ -20,14 +20,14 @@ use network::HlNetworkBuilder;
use reth::{
api::{FullNodeTypes, NodeTypes},
builder::{
Node, NodeAdapter,
components::{ComponentsBuilder, NoopPayloadServiceBuilder},
rpc::RpcAddOns,
Node, NodeAdapter,
},
};
use reth_engine_primitives::ConsensusEngineHandle;
use std::{marker::PhantomData, sync::Arc};
use tokio::sync::{oneshot, Mutex};
use tokio::sync::{Mutex, oneshot};
pub mod cli;
pub mod consensus;
@ -49,14 +49,23 @@ pub type HlNodeAddOns<N> =
pub struct HlNode {
engine_handle_rx: Arc<Mutex<Option<oneshot::Receiver<ConsensusEngineHandle<HlPayloadTypes>>>>>,
block_source_config: BlockSourceConfig,
debug_cutoff_height: Option<u64>,
}
impl HlNode {
pub fn new(
block_source_config: BlockSourceConfig,
debug_cutoff_height: Option<u64>,
) -> (Self, oneshot::Sender<ConsensusEngineHandle<HlPayloadTypes>>) {
let (tx, rx) = oneshot::channel();
(Self { engine_handle_rx: Arc::new(Mutex::new(Some(rx))), block_source_config }, tx)
(
Self {
engine_handle_rx: Arc::new(Mutex::new(Some(rx))),
block_source_config,
debug_cutoff_height,
},
tx,
)
}
}
@ -84,6 +93,7 @@ impl HlNode {
.network(HlNetworkBuilder {
engine_handle_rx: self.engine_handle_rx.clone(),
block_source_config: self.block_source_config.clone(),
debug_cutoff_height: self.debug_cutoff_height,
})
.consensus(HlConsensusBuilder::default())
}


@ -8,7 +8,7 @@ use reth_primitives::NodePrimitives;
use service::{BlockMsg, ImportEvent, Outcome};
use std::{
fmt,
task::{ready, Context, Poll},
task::{Context, Poll, ready},
};
use crate::node::network::HlNewBlock;


@ -1,17 +1,17 @@
use super::handle::ImportHandle;
use crate::{
HlBlock, HlBlockBody,
consensus::HlConsensus,
node::{
network::HlNewBlock,
rpc::engine_api::payload::HlPayloadTypes,
types::{BlockAndReceipts, EvmBlock},
},
HlBlock, HlBlockBody,
};
use alloy_consensus::{BlockBody, Header};
use alloy_primitives::U128;
use alloy_rpc_types::engine::{ForkchoiceState, PayloadStatusEnum};
use futures::{future::Either, stream::FuturesUnordered, StreamExt};
use futures::{StreamExt, future::Either, stream::FuturesUnordered};
use reth_engine_primitives::{ConsensusEngineHandle, EngineTypes};
use reth_eth_wire::NewBlock;
use reth_network::{


@ -1,20 +1,20 @@
#![allow(clippy::owned_cow)]
use crate::{
HlBlock,
consensus::HlConsensus,
node::{
network::block_import::{handle::ImportHandle, service::ImportService, HlBlockImport},
HlNode,
network::block_import::{HlBlockImport, handle::ImportHandle, service::ImportService},
primitives::HlPrimitives,
rpc::engine_api::payload::HlPayloadTypes,
types::ReadPrecompileCalls,
HlNode,
},
pseudo_peer::{start_pseudo_peer, BlockSourceConfig},
HlBlock,
pseudo_peer::{BlockSourceConfig, start_pseudo_peer},
};
use alloy_rlp::{Decodable, Encodable};
use reth::{
api::{FullNodeTypes, TxTy},
builder::{components::NetworkBuilder, BuilderContext},
builder::{BuilderContext, components::NetworkBuilder},
transaction_pool::{PoolTransaction, TransactionPool},
};
use reth_discv4::NodeRecord;
@ -26,7 +26,7 @@ use reth_network_api::PeersInfo;
use reth_provider::StageCheckpointReader;
use reth_stages_types::StageId;
use std::sync::Arc;
use tokio::sync::{mpsc, oneshot, Mutex};
use tokio::sync::{Mutex, mpsc, oneshot};
use tracing::info;
pub mod block_import;
@ -38,8 +38,8 @@ pub struct HlNewBlock(pub NewBlock<HlBlock>);
mod rlp {
use super::*;
use crate::{
node::primitives::{BlockBody, TransactionSigned},
HlBlockBody,
node::primitives::{BlockBody, TransactionSigned},
};
use alloy_consensus::{BlobTransactionSidecar, Header};
use alloy_primitives::{Address, U128};
@ -142,6 +142,8 @@ pub struct HlNetworkBuilder {
Arc<Mutex<Option<oneshot::Receiver<ConsensusEngineHandle<HlPayloadTypes>>>>>,
pub(crate) block_source_config: BlockSourceConfig,
pub(crate) debug_cutoff_height: Option<u64>,
}
impl HlNetworkBuilder {
@ -203,6 +205,7 @@ where
pool: Pool,
) -> eyre::Result<Self::Network> {
let block_source_config = self.block_source_config.clone();
let debug_cutoff_height = self.debug_cutoff_height;
let handle =
ctx.start_network(NetworkManager::builder(self.network_config(ctx)?).await?, pool);
let local_node_record = handle.local_node_record();
@ -223,6 +226,7 @@ where
block_source_config
.create_cached_block_source((*chain_spec).clone(), next_block_number)
.await,
debug_cutoff_height,
)
.await
.unwrap();


@ -6,12 +6,12 @@
//! The Ethereum transaction pool only supports TransactionSigned (EthereumTxEnvelope<TxEip4844>),
//! hence this placeholder transaction pool implementation.
use crate::node::{primitives::TransactionSigned, HlNode};
use crate::node::{HlNode, primitives::TransactionSigned};
use alloy_consensus::{
error::ValueError, EthereumTxEnvelope, Transaction as TransactionTrait, TxEip4844,
EthereumTxEnvelope, Transaction as TransactionTrait, TxEip4844, error::ValueError,
};
use alloy_eips::{eip7702::SignedAuthorization, Typed2718};
use alloy_primitives::{Address, Bytes, ChainId, TxHash, TxKind, B256, U256};
use alloy_eips::{Typed2718, eip7702::SignedAuthorization};
use alloy_primitives::{Address, B256, Bytes, ChainId, TxHash, TxKind, U256};
use alloy_rpc_types::AccessList;
use reth::{
api::FullNodeTypes, builder::components::PoolBuilder, transaction_pool::PoolTransaction,
@ -19,7 +19,7 @@ use reth::{
use reth_ethereum_primitives::PooledTransactionVariant;
use reth_primitives::Recovered;
use reth_primitives_traits::InMemorySize;
use reth_transaction_pool::{noop::NoopTransactionPool, EthPoolTransaction};
use reth_transaction_pool::{EthPoolTransaction, noop::NoopTransactionPool};
use std::sync::Arc;
pub struct HlPoolBuilder;


@ -1,33 +1,35 @@
//! HlNodePrimitives::TransactionSigned; the same as the Ethereum transaction type,
//! except that it supports a pseudo signer for system transactions.
use std::convert::Infallible;
use crate::evm::transaction::HlTxEnv;
use alloy_consensus::{
crypto::RecoveryError, error::ValueError, EthereumTxEnvelope, EthereumTypedTransaction,
SignableTransaction, Signed, Transaction as TransactionTrait, TransactionEnvelope, TxEip1559,
TxEip2930, TxEip4844, TxEip4844WithSidecar, TxEip7702, TxLegacy, TxType, TypedTransaction,
TxEip2930, TxEip4844, TxEip7702, TxLegacy, TxType, TypedTransaction, crypto::RecoveryError,
error::ValueError, transaction::TxHashRef,
};
use alloy_eips::{eip7594::BlobTransactionSidecarVariant, Encodable2718};
use alloy_eips::Encodable2718;
use alloy_network::TxSigner;
use alloy_primitives::{address, Address, TxHash, U256};
use alloy_primitives::{Address, TxHash, U256, address};
use alloy_rpc_types::{Transaction, TransactionInfo, TransactionRequest};
use alloy_signer::Signature;
use reth_codecs::alloy::transaction::FromTxCompact;
use reth_codecs::alloy::transaction::{Envelope, FromTxCompact};
use reth_db::{
table::{Compress, Decompress},
DatabaseError,
table::{Compress, Decompress},
};
use reth_ethereum_primitives::PooledTransactionVariant;
use reth_evm::FromRecoveredTx;
use reth_primitives::Recovered;
use reth_primitives_traits::{
serde_bincode_compat::SerdeBincodeCompat, InMemorySize, SignedTransaction, SignerRecoverable,
InMemorySize, SignedTransaction, SignerRecoverable, serde_bincode_compat::SerdeBincodeCompat,
};
use reth_rpc_eth_api::{
transaction::{FromConsensusTx, TryIntoTxEnv},
EthTxEnvError, SignTxRequestError, SignableTxRequest, TryIntoSimTx,
transaction::{FromConsensusTx, TryIntoTxEnv},
};
use revm::context::{BlockEnv, CfgEnv, TxEnv};
use crate::evm::transaction::HlTxEnv;
type InnerType = alloy_consensus::EthereumTxEnvelope<TxEip4844>;
#[derive(Debug, Clone, TransactionEnvelope)]
@ -46,6 +48,12 @@ fn s_to_address(s: U256) -> Address {
Address::from_slice(&buf)
}
impl TxHashRef for TransactionSigned {
fn tx_hash(&self) -> &TxHash {
self.inner().tx_hash()
}
}
impl SignerRecoverable for TransactionSigned {
fn recover_signer(&self) -> Result<Address, RecoveryError> {
if self.is_system_transaction() {
@ -69,11 +77,7 @@ impl SignerRecoverable for TransactionSigned {
}
}
impl SignedTransaction for TransactionSigned {
fn tx_hash(&self) -> &TxHash {
self.inner().tx_hash()
}
}
impl SignedTransaction for TransactionSigned {}
// ------------------------------------------------------------
// NOTE: All lines below are just wrappers for the inner type.
@ -157,16 +161,8 @@ impl TransactionSigned {
}
}
pub fn signature(&self) -> &Signature {
self.inner().signature()
}
pub const fn tx_type(&self) -> TxType {
self.inner().tx_type()
}
pub fn is_system_transaction(&self) -> bool {
self.gas_price().is_some() && self.gas_price().unwrap() == 0
matches!(self.gas_price(), Some(0))
}
}
@ -187,24 +183,16 @@ impl SerdeBincodeCompat for TransactionSigned {
pub type BlockBody = alloy_consensus::BlockBody<TransactionSigned>;
impl TryFrom<TransactionSigned>
for EthereumTxEnvelope<TxEip4844WithSidecar<BlobTransactionSidecarVariant>>
{
type Error = <InnerType as TryInto<
EthereumTxEnvelope<TxEip4844WithSidecar<BlobTransactionSidecarVariant>>,
>>::Error;
impl TryFrom<TransactionSigned> for PooledTransactionVariant {
type Error = <InnerType as TryInto<PooledTransactionVariant>>::Error;
fn try_from(value: TransactionSigned) -> Result<Self, Self::Error> {
value.into_inner().try_into()
}
}
impl From<EthereumTxEnvelope<TxEip4844WithSidecar<BlobTransactionSidecarVariant>>>
for TransactionSigned
{
fn from(
value: EthereumTxEnvelope<TxEip4844WithSidecar<BlobTransactionSidecarVariant>>,
) -> Self {
impl From<PooledTransactionVariant> for TransactionSigned {
fn from(value: PooledTransactionVariant) -> Self {
Self::Default(value.into())
}
}
@ -266,9 +254,17 @@ impl TryIntoTxEnv<HlTxEnv<TxEnv>> for TransactionRequest {
impl FromConsensusTx<TransactionSigned> for Transaction {
type TxInfo = TransactionInfo;
type Err = Infallible;
fn from_consensus_tx(tx: TransactionSigned, signer: Address, tx_info: Self::TxInfo) -> Self {
Self::from_transaction(Recovered::new_unchecked(tx.into_inner().into(), signer), tx_info)
fn from_consensus_tx(
tx: TransactionSigned,
signer: Address,
tx_info: Self::TxInfo,
) -> Result<Self, Self::Err> {
Ok(Self::from_transaction(
Recovered::new_unchecked(tx.into_inner().into(), signer),
tx_info,
))
}
}
@ -277,26 +273,7 @@ impl SignableTxRequest<TransactionSigned> for TransactionRequest {
self,
signer: impl TxSigner<Signature> + Send,
) -> Result<TransactionSigned, SignTxRequestError> {
let mut tx =
self.build_typed_tx().map_err(|_| SignTxRequestError::InvalidTransactionRequest)?;
let signature = signer.sign_transaction(&mut tx).await?;
let signed = match tx {
EthereumTypedTransaction::Legacy(tx) => {
EthereumTxEnvelope::Legacy(tx.into_signed(signature))
}
EthereumTypedTransaction::Eip2930(tx) => {
EthereumTxEnvelope::Eip2930(tx.into_signed(signature))
}
EthereumTypedTransaction::Eip1559(tx) => {
EthereumTxEnvelope::Eip1559(tx.into_signed(signature))
}
EthereumTypedTransaction::Eip4844(tx) => {
EthereumTxEnvelope::Eip4844(TxEip4844::from(tx).into_signed(signature))
}
EthereumTypedTransaction::Eip7702(tx) => {
EthereumTxEnvelope::Eip7702(tx.into_signed(signature))
}
};
let signed = SignableTxRequest::<InnerType>::try_build_and_sign(self, signer).await?;
Ok(TransactionSigned::Default(signed))
}
}


@ -1,12 +1,12 @@
use crate::node::rpc::{HlEthApi, HlRpcNodeCore};
use reth::rpc::server_types::eth::{
builder::config::PendingBlockKind, error::FromEvmError, EthApiError, PendingBlock,
EthApiError, PendingBlock, builder::config::PendingBlockKind, error::FromEvmError,
};
use reth_rpc_eth_api::{
helpers::{
pending_block::PendingEnvBuilder, EthBlocks, LoadBlock, LoadPendingBlock, LoadReceipt,
},
RpcConvert,
helpers::{
EthBlocks, LoadBlock, LoadPendingBlock, LoadReceipt, pending_block::PendingEnvBuilder,
},
};
impl<N, Rpc> EthBlocks for HlEthApi<N, Rpc>
@ -29,7 +29,7 @@ impl<N, Rpc> LoadPendingBlock for HlEthApi<N, Rpc>
where
N: HlRpcNodeCore,
EthApiError: FromEvmError<N::Evm>,
Rpc: RpcConvert<Primitives = N::Primitives, Error = EthApiError>,
Rpc: RpcConvert<Primitives = N::Primitives>,
{
#[inline]
fn pending_block(&self) -> &tokio::sync::Mutex<Option<PendingBlock<N::Primitives>>> {
@ -50,7 +50,6 @@ where
impl<N, Rpc> LoadReceipt for HlEthApi<N, Rpc>
where
N: HlRpcNodeCore,
EthApiError: FromEvmError<N::Evm>,
Rpc: RpcConvert<Primitives = N::Primitives, Error = EthApiError>,
{
}


@ -1,17 +1,19 @@
use core::fmt;
use super::{HlEthApi, HlRpcNodeCore};
use crate::{node::evm::apply_precompiles, HlBlock};
use crate::{HlBlock, node::evm::apply_precompiles};
use alloy_consensus::transaction::TxHashRef;
use alloy_evm::Evm;
use alloy_primitives::B256;
use reth::rpc::server_types::eth::EthApiError;
use reth_evm::{ConfigureEvm, Database, EvmEnvFor, TxEnvFor};
use reth_evm::{ConfigureEvm, Database, EvmEnvFor, HaltReasonFor, InspectorFor, SpecFor, TxEnvFor};
use reth_primitives::{NodePrimitives, Recovered};
use reth_primitives_traits::SignedTransaction;
use reth_provider::{ProviderError, ProviderTx};
use reth_rpc_eth_api::{
helpers::{estimate::EstimateCall, Call, EthCall},
FromEvmError, RpcConvert, RpcNodeCore,
helpers::{Call, EthCall},
};
use revm::DatabaseCommit;
use revm::{DatabaseCommit, context::result::ResultAndState};
impl<N> HlRpcNodeCore for N where N: RpcNodeCore<Primitives: NodePrimitives<Block = HlBlock>> {}
@ -19,15 +21,12 @@ impl<N, Rpc> EthCall for HlEthApi<N, Rpc>
where
N: HlRpcNodeCore,
EthApiError: FromEvmError<N::Evm>,
Rpc: RpcConvert<Primitives = N::Primitives, Error = EthApiError, TxEnv = TxEnvFor<N::Evm>>,
{
}
impl<N, Rpc> EstimateCall for HlEthApi<N, Rpc>
where
N: HlRpcNodeCore,
EthApiError: FromEvmError<N::Evm>,
Rpc: RpcConvert<Primitives = N::Primitives, Error = EthApiError, TxEnv = TxEnvFor<N::Evm>>,
Rpc: RpcConvert<
Primitives = N::Primitives,
Error = EthApiError,
TxEnv = TxEnvFor<N::Evm>,
Spec = SpecFor<N::Evm>,
>,
{
}
@ -35,7 +34,12 @@ impl<N, Rpc> Call for HlEthApi<N, Rpc>
where
N: HlRpcNodeCore,
EthApiError: FromEvmError<N::Evm>,
Rpc: RpcConvert<Primitives = N::Primitives, Error = EthApiError, TxEnv = TxEnvFor<N::Evm>>,
Rpc: RpcConvert<
Primitives = N::Primitives,
Error = EthApiError,
TxEnv = TxEnvFor<N::Evm>,
Spec = SpecFor<N::Evm>,
>,
{
#[inline]
fn call_gas_limit(&self) -> u64 {
@ -47,6 +51,46 @@ where
self.inner.eth_api.max_simulate_blocks()
}
fn transact<DB>(
&self,
db: DB,
evm_env: EvmEnvFor<Self::Evm>,
tx_env: TxEnvFor<Self::Evm>,
) -> Result<ResultAndState<HaltReasonFor<Self::Evm>>, Self::Error>
where
DB: Database<Error = ProviderError> + fmt::Debug,
{
let block_number = evm_env.block_env().number;
let hl_extras = self.get_hl_extras(block_number.to::<u64>().into())?;
let mut evm = self.evm_config().evm_with_env(db, evm_env);
apply_precompiles(&mut evm, &hl_extras);
let res = evm.transact(tx_env).map_err(Self::Error::from_evm_err)?;
Ok(res)
}
fn transact_with_inspector<DB, I>(
&self,
db: DB,
evm_env: EvmEnvFor<Self::Evm>,
tx_env: TxEnvFor<Self::Evm>,
inspector: I,
) -> Result<ResultAndState<HaltReasonFor<Self::Evm>>, Self::Error>
where
DB: Database<Error = ProviderError> + fmt::Debug,
I: InspectorFor<Self::Evm, DB>,
{
let block_number = evm_env.block_env().number;
let hl_extras = self.get_hl_extras(block_number.to::<u64>().into())?;
let mut evm = self.evm_config().evm_with_env_and_inspector(db, evm_env, inspector);
apply_precompiles(&mut evm, &hl_extras);
let res = evm.transact(tx_env).map_err(Self::Error::from_evm_err)?;
Ok(res)
}
fn replay_transactions_until<'a, DB, I>(
&self,
db: &mut DB,
@ -59,7 +103,7 @@ where
I: IntoIterator<Item = Recovered<&'a ProviderTx<Self::Provider>>>,
{
let block_number = evm_env.block_env().number;
let hl_extras = self.get_hl_extras(block_number.try_into().unwrap())?;
let hl_extras = self.get_hl_extras(block_number.to::<u64>().into())?;
let mut evm = self.evm_config().evm_with_env(db, evm_env);
apply_precompiles(&mut evm, &hl_extras);


@ -9,7 +9,7 @@ use alloy_primitives::B256;
use alloy_rpc_types_engine::PayloadError;
use reth::{
api::{FullNodeComponents, NodeTypes},
builder::{rpc::PayloadValidatorBuilder, AddOnsContext},
builder::{AddOnsContext, rpc::PayloadValidatorBuilder},
};
use reth_engine_primitives::{ExecutionPayload, PayloadValidator};
use reth_payload_primitives::NewPayloadError;

src/node/rpc/estimate.rs (new file, 212 lines)

@ -0,0 +1,212 @@
use super::{HlEthApi, HlRpcNodeCore, apply_precompiles};
use alloy_evm::overrides::{StateOverrideError, apply_state_overrides};
use alloy_network::TransactionBuilder;
use alloy_primitives::{TxKind, U256};
use alloy_rpc_types_eth::state::StateOverride;
use reth_chainspec::MIN_TRANSACTION_GAS;
use reth_errors::ProviderError;
use reth_evm::{ConfigureEvm, Evm, EvmEnvFor, SpecFor, TransactionEnv, TxEnvFor};
use reth_revm::{database::StateProviderDatabase, db::CacheDB};
use reth_rpc_convert::{RpcConvert, RpcTxReq};
use reth_rpc_eth_api::{
AsEthApiError, IntoEthApiError, RpcNodeCore,
helpers::{
Call,
estimate::{EstimateCall, update_estimated_gas_range},
},
};
use reth_rpc_eth_types::{
EthApiError, RevertError, RpcInvalidTransactionError,
error::{FromEvmError, api::FromEvmHalt},
};
use reth_rpc_server_types::constants::gas_oracle::{CALL_STIPEND_GAS, ESTIMATE_GAS_ERROR_RATIO};
use reth_storage_api::StateProvider;
use revm::context_interface::{Transaction, result::ExecutionResult};
use tracing::trace;
impl<N, Rpc> EstimateCall for HlEthApi<N, Rpc>
where
Self: Call,
N: HlRpcNodeCore,
EthApiError: FromEvmError<N::Evm> + From<StateOverrideError<ProviderError>>,
Rpc: RpcConvert<
Primitives = N::Primitives,
Error = EthApiError,
TxEnv = TxEnvFor<N::Evm>,
Spec = SpecFor<N::Evm>,
>,
{
// Modified version that adds `apply_precompiles`; comments are stripped out.
fn estimate_gas_with<S>(
&self,
mut evm_env: EvmEnvFor<Self::Evm>,
mut request: RpcTxReq<<Self::RpcConvert as RpcConvert>::Network>,
state: S,
state_override: Option<StateOverride>,
) -> Result<U256, Self::Error>
where
S: StateProvider,
{
evm_env.cfg_env.disable_eip3607 = true;
evm_env.cfg_env.disable_base_fee = true;
request.as_mut().take_nonce();
let tx_request_gas_limit = request.as_ref().gas_limit();
let tx_request_gas_price = request.as_ref().gas_price();
let max_gas_limit = evm_env
.cfg_env
.tx_gas_limit_cap
.map_or(evm_env.block_env.gas_limit, |cap| cap.min(evm_env.block_env.gas_limit));
let mut highest_gas_limit = tx_request_gas_limit
.map(|mut tx_gas_limit| {
if max_gas_limit < tx_gas_limit {
tx_gas_limit = max_gas_limit;
}
tx_gas_limit
})
.unwrap_or(max_gas_limit);
let mut db = CacheDB::new(StateProviderDatabase::new(state));
if let Some(state_override) = state_override {
apply_state_overrides(state_override, &mut db).map_err(
|err: StateOverrideError<ProviderError>| {
let eth_api_error: EthApiError = EthApiError::from(err);
Self::Error::from(eth_api_error)
},
)?;
}
let mut tx_env = self.create_txn_env(&evm_env, request, &mut db)?;
let mut is_basic_transfer = false;
if tx_env.input().is_empty()
&& let TxKind::Call(to) = tx_env.kind()
&& let Ok(code) = db.db.account_code(&to) {
is_basic_transfer = code.map(|code| code.is_empty()).unwrap_or(true);
}
if tx_env.gas_price() > 0 {
highest_gas_limit =
highest_gas_limit.min(self.caller_gas_allowance(&mut db, &evm_env, &tx_env)?);
}
tx_env.set_gas_limit(tx_env.gas_limit().min(highest_gas_limit));
let block_number = evm_env.block_env().number;
let hl_extras = self.get_hl_extras(block_number.to::<u64>().into())?;
let mut evm = self.evm_config().evm_with_env(&mut db, evm_env);
apply_precompiles(&mut evm, &hl_extras);
if is_basic_transfer {
let mut min_tx_env = tx_env.clone();
min_tx_env.set_gas_limit(MIN_TRANSACTION_GAS);
if let Ok(res) = evm.transact(min_tx_env).map_err(Self::Error::from_evm_err)
&& res.result.is_success() {
return Ok(U256::from(MIN_TRANSACTION_GAS));
}
}
trace!(target: "rpc::eth::estimate", ?tx_env, gas_limit = tx_env.gas_limit(), is_basic_transfer, "Starting gas estimation");
let mut res = match evm.transact(tx_env.clone()).map_err(Self::Error::from_evm_err) {
Err(err)
if err.is_gas_too_high() &&
(tx_request_gas_limit.is_some() || tx_request_gas_price.is_some()) =>
{
return Self::map_out_of_gas_err(&mut evm, tx_env, max_gas_limit);
}
Err(err) if err.is_gas_too_low() => {
return Err(RpcInvalidTransactionError::GasRequiredExceedsAllowance {
gas_limit: tx_env.gas_limit(),
}
.into_eth_err());
}
ethres => ethres?,
};
let gas_refund = match res.result {
ExecutionResult::Success { gas_refunded, .. } => gas_refunded,
ExecutionResult::Halt { reason, .. } => {
return Err(Self::Error::from_evm_halt(reason, tx_env.gas_limit()));
}
ExecutionResult::Revert { output, .. } => {
return if tx_request_gas_limit.is_some() || tx_request_gas_price.is_some() {
Self::map_out_of_gas_err(&mut evm, tx_env, max_gas_limit)
} else {
Err(RpcInvalidTransactionError::Revert(RevertError::new(output)).into_eth_err())
};
}
};
highest_gas_limit = tx_env.gas_limit();
let mut gas_used = res.result.gas_used();
let mut lowest_gas_limit = gas_used.saturating_sub(1);
let optimistic_gas_limit = (gas_used + gas_refund + CALL_STIPEND_GAS) * 64 / 63;
if optimistic_gas_limit < highest_gas_limit {
let mut optimistic_tx_env = tx_env.clone();
optimistic_tx_env.set_gas_limit(optimistic_gas_limit);
res = evm.transact(optimistic_tx_env).map_err(Self::Error::from_evm_err)?;
gas_used = res.result.gas_used();
update_estimated_gas_range(
res.result,
optimistic_gas_limit,
&mut highest_gas_limit,
&mut lowest_gas_limit,
)?;
};
let mut mid_gas_limit = std::cmp::min(
gas_used * 3,
((highest_gas_limit as u128 + lowest_gas_limit as u128) / 2) as u64,
);
trace!(target: "rpc::eth::estimate", ?highest_gas_limit, ?lowest_gas_limit, ?mid_gas_limit, "Starting binary search for gas");
while lowest_gas_limit + 1 < highest_gas_limit {
if (highest_gas_limit - lowest_gas_limit) as f64 / (highest_gas_limit as f64) <
ESTIMATE_GAS_ERROR_RATIO
{
break;
};
let mut mid_tx_env = tx_env.clone();
mid_tx_env.set_gas_limit(mid_gas_limit);
match evm.transact(mid_tx_env).map_err(Self::Error::from_evm_err) {
Err(err) if err.is_gas_too_high() => {
highest_gas_limit = mid_gas_limit;
}
Err(err) if err.is_gas_too_low() => {
lowest_gas_limit = mid_gas_limit;
}
ethres => {
res = ethres?;
update_estimated_gas_range(
res.result,
mid_gas_limit,
&mut highest_gas_limit,
&mut lowest_gas_limit,
)?;
}
}
mid_gas_limit = ((highest_gas_limit as u128 + lowest_gas_limit as u128) / 2) as u64;
}
Ok(U256::from(highest_gas_limit))
}
}
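The first successful run above seeds the binary search: the gas actually used (plus refund and call stipend) is scaled by 64/63 to account for the gas a CALL cannot forward under EIP-150, and midpoints are computed in u128 so the sum cannot overflow. A minimal standalone sketch of that arithmetic with purely illustrative numbers (not code from this repository):

fn optimistic_gas_limit(gas_used: u64, gas_refund: u64, call_stipend: u64) -> u64 {
    // Add back the refund and the call stipend, then scale by 64/63 for the
    // 1/64th of gas that cannot be forwarded to a call (EIP-150).
    (gas_used + gas_refund + call_stipend) * 64 / 63
}

fn midpoint(highest_gas_limit: u64, lowest_gas_limit: u64) -> u64 {
    // Widen to u128 so the addition cannot overflow near u64::MAX.
    ((highest_gas_limit as u128 + lowest_gas_limit as u128) / 2) as u64
}

fn main() {
    // Illustrative values only.
    assert_eq!(optimistic_gas_limit(21_000, 0, 2_300), 23_669);
    assert_eq!(midpoint(30_000_000, 21_000), 15_010_500);
}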


@ -1,40 +1,43 @@
use crate::{
HlBlock, HlPrimitives,
chainspec::HlChainSpec,
node::{evm::apply_precompiles, types::HlExtras},
HlBlock, HlPrimitives,
};
use alloy_eips::BlockId;
use alloy_evm::Evm;
use alloy_network::Ethereum;
use alloy_primitives::U256;
use reth::{
api::{FullNodeTypes, HeaderTy, NodeTypes, PrimitivesTy},
builder::{
rpc::{EthApiBuilder, EthApiCtx},
FullNodeComponents,
rpc::{EthApiBuilder, EthApiCtx},
},
rpc::{
eth::{core::EthApiInner, DevSigner, FullEthApiServer},
eth::{DevSigner, FullEthApiServer, core::EthApiInner},
server_types::eth::{
receipt::EthReceiptConverter, EthApiError, EthStateCache, FeeHistoryCache,
GasPriceOracle,
EthApiError, EthStateCache, FeeHistoryCache, GasPriceOracle,
receipt::EthReceiptConverter,
},
},
tasks::{
pool::{BlockingTaskGuard, BlockingTaskPool},
TaskSpawner,
pool::{BlockingTaskGuard, BlockingTaskPool},
},
};
use reth_evm::{ConfigureEvm, Database, EvmEnvFor, HaltReasonFor, InspectorFor, TxEnvFor};
use reth_primitives::NodePrimitives;
use reth_provider::{BlockReader, ChainSpecProvider, ProviderError, ProviderHeader, ProviderTx};
use reth_provider::{
BlockReaderIdExt, ChainSpecProvider, ProviderError, ProviderHeader, ProviderTx,
};
use reth_rpc::RpcTypes;
use reth_rpc_eth_api::{
helpers::{
pending_block::BuildPendingEnv, spec::SignersForApi, AddDevSigners, EthApiSpec, EthFees,
EthState, LoadFee, LoadState, SpawnBlocking, Trace,
},
EthApiTypes, FromEvmError, RpcConvert, RpcConverter, RpcNodeCore, RpcNodeCoreExt,
SignableTxRequest,
helpers::{
AddDevSigners, EthApiSpec, EthFees, EthState, LoadFee, LoadPendingBlock, LoadState,
SpawnBlocking, Trace, pending_block::BuildPendingEnv, spec::SignersForApi,
},
};
use revm::context::result::ResultAndState;
use std::{fmt, marker::PhantomData, sync::Arc};
@ -42,6 +45,8 @@ use std::{fmt, marker::PhantomData, sync::Arc};
mod block;
mod call;
pub mod engine_api;
mod estimate;
pub mod precompile;
mod transaction;
pub trait HlRpcNodeCore: RpcNodeCore<Primitives: NodePrimitives<Block = HlBlock>> {}
@ -55,12 +60,17 @@ pub(crate) struct HlEthApiInner<N: HlRpcNodeCore, Rpc: RpcConvert> {
type HlRpcConvert<N, NetworkT> =
RpcConverter<NetworkT, <N as FullNodeComponents>::Evm, EthReceiptConverter<HlChainSpec>>;
#[derive(Clone)]
pub struct HlEthApi<N: HlRpcNodeCore, Rpc: RpcConvert> {
/// Gateway to node's core components.
pub(crate) inner: Arc<HlEthApiInner<N, Rpc>>,
}
impl<N: HlRpcNodeCore, Rpc: RpcConvert> Clone for HlEthApi<N, Rpc> {
fn clone(&self) -> Self {
Self { inner: self.inner.clone() }
}
}
impl<N, Rpc> fmt::Debug for HlEthApi<N, Rpc>
where
N: HlRpcNodeCore,
@ -74,7 +84,7 @@ where
impl<N, Rpc> EthApiTypes for HlEthApi<N, Rpc>
where
N: HlRpcNodeCore,
Rpc: RpcConvert<Primitives = N::Primitives, Error = EthApiError>,
Rpc: RpcConvert<Primitives = N::Primitives>,
{
type Error = EthApiError;
type NetworkTypes = Rpc::Network;
@ -150,7 +160,7 @@ where
impl<N, Rpc> SpawnBlocking for HlEthApi<N, Rpc>
where
N: HlRpcNodeCore,
Rpc: RpcConvert<Primitives = N::Primitives, Error = EthApiError>,
Rpc: RpcConvert<Primitives = N::Primitives>,
{
#[inline]
fn io_task_spawner(&self) -> impl TaskSpawner {
@ -189,6 +199,7 @@ impl<N, Rpc> LoadState for HlEthApi<N, Rpc>
where
N: HlRpcNodeCore,
Rpc: RpcConvert<Primitives = N::Primitives, Error = EthApiError>,
Self: LoadPendingBlock,
{
}
@ -196,6 +207,7 @@ impl<N, Rpc> EthState for HlEthApi<N, Rpc>
where
N: HlRpcNodeCore,
Rpc: RpcConvert<Primitives = N::Primitives, Error = EthApiError>,
Self: LoadPendingBlock,
{
#[inline]
fn max_proof_window(&self) -> u64 {
@ -229,7 +241,7 @@ where
I: InspectorFor<Self::Evm, DB>,
{
let block_number = evm_env.block_env().number;
let hl_extras = self.get_hl_extras(block_number.try_into().unwrap())?;
let hl_extras = self.get_hl_extras(block_number.to::<u64>().into())?;
let mut evm = self.evm_config().evm_with_env_and_inspector(db, evm_env, inspector);
apply_precompiles(&mut evm, &hl_extras);
@ -242,10 +254,10 @@ where
N: HlRpcNodeCore,
Rpc: RpcConvert<Primitives = N::Primitives, Error = EthApiError>,
{
fn get_hl_extras(&self, block_number: u64) -> Result<HlExtras, ProviderError> {
fn get_hl_extras(&self, block: BlockId) -> Result<HlExtras, ProviderError> {
Ok(self
.provider()
.block_by_number(block_number)?
.block_by_id(block)?
.map(|block| HlExtras {
read_precompile_calls: block.body.read_precompile_calls.clone(),
highest_precompile_address: block.body.highest_precompile_address,


@ -0,0 +1,44 @@
use alloy_eips::BlockId;
use jsonrpsee::proc_macros::rpc;
use jsonrpsee_core::{RpcResult, async_trait};
use reth_rpc_convert::RpcConvert;
use reth_rpc_eth_types::EthApiError;
use tracing::trace;
use crate::node::{
rpc::{HlEthApi, HlRpcNodeCore},
types::HlExtras,
};
/// A custom RPC trait for fetching block precompile data.
#[rpc(server, namespace = "eth")]
#[async_trait]
pub trait HlBlockPrecompileApi {
/// Fetches precompile data for a given block.
#[method(name = "blockPrecompileData")]
async fn block_precompile_data(&self, block: BlockId) -> RpcResult<HlExtras>;
}
pub struct HlBlockPrecompileExt<N: HlRpcNodeCore, Rpc: RpcConvert> {
eth_api: HlEthApi<N, Rpc>,
}
impl<N: HlRpcNodeCore, Rpc: RpcConvert> HlBlockPrecompileExt<N, Rpc> {
/// Creates a new instance of the [`HlBlockPrecompileExt`].
pub fn new(eth_api: HlEthApi<N, Rpc>) -> Self {
Self { eth_api }
}
}
#[async_trait]
impl<N, Rpc> HlBlockPrecompileApiServer for HlBlockPrecompileExt<N, Rpc>
where
N: HlRpcNodeCore,
Rpc: RpcConvert<Primitives = N::Primitives, Error = EthApiError>,
{
async fn block_precompile_data(&self, block: BlockId) -> RpcResult<HlExtras> {
trace!(target: "rpc::eth", ?block, "Serving eth_blockPrecompileData");
let hl_extras = self.eth_api.get_hl_extras(block).map_err(EthApiError::from)?;
Ok(hl_extras)
}
}
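For illustration, a client could call the new method over the node's HTTP JSON-RPC endpoint roughly as follows. This is a sketch, not part of the change; it assumes jsonrpsee with the http-client feature, tokio, serde_json, and the default local RPC URL:

use jsonrpsee::{core::client::ClientT, http_client::HttpClientBuilder, rpc_params};

#[tokio::main]
async fn main() -> eyre::Result<()> {
    // Assumed local RPC endpoint; adjust to the node's actual --http settings.
    let client = HttpClientBuilder::default().build("http://127.0.0.1:8545")?;
    // The #[rpc] macro above registers the method as eth_blockPrecompileData.
    let extras: serde_json::Value =
        client.request("eth_blockPrecompileData", rpc_params!["latest"]).await?;
    println!("{extras}");
    Ok(())
}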


@ -1,9 +1,11 @@
use std::time::Duration;
use crate::node::rpc::{HlEthApi, HlRpcNodeCore};
use alloy_primitives::{Bytes, B256};
use alloy_primitives::{B256, Bytes};
use reth::rpc::server_types::eth::EthApiError;
use reth_rpc_eth_api::{
helpers::{spec::SignersForRpc, EthTransactions, LoadTransaction},
RpcConvert,
helpers::{EthTransactions, LoadTransaction, spec::SignersForRpc},
};
impl<N, Rpc> LoadTransaction for HlEthApi<N, Rpc>
@ -25,4 +27,8 @@ where
async fn send_raw_transaction(&self, _tx: Bytes) -> Result<B256, Self::Error> {
unreachable!()
}
fn send_raw_transaction_sync_timeout(&self) -> Duration {
self.inner.eth_api.send_raw_transaction_sync_timeout()
}
}


@ -5,6 +5,8 @@ use std::collections::BTreeMap;
use crate::chainspec::{MAINNET_CHAIN_ID, TESTNET_CHAIN_ID};
mod patch;
#[derive(Debug, Clone, Serialize, Deserialize)]
struct EvmContract {
address: Address,
@ -58,5 +60,10 @@ pub(crate) fn erc20_contract_to_spot_token(chain_id: u64) -> Result<BTreeMap<Add
map.insert(evm_contract.address, SpotId { index: token.index });
}
}
if chain_id == TESTNET_CHAIN_ID {
patch::patch_testnet_spot_meta(&mut map);
}
Ok(map)
}


@ -0,0 +1,8 @@
use crate::node::spot_meta::SpotId;
use alloy_primitives::{Address, address};
use std::collections::BTreeMap;
/// Testnet-specific fix for #67
pub(super) fn patch_testnet_spot_meta(map: &mut BTreeMap<Address, SpotId>) {
map.insert(address!("0xd9cbec81df392a88aeff575e962d149d57f4d6bc"), SpotId { index: 0 });
}


@ -1,22 +1,22 @@
use crate::{
HlBlock, HlBlockBody, HlPrimitives,
node::{
primitives::tx_wrapper::{convert_to_eth_block_body, convert_to_hl_block_body},
types::HlExtras,
},
HlBlock, HlBlockBody, HlPrimitives,
};
use alloy_consensus::BlockHeader;
use alloy_primitives::Bytes;
use reth_chainspec::EthereumHardforks;
use reth_db::{
DbTxUnwindExt,
cursor::{DbCursorRO, DbCursorRW},
transaction::{DbTx, DbTxMut},
DbTxUnwindExt,
};
use reth_provider::{
providers::{ChainStorage, NodeTypesForProvider},
BlockBodyReader, BlockBodyWriter, ChainSpecProvider, ChainStorageReader, ChainStorageWriter,
DBProvider, DatabaseProvider, EthStorage, ProviderResult, ReadBodyInput, StorageLocation,
providers::{ChainStorage, NodeTypesForProvider},
};
pub mod tables;


@ -1,5 +1,5 @@
use alloy_primitives::{BlockNumber, Bytes};
use reth_db::{table::TableInfo, tables, TableSet, TableType, TableViewer};
use reth_db::{TableSet, TableType, TableViewer, table::TableInfo, tables};
use std::fmt;
tables! {


@ -2,7 +2,7 @@
//!
//! Changes:
//! - ReadPrecompileCalls supports RLP encoding / decoding
use alloy_primitives::{Address, Bytes, Log, B256};
use alloy_primitives::{Address, B256, Bytes, Log};
use alloy_rlp::{Decodable, Encodable, RlpDecodable, RlpEncodable};
use bytes::BufMut;
use serde::{Deserialize, Serialize};


@ -10,12 +10,12 @@ use std::{
use tracing::info;
use crate::{
HlBlock, HlBlockBody,
node::{
primitives::TransactionSigned as TxSigned,
spot_meta::{erc20_contract_to_spot_token, SpotId},
spot_meta::{SpotId, erc20_contract_to_spot_token},
types::{ReadPrecompileCalls, SystemTx},
},
HlBlock, HlBlockBody,
};
/// A raw transaction.


@ -37,6 +37,7 @@ pub async fn start_pseudo_peer(
chain_spec: Arc<HlChainSpec>,
destination_peer: String,
block_source: BlockSourceBoxed,
debug_cutoff_height: Option<u64>,
) -> eyre::Result<()> {
let blockhash_cache = new_blockhash_cache();
@ -46,6 +47,7 @@ pub async fn start_pseudo_peer(
destination_peer,
block_source.clone(),
blockhash_cache.clone(),
debug_cutoff_height,
)
.await?;


@ -1,8 +1,8 @@
use super::service::{BlockHashCache, BlockPoller};
use crate::{chainspec::HlChainSpec, node::network::HlNetworkPrimitives, HlPrimitives};
use crate::{HlPrimitives, chainspec::HlChainSpec, node::network::HlNetworkPrimitives};
use reth_network::{
config::{rng_secret_key, SecretKey},
NetworkConfig, NetworkManager, PeersConfig,
config::{SecretKey, rng_secret_key},
};
use reth_network_peers::TrustedPeer;
use reth_provider::test_utils::NoopProvider;
@ -20,6 +20,7 @@ pub struct NetworkBuilder {
discovery_port: u16,
listener_port: u16,
chain_spec: HlChainSpec,
debug_cutoff_height: Option<u64>,
}
impl Default for NetworkBuilder {
@ -31,6 +32,7 @@ impl Default for NetworkBuilder {
discovery_port: 0,
listener_port: 0,
chain_spec: HlChainSpec::default(),
debug_cutoff_height: None,
}
}
}
@ -46,6 +48,11 @@ impl NetworkBuilder {
self
}
pub fn with_debug_cutoff_height(mut self, debug_cutoff_height: Option<u64>) -> Self {
self.debug_cutoff_height = debug_cutoff_height;
self
}
pub async fn build<BS>(
self,
block_source: Arc<Box<dyn super::sources::BlockSource>>,
@ -58,8 +65,12 @@ impl NetworkBuilder {
.listener_addr(SocketAddr::new(Ipv4Addr::LOCALHOST.into(), self.listener_port));
let chain_id = self.chain_spec.inner.chain().id();
let (block_poller, start_tx) =
BlockPoller::new_suspended(chain_id, block_source, blockhash_cache);
let (block_poller, start_tx) = BlockPoller::new_suspended(
chain_id,
block_source,
blockhash_cache,
self.debug_cutoff_height,
);
let config = builder.block_import(Box::new(block_poller)).build(Arc::new(NoopProvider::<
HlChainSpec,
HlPrimitives,
@ -77,10 +88,12 @@ pub async fn create_network_manager<BS>(
destination_peer: String,
block_source: Arc<Box<dyn super::sources::BlockSource>>,
blockhash_cache: BlockHashCache,
debug_cutoff_height: Option<u64>,
) -> eyre::Result<(NetworkManager<HlNetworkPrimitives>, mpsc::Sender<()>)> {
NetworkBuilder::default()
.with_boot_nodes(vec![TrustedPeer::from_str(&destination_peer).unwrap()])
.with_chain_spec(chain_spec)
.with_debug_cutoff_height(debug_cutoff_height)
.build::<BS>(block_source, blockhash_cache)
.await
}


@ -52,12 +52,12 @@ impl BlockPoller {
chain_id: u64,
block_source: BS,
blockhash_cache: BlockHashCache,
debug_cutoff_height: Option<u64>,
) -> (Self, mpsc::Sender<()>) {
let block_source = Arc::new(block_source);
let (start_tx, start_rx) = mpsc::channel(1);
let (block_tx, block_rx) = mpsc::channel(100);
let block_tx_clone = block_tx.clone();
let task = tokio::spawn(Self::task(start_rx, block_source, block_tx_clone));
let task = tokio::spawn(Self::task(start_rx, block_source, block_tx, debug_cutoff_height));
(Self { chain_id, block_rx, task, blockhash_cache: blockhash_cache.clone() }, start_tx)
}
@ -69,7 +69,8 @@ impl BlockPoller {
async fn task<BS: BlockSource>(
mut start_rx: mpsc::Receiver<()>,
block_source: Arc<BS>,
block_tx_clone: mpsc::Sender<(u64, BlockAndReceipts)>,
block_tx: mpsc::Sender<(u64, BlockAndReceipts)>,
debug_cutoff_height: Option<u64>,
) -> eyre::Result<()> {
start_rx.recv().await.ok_or(eyre::eyre!("Failed to receive start signal"))?;
info!("Starting block poller");
@ -80,10 +81,15 @@ impl BlockPoller {
.await
.ok_or(eyre::eyre!("Failed to find latest block number"))?;
if let Some(debug_cutoff_height) = debug_cutoff_height
&& next_block_number > debug_cutoff_height {
next_block_number = debug_cutoff_height;
}
loop {
match block_source.collect_block(next_block_number).await {
Ok(block) => {
block_tx_clone.send((next_block_number, block)).await?;
block_tx.send((next_block_number, block)).await?;
next_block_number += 1;
}
Err(_) => tokio::time::sleep(polling_interval).await,
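
A minimal sketch (not the node's code) of the clamp applied above: polling starts no later than the configured debug cutoff height.

fn apply_cutoff(next_block_number: u64, debug_cutoff_height: Option<u64>) -> u64 {
    match debug_cutoff_height {
        // Never start polling beyond the cutoff.
        Some(cutoff) if next_block_number > cutoff => cutoff,
        _ => next_block_number,
    }
}

fn main() {
    assert_eq!(apply_cutoff(1_000, Some(500)), 500);
    assert_eq!(apply_cutoff(400, Some(500)), 400);
    assert_eq!(apply_cutoff(400, None), 400);
}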


@ -1,6 +1,6 @@
use super::{BlockSource, BlockSourceBoxed};
use crate::node::types::BlockAndReceipts;
use futures::{future::BoxFuture, FutureExt};
use futures::{FutureExt, future::BoxFuture};
use reth_network::cache::LruMap;
use std::sync::{Arc, RwLock};


@ -1,4 +1,4 @@
use super::{scan::Scanner, time_utils::TimeUtils, HOURLY_SUBDIR};
use super::{HOURLY_SUBDIR, scan::Scanner, time_utils::TimeUtils};
use crate::node::types::BlockAndReceipts;
use std::{
fs::File,


@ -14,6 +14,7 @@ use self::{
use super::{BlockSource, BlockSourceBoxed};
use crate::node::types::BlockAndReceipts;
use futures::future::BoxFuture;
use reth_metrics::{Metrics, metrics, metrics::Counter};
use std::{
path::{Path, PathBuf},
sync::Arc,
@ -41,6 +42,16 @@ pub struct HlNodeBlockSource {
pub local_blocks_cache: Arc<Mutex<LocalBlocksCache>>,
pub last_local_fetch: Arc<Mutex<Option<(u64, OffsetDateTime)>>>,
pub args: HlNodeBlockSourceArgs,
pub metrics: HlNodeBlockSourceMetrics,
}
#[derive(Metrics, Clone)]
#[metrics(scope = "block_source.hl_node")]
pub struct HlNodeBlockSourceMetrics {
/// How many blocks were served from the local HL node data
pub fetched_from_hl_node: Counter,
/// How many blocks were fetched from the fallback block source
pub fetched_from_fallback: Counter,
}
impl BlockSource for HlNodeBlockSource {
@ -49,11 +60,13 @@ impl BlockSource for HlNodeBlockSource {
let args = self.args.clone();
let local_blocks_cache = self.local_blocks_cache.clone();
let last_local_fetch = self.last_local_fetch.clone();
let metrics = self.metrics.clone();
Box::pin(async move {
let now = OffsetDateTime::now_utc();
if let Some(block) = Self::try_collect_local_block(local_blocks_cache, height).await {
Self::update_last_fetch(last_local_fetch, height, now).await;
metrics.fetched_from_hl_node.increment(1);
return Ok(block);
}
@ -68,6 +81,7 @@ impl BlockSource for HlNodeBlockSource {
}
let block = fallback.collect_block(height).await?;
metrics.fetched_from_fallback.increment(1);
Self::update_last_fetch(last_local_fetch, height, now).await;
Ok(block)
})
@ -224,6 +238,7 @@ impl HlNodeBlockSource {
args,
local_blocks_cache: Arc::new(Mutex::new(LocalBlocksCache::new(CACHE_SIZE))),
last_local_fetch: Arc::new(Mutex::new(None)),
metrics: HlNodeBlockSourceMetrics::default(),
};
block_source.run(next_block_number).await.unwrap();
block_source


@ -1,10 +1,10 @@
use super::*;
use crate::{
node::types::{reth_compat, ReadPrecompileCalls},
pseudo_peer::sources::{hl_node::scan::LocalBlockAndReceipts, LocalBlockSource},
node::types::{ReadPrecompileCalls, reth_compat},
pseudo_peer::sources::{LocalBlockSource, hl_node::scan::LocalBlockAndReceipts},
};
use alloy_consensus::{BlockBody, Header};
use alloy_primitives::{Address, Bloom, Bytes, B256, B64, U256};
use alloy_primitives::{Address, B64, B256, Bloom, Bytes, U256};
use std::{io::Write, time::Duration};
const DEFAULT_FALLBACK_THRESHOLD_FOR_TEST: Duration = Duration::from_millis(5000);


@ -1,5 +1,5 @@
use std::path::Path;
use time::{macros::format_description, Date, OffsetDateTime, Time};
use time::{Date, OffsetDateTime, Time, macros::format_description};
pub struct TimeUtils;


@ -1,7 +1,8 @@
use super::{utils, BlockSource};
use super::{BlockSource, utils};
use crate::node::types::BlockAndReceipts;
use eyre::Context;
use futures::{future::BoxFuture, FutureExt};
use futures::{FutureExt, future::BoxFuture};
use reth_metrics::{Metrics, metrics, metrics::Counter};
use std::path::PathBuf;
use tracing::info;
@ -9,11 +10,21 @@ use tracing::info;
#[derive(Debug, Clone)]
pub struct LocalBlockSource {
dir: PathBuf,
metrics: LocalBlockSourceMetrics,
}
#[derive(Metrics, Clone)]
#[metrics(scope = "block_source.local")]
pub struct LocalBlockSourceMetrics {
/// How many times the local block source has polled for a block
pub polling_attempt: Counter,
/// How many blocks have been successfully read from the local filesystem
pub fetched: Counter,
}
impl LocalBlockSource {
pub fn new(dir: impl Into<PathBuf>) -> Self {
Self { dir: dir.into() }
Self { dir: dir.into(), metrics: LocalBlockSourceMetrics::default() }
}
async fn pick_path_with_highest_number(dir: PathBuf, is_dir: bool) -> Option<(u64, String)> {
@ -31,13 +42,17 @@ impl LocalBlockSource {
impl BlockSource for LocalBlockSource {
fn collect_block(&self, height: u64) -> BoxFuture<'static, eyre::Result<BlockAndReceipts>> {
let dir = self.dir.clone();
let metrics = self.metrics.clone();
async move {
let path = dir.join(utils::rmp_path(height));
metrics.polling_attempt.increment(1);
let file = tokio::fs::read(&path)
.await
.wrap_err_with(|| format!("Failed to read block from {path:?}"))?;
let mut decoder = lz4_flex::frame::FrameDecoder::new(&file[..]);
let blocks: Vec<BlockAndReceipts> = rmp_serde::from_read(&mut decoder)?;
metrics.fetched.increment(1);
Ok(blocks[0].clone())
}
.boxed()


@ -1,7 +1,8 @@
use super::{utils, BlockSource};
use super::{BlockSource, utils};
use crate::node::types::BlockAndReceipts;
use aws_sdk_s3::types::RequestPayer;
use futures::{future::BoxFuture, FutureExt};
use futures::{FutureExt, future::BoxFuture};
use reth_metrics::{Metrics, metrics, metrics::Counter};
use std::{sync::Arc, time::Duration};
use tracing::info;
@ -11,11 +12,26 @@ pub struct S3BlockSource {
client: Arc<aws_sdk_s3::Client>,
bucket: String,
polling_interval: Duration,
metrics: S3BlockSourceMetrics,
}
#[derive(Metrics, Clone)]
#[metrics(scope = "block_source.s3")]
pub struct S3BlockSourceMetrics {
/// How many times the S3 block source has polled for a block
pub polling_attempt: Counter,
/// How many blocks have been successfully fetched from S3
pub fetched: Counter,
}
impl S3BlockSource {
pub fn new(client: aws_sdk_s3::Client, bucket: String, polling_interval: Duration) -> Self {
Self { client: client.into(), bucket, polling_interval }
Self {
client: client.into(),
bucket,
polling_interval,
metrics: S3BlockSourceMetrics::default(),
}
}
async fn pick_path_with_highest_number(
@ -52,14 +68,18 @@ impl BlockSource for S3BlockSource {
fn collect_block(&self, height: u64) -> BoxFuture<'static, eyre::Result<BlockAndReceipts>> {
let client = self.client.clone();
let bucket = self.bucket.clone();
let metrics = self.metrics.clone();
async move {
let path = utils::rmp_path(height);
metrics.polling_attempt.increment(1);
let request = client
.get_object()
.request_payer(RequestPayer::Requester)
.bucket(&bucket)
.key(path);
let response = request.send().await?;
metrics.fetched.increment(1);
let bytes = response.body.collect().await?.into_bytes();
let mut decoder = lz4_flex::frame::FrameDecoder::new(&bytes[..]);
let blocks: Vec<BlockAndReceipts> = rmp_serde::from_read(&mut decoder)?;

src/version.rs (new file, 35 lines)

@ -0,0 +1,35 @@
use std::borrow::Cow;
use reth_node_core::version::{RethCliVersionConsts, try_init_version_metadata};
pub fn init_reth_hl_version() {
let cargo_pkg_version = env!("CARGO_PKG_VERSION").to_string();
let short = env!("RETH_HL_SHORT_VERSION").to_string();
let long = format!(
"{}\n{}\n{}\n{}\n{}",
env!("RETH_HL_LONG_VERSION_0"),
env!("RETH_HL_LONG_VERSION_1"),
env!("RETH_HL_LONG_VERSION_2"),
env!("RETH_HL_LONG_VERSION_3"),
env!("RETH_HL_LONG_VERSION_4"),
);
let p2p = env!("RETH_HL_P2P_CLIENT_VERSION").to_string();
let meta = RethCliVersionConsts {
name_client: Cow::Borrowed("reth_hl"),
cargo_pkg_version: Cow::Owned(cargo_pkg_version.clone()),
vergen_git_sha_long: Cow::Owned(env!("VERGEN_GIT_SHA").to_string()),
vergen_git_sha: Cow::Owned(env!("VERGEN_GIT_SHA_SHORT").to_string()),
vergen_build_timestamp: Cow::Owned(env!("VERGEN_BUILD_TIMESTAMP").to_string()),
vergen_cargo_target_triple: Cow::Owned(env!("VERGEN_CARGO_TARGET_TRIPLE").to_string()),
vergen_cargo_features: Cow::Owned(env!("VERGEN_CARGO_FEATURES").to_string()),
short_version: Cow::Owned(short),
long_version: Cow::Owned(long),
build_profile_name: Cow::Owned(env!("RETH_HL_BUILD_PROFILE").to_string()),
p2p_client_version: Cow::Owned(p2p),
extra_data: Cow::Owned(format!("reth_hl/v{}/{}", cargo_pkg_version, std::env::consts::OS)),
};
let _ = try_init_version_metadata(meta);
}
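
A hypothetical call site (module path and wiring are assumptions, not shown in this diff): the metadata is registered once, before the CLI builds its version strings.

fn main() {
    // Register reth_hl's version metadata before the CLI reads it.
    // Hypothetical crate path; not taken from this diff.
    reth_hl::version::init_reth_hl_version();
    // ... parse arguments and launch the node afterwards.
}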