Mirror of https://github.com/hl-archive-node/nanoreth.git (synced 2025-12-06 10:59:55 +00:00)

Compare commits: 32c4f92aec ... nb-2025101 (67 commits)
Commits in this comparison (67):

51924e9671, 8f15aa311f, bc66716a41, fc819dbba2, 1c5a22a814, 852e186b1a, f83326059f, ca8c374116,
5ba12a4850, 8a179a6d9e, d570cf3e8d, 0e49e65068, 13b63ff136, 233026871f, 7e169d409d, 47aaad6ed9,
9f73b1ede0, bcdf4d933d, 2390ed864a, 567d6ce2e4, 8b2c3a4a34, 92759f04db, 71bb70bca6, 5327ebc97a,
4d83b687d4, 12f366573e, b8bae7cde9, 0fd4b7943f, bfd61094ee, 3b33b0a526, de7b524f0b, 24f2460337,
b55ddc54ad, aa73fab281, ae0cb0da6d, 8605be9864, c93ff90f94, ce64e00e2f, 8d8da57d3a, 875304f891,
b37ba15765, 3080665702, 4896e4f0ea, 458f506ad2, 1c7136bfab, 491e902904, 45648a7a98, c87c5a055a,
c9416a3948, db10c23c56, fc395123f3, 84ea1af682, bd3e0626ed, 7d223a464e, afcc551f67, 0dfd7a4c7f,
8faac526b7, acfabf969c, fccf877a3a, 9e3f0c722e, cd5bcc4cb0, d831a459bb, 66c2ee654c, 701e6a25e6,
ab11ce513f, 37b852e810, 51c43d6dbd
.github/ISSUE_TEMPLATE/bug.yml (vendored, new file, 127 lines)

name: Bug Report
description: Create a bug report
labels: ["C-bug", "S-needs-triage"]
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this bug report! Please provide as much detail as possible.

        If you believe you have found a vulnerability, please provide details [here](mailto:georgios@paradigm.xyz) instead.
  - type: textarea
    id: what-happened
    attributes:
      label: Describe the bug
      description: |
        A clear and concise description of what the bug is.

        If the bug is in a crate you are using (i.e. you are not running the standard `reth` binary) please mention that as well.
    validations:
      required: true
  - type: textarea
    id: reproduction-steps
    attributes:
      label: Steps to reproduce
      description: Please provide any steps you think might be relevant to reproduce the bug.
      placeholder: |
        Steps to reproduce:

        1. Start '...'
        2. Then '...'
        3. Check '...'
        4. See error
    validations:
      required: true
  - type: textarea
    id: logs
    attributes:
      label: Node logs
      description: |
        If applicable, please provide the node logs leading up to the bug.

        **Please also provide debug logs.** By default, these can be found in:

        - `~/.cache/reth/logs` on Linux
        - `~/Library/Caches/reth/logs` on macOS
        - `%localAppData%/reth/logs` on Windows
      render: text
    validations:
      required: false
  - type: dropdown
    id: platform
    attributes:
      label: Platform(s)
      description: What platform(s) did this occur on?
      multiple: true
      options:
        - Linux (x86)
        - Linux (ARM)
        - Mac (Intel)
        - Mac (Apple Silicon)
        - Windows (x86)
        - Windows (ARM)
  - type: dropdown
    id: container_type
    attributes:
      label: Container Type
      description: Were you running it in a container?
      multiple: true
      options:
        - Not running in a container
        - Docker
        - Kubernetes
        - LXC/LXD
        - Other
    validations:
      required: true
  - type: textarea
    id: client-version
    attributes:
      label: What version/commit are you on?
      description: This can be obtained with `reth --version`
    validations:
      required: true
  - type: textarea
    id: database-version
    attributes:
      label: What database version are you on?
      description: This can be obtained with `reth db version`
    validations:
      required: true
  - type: textarea
    id: network
    attributes:
      label: Which chain / network are you on?
      description: This is the argument you pass to `reth --chain`. If you are using `--dev`, type in 'dev' here. If you are not running with `--chain` or `--dev` then it is mainnet.
    validations:
      required: true
  - type: dropdown
    id: node-type
    attributes:
      label: What type of node are you running?
      options:
        - Archive (default)
        - Full via --full flag
        - Pruned with custom reth.toml config
    validations:
      required: true
  - type: textarea
    id: prune-config
    attributes:
      label: What prune config do you use, if any?
      description: The `[prune]` section in `reth.toml` file
    validations:
      required: false
  - type: input
    attributes:
      label: If you've built Reth from source, provide the full command you used
    validations:
      required: false
  - type: checkboxes
    id: terms
    attributes:
      label: Code of Conduct
      description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/paradigmxyz/reth/blob/main/CONTRIBUTING.md#code-of-conduct)
      options:
        - label: I agree to follow the Code of Conduct
          required: true
.github/ISSUE_TEMPLATE/config.yml (vendored, new file, 5 lines)

blank_issues_enabled: false
contact_links:
  - name: GitHub Discussions
    url: https://github.com/paradigmxyz/reth/discussions
    about: Please ask and answer questions here to keep the issue tracker clean.
.github/ISSUE_TEMPLATE/docs.yml (vendored, new file, 19 lines)

name: Documentation
description: Suggest a change to our documentation
labels: ["C-docs", "S-needs-triage"]
body:
  - type: markdown
    attributes:
      value: |
        If you are unsure if the docs are relevant or needed, please open up a discussion first.
  - type: textarea
    attributes:
      label: Describe the change
      description: |
        Please describe the documentation you want to change or add, and if it is for end-users or contributors.
    validations:
      required: true
  - type: textarea
    attributes:
      label: Additional context
      description: Add any other context to the feature (like screenshots, resources)
.github/ISSUE_TEMPLATE/feature.yml (vendored, new file, 21 lines)

name: Feature request
description: Suggest a feature
labels: ["C-enhancement", "S-needs-triage"]
body:
  - type: markdown
    attributes:
      value: |
        Please ensure that the feature has not already been requested in the issue tracker.
  - type: textarea
    attributes:
      label: Describe the feature
      description: |
        Please describe the feature and what it is aiming to solve, if relevant.

        If the feature is for a crate, please include a proposed API surface.
    validations:
      required: true
  - type: textarea
    attributes:
      label: Additional context
      description: Add any other context to the feature (like screenshots, resources)
.github/workflows/docker.yml (vendored, 1 line added)

@@ -6,6 +6,7 @@ on:
   push:
     tags:
       - v*
+      - nb-*
 
 env:
   IMAGE_NAME: ${{ github.repository_owner }}/nanoreth
Cargo.lock (generated, 838 lines changed): file diff suppressed because it is too large.
Cargo.toml (115 lines changed)

@@ -1,7 +1,8 @@
 [package]
 name = "reth_hl"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"
+build = "build.rs"
 
 [lib]
 name = "reth_hl"

@@ -25,67 +26,73 @@ lto = "fat"
In this hunk every reth-* git dependency is repointed from
`git = "https://github.com/sprites0/reth", rev = "a690ef25b56039195e7e4a4abd01c78aedcc73fb"` to
`git = "https://github.com/hl-archive-node/reth", rev = "416c2e26756f1c8ee86e6b8e4081f434952b3a1a"`.
The affected crates are: reth, reth-cli, reth-cli-commands, reth-basic-payload-builder, reth-db,
reth-db-api, reth-chainspec, reth-cli-util, reth-discv4, reth-engine-primitives, reth-ethereum-forks,
reth-ethereum-payload-builder, reth-ethereum-primitives, reth-eth-wire, reth-eth-wire-types, reth-evm,
reth-evm-ethereum, reth-node-core, reth-revm, reth-network, reth-network-p2p, reth-network-api,
reth-node-ethereum, reth-network-peers, reth-payload-primitives, reth-primitives, reth-primitives-traits,
reth-provider (with `features = ["test-utils"]`), reth-rpc, reth-rpc-eth-api, reth-rpc-engine-api,
reth-tracing, reth-trie-common, reth-trie-db, reth-codecs, reth-transaction-pool, and reth-stages-types.
Six reth crates are newly added at the same git/rev: reth-storage-api, reth-errors, reth-rpc-convert,
reth-rpc-eth-types, reth-rpc-server-types, and reth-metrics. The remaining changes in this hunk:

-revm = { version = "28.0.1", default-features = false }
+revm = { version = "29.0.1", default-features = false }
 
 # alloy dependencies
-alloy-genesis = { version = "1.0.23", default-features = false }
+alloy-genesis = { version = "1.0.37", default-features = false }
-alloy-consensus = { version = "1.0.23", default-features = false }
+alloy-consensus = { version = "1.0.37", default-features = false }
 alloy-chains = { version = "0.2.5", default-features = false }
-alloy-eips = { version = "1.0.23", default-features = false }
+alloy-eips = { version = "1.0.37", default-features = false }
-alloy-evm = { version = "0.18.2", default-features = false }
+alloy-evm = { version = "0.21.0", default-features = false }
 alloy-json-abi = { version = "1.3.1", default-features = false }
-alloy-json-rpc = { version = "1.0.23", default-features = false }
+alloy-json-rpc = { version = "1.0.37", default-features = false }
 alloy-dyn-abi = "1.3.1"
-alloy-network = { version = "1.0.23", default-features = false }
+alloy-network = { version = "1.0.37", default-features = false }
 alloy-primitives = { version = "1.3.1", default-features = false, features = ["map-foldhash"] }
 alloy-rlp = { version = "0.3.10", default-features = false, features = ["core-net"] }
-alloy-rpc-types = { version = "1.0.23", features = ["eth"], default-features = false }
+alloy-rpc-types = { version = "1.0.37", features = ["eth"], default-features = false }
-alloy-rpc-types-eth = { version = "1.0.23", default-features = false }
+alloy-rpc-types-eth = { version = "1.0.37", default-features = false }
-alloy-rpc-types-engine = { version = "1.0.23", default-features = false }
+alloy-rpc-types-engine = { version = "1.0.37", default-features = false }
-alloy-signer = { version = "1.0.23", default-features = false }
+alloy-signer = { version = "1.0.37", default-features = false }
 alloy-sol-macro = "1.3.1"
 alloy-sol-types = { version = "1.3.1", default-features = false }
 
-jsonrpsee = "0.25.1"
+jsonrpsee = "0.26.0"
-jsonrpsee-core = "0.25.1"
+jsonrpsee-core = "0.26.0"
-jsonrpsee-types = "0.25.1"
+jsonrpsee-types = "0.26.0"
 
 # misc dependencies
 auto_impl = "1"

@@ -166,3 +173,7 @@ client = [
 
 [dev-dependencies]
 tempfile = "3.20.0"
+
+[build-dependencies]
+vergen = { version = "9.0.4", features = ["build", "cargo", "emit_and_set"] }
+vergen-git2 = "1.0.5"
Makefile (1 line removed)

@@ -255,7 +255,6 @@ define docker_build_push
    --tag $(DOCKER_IMAGE_NAME):$(2) \
    --build-arg BUILD_PROFILE="$(PROFILE)" \
    --build-arg FEATURES="jemalloc,asm-keccak" \
-   --build-arg RUSTFLAGS="-C target-cpu=native" \
    --provenance=false \
    --push
 endef
README.md (12 lines changed)

@@ -3,6 +3,8 @@
 HyperEVM archive node implementation based on [reth](https://github.com/paradigmxyz/reth).
 NodeBuilder API version is heavily inspired by [reth-bsc](https://github.com/loocapro/reth-bsc).
 
+Got questions? Drop by the [Hyperliquid Discord](https://discord.gg/hyperliquid) #node-operators channel.
+
 ## ⚠️ IMPORTANT: System Transactions Appear as Pseudo Transactions
 
 Deposit transactions from [System Addresses](https://hyperliquid.gitbook.io/hyperliquid-docs/for-developers/hyperevm/hypercore-less-than-greater-than-hyperevm-transfers#system-addresses) like `0x222..22` / `0x200..xx` to user addresses are intentionally recorded as pseudo transactions.

@@ -58,19 +60,19 @@ $ reth-hl node --http --http.addr 0.0.0.0 --http.api eth,ots,net,web3 \
 
 ## How to run (testnet)
 
-Testnet is supported since block 21304281.
+Testnet is supported since block 30281484.
 
 ```sh
-# Get testnet genesis at block 21304281
+# Get testnet genesis at block 30281484
 $ cd ~
 $ git clone https://github.com/sprites0/hl-testnet-genesis
 $ zstd --rm -d ~/hl-testnet-genesis/*.zst
 
 # Init node
 $ make install
-$ reth-hl init-state --without-evm --chain testnet --header ~/hl-testnet-genesis/21304281.rlp \
-    --header-hash 0x5b10856d2b1ad241c9bd6136bcc60ef7e8553560ca53995a590db65f809269b4 \
-    ~/hl-testnet-genesis/21304281.jsonl --total-difficulty 0
+$ reth-hl init-state --without-evm --chain testnet --header ~/hl-testnet-genesis/30281484.rlp \
+    --header-hash 0x147cc3c09e9ddbb11799c826758db284f77099478ab5f528d3a57a6105516c21 \
+    ~/hl-testnet-genesis/30281484.jsonl --total-difficulty 0
 
 # Run node
 $ reth-hl node --chain testnet --http --http.addr 0.0.0.0 --http.api eth,ots,net,web3 \
build.rs (new file, 91 lines)

use std::{env, error::Error};
use vergen::{BuildBuilder, CargoBuilder, Emitter};
use vergen_git2::Git2Builder;

fn main() -> Result<(), Box<dyn Error>> {
    let mut emitter = Emitter::default();

    let build_builder = BuildBuilder::default().build_timestamp(true).build()?;

    emitter.add_instructions(&build_builder)?;

    let cargo_builder = CargoBuilder::default().features(true).target_triple(true).build()?;

    emitter.add_instructions(&cargo_builder)?;

    let git_builder =
        Git2Builder::default().describe(false, true, None).dirty(true).sha(false).build()?;

    emitter.add_instructions(&git_builder)?;

    emitter.emit_and_set()?;
    let sha = env::var("VERGEN_GIT_SHA")?;
    let sha_short = &sha[0..7];

    let is_dirty = env::var("VERGEN_GIT_DIRTY")? == "true";
    // > git describe --always --tags
    // if not on a tag: v0.2.0-beta.3-82-g1939939b
    // if on a tag: v0.2.0-beta.3
    let not_on_tag = env::var("VERGEN_GIT_DESCRIBE")?.ends_with(&format!("-g{sha_short}"));
    let version_suffix = if is_dirty || not_on_tag { "-dev" } else { "" };
    println!("cargo:rustc-env=RETH_HL_VERSION_SUFFIX={version_suffix}");

    // Set short SHA
    println!("cargo:rustc-env=VERGEN_GIT_SHA_SHORT={}", &sha[..8]);

    // Set the build profile
    let out_dir = env::var("OUT_DIR").unwrap();
    let profile = out_dir.rsplit(std::path::MAIN_SEPARATOR).nth(3).unwrap();
    println!("cargo:rustc-env=RETH_HL_BUILD_PROFILE={profile}");

    // Set formatted version strings
    let pkg_version = env!("CARGO_PKG_VERSION");

    // The short version information for reth.
    // - The latest version from Cargo.toml
    // - The short SHA of the latest commit.
    // Example: 0.1.0 (defa64b2)
    println!("cargo:rustc-env=RETH_HL_SHORT_VERSION={pkg_version}{version_suffix} ({sha_short})");

    // LONG_VERSION
    // The long version information for reth.
    //
    // - The latest version from Cargo.toml + version suffix (if any)
    // - The full SHA of the latest commit
    // - The build datetime
    // - The build features
    // - The build profile
    //
    // Example:
    //
    // ```text
    // Version: 0.1.0
    // Commit SHA: defa64b2
    // Build Timestamp: 2023-05-19T01:47:19.815651705Z
    // Build Features: jemalloc
    // Build Profile: maxperf
    // ```
    println!("cargo:rustc-env=RETH_HL_LONG_VERSION_0=Version: {pkg_version}{version_suffix}");
    println!("cargo:rustc-env=RETH_HL_LONG_VERSION_1=Commit SHA: {sha}");
    println!(
        "cargo:rustc-env=RETH_HL_LONG_VERSION_2=Build Timestamp: {}",
        env::var("VERGEN_BUILD_TIMESTAMP")?
    );
    println!(
        "cargo:rustc-env=RETH_HL_LONG_VERSION_3=Build Features: {}",
        env::var("VERGEN_CARGO_FEATURES")?
    );
    println!("cargo:rustc-env=RETH_HL_LONG_VERSION_4=Build Profile: {profile}");

    // The version information for reth formatted for P2P (devp2p).
    // - The latest version from Cargo.toml
    // - The target triple
    //
    // Example: reth/v0.1.0-alpha.1-428a6dc2f/aarch64-apple-darwin
    println!(
        "cargo:rustc-env=RETH_HL_P2P_CLIENT_VERSION={}",
        format_args!("reth/v{pkg_version}-{sha_short}/{}", env::var("VERGEN_CARGO_TARGET_TRIPLE")?)
    );

    Ok(())
}
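A later hunk in this comparison adds `pub mod version;` to the crate root, which presumably reads the variables emitted above at compile time. A minimal sketch of such a consumer, assuming it lives inside this crate (where the build script sets these variables); the constant and function names below are illustrative, not taken from the repository:

```rust
// Illustrative only: the env var names come from build.rs above, but the
// constants and the helper function are hypothetical, not the crate's actual API.
pub const SHORT_VERSION: &str = env!("RETH_HL_SHORT_VERSION");
pub const P2P_CLIENT_VERSION: &str = env!("RETH_HL_P2P_CLIENT_VERSION");

/// Joins the five RETH_HL_LONG_VERSION_* parts emitted by build.rs into one text block.
pub fn long_version() -> String {
    [
        env!("RETH_HL_LONG_VERSION_0"),
        env!("RETH_HL_LONG_VERSION_1"),
        env!("RETH_HL_LONG_VERSION_2"),
        env!("RETH_HL_LONG_VERSION_3"),
        env!("RETH_HL_LONG_VERSION_4"),
    ]
    .join("\n")
}
```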
@@ -2,18 +2,18 @@ use alloy_eips::BlockId;
 use alloy_json_rpc::RpcObject;
 use alloy_primitives::{Bytes, U256};
 use alloy_rpc_types_eth::{
-    state::{EvmOverrides, StateOverride},
     BlockOverrides,
+    state::{EvmOverrides, StateOverride},
 };
 use jsonrpsee::{
     http_client::{HttpClient, HttpClientBuilder},
     proc_macros::rpc,
     rpc_params,
-    types::{error::INTERNAL_ERROR_CODE, ErrorObject},
+    types::{ErrorObject, error::INTERNAL_ERROR_CODE},
 };
-use jsonrpsee_core::{async_trait, client::ClientT, ClientError, RpcResult};
+use jsonrpsee_core::{ClientError, RpcResult, async_trait, client::ClientT};
 use reth_rpc::eth::EthApiTypes;
-use reth_rpc_eth_api::{helpers::EthCall, RpcTxReq};
+use reth_rpc_eth_api::{RpcTxReq, helpers::EthCall};
 
 #[rpc(server, namespace = "eth")]
 pub(crate) trait CallForwarderApi<TxReq: RpcObject> {

@@ -7,70 +7,35 @@
 //! For non-system transactions, we can just return the log as is, and the client will
 //! adjust the transaction index accordingly.
 
-use alloy_consensus::{transaction::TransactionMeta, BlockHeader, TxReceipt};
+use alloy_consensus::{
+    BlockHeader, TxReceipt,
+    transaction::{TransactionMeta, TxHashRef},
+};
 use alloy_eips::{BlockId, BlockNumberOrTag};
 use alloy_json_rpc::RpcObject;
 use alloy_primitives::{B256, U256};
 use alloy_rpc_types::{
-    pubsub::{Params, SubscriptionKind},
     BlockTransactions, Filter, FilterChanges, FilterId, Log, PendingTransactionFilterKind,
     TransactionInfo,
+    pubsub::{Params, SubscriptionKind},
 };
-use jsonrpsee::{proc_macros::rpc, PendingSubscriptionSink, SubscriptionMessage, SubscriptionSink};
+use jsonrpsee::{PendingSubscriptionSink, proc_macros::rpc};
-use jsonrpsee_core::{async_trait, RpcResult};
+use jsonrpsee_core::{RpcResult, async_trait};
-use jsonrpsee_types::{error::INTERNAL_ERROR_CODE, ErrorObject};
+use jsonrpsee_types::{ErrorObject, error::INTERNAL_ERROR_CODE};
 use reth::{api::FullNodeComponents, builder::rpc::RpcContext, tasks::TaskSpawner};
-use reth_primitives_traits::{BlockBody as _, SignedTransaction};
+use reth_primitives_traits::SignedTransaction;
 use reth_provider::{BlockIdReader, BlockReader, BlockReaderIdExt, ReceiptProvider};
-use reth_rpc::{eth::pubsub::SubscriptionSerializeError, EthFilter, EthPubSub, RpcTypes};
+use reth_rpc::{EthFilter, EthPubSub};
 use reth_rpc_eth_api::{
-    helpers::{EthBlocks, EthTransactions, LoadReceipt},
-    transaction::ConvertReceiptInput,
-    EthApiServer, EthApiTypes, EthFilterApiServer, EthPubSubApiServer, FullEthApiTypes, RpcBlock,
-    RpcConvert, RpcHeader, RpcNodeCoreExt, RpcReceipt, RpcTransaction, RpcTxReq,
+    EthApiTypes, EthFilterApiServer, EthPubSubApiServer, RpcBlock, RpcConvert, RpcReceipt,
+    RpcTransaction, helpers::EthBlocks, transaction::ConvertReceiptInput,
 };
-use serde::Serialize;
+use reth_rpc_eth_types::EthApiError;
-use std::{borrow::Cow, marker::PhantomData, sync::Arc};
+use std::{marker::PhantomData, sync::Arc};
-use tokio_stream::{Stream, StreamExt};
+use tokio_stream::StreamExt;
-use tracing::{trace, Instrument};
+use tracing::{Instrument, trace};
 
-use crate::{node::primitives::HlPrimitives, HlBlock};
+use crate::addons::utils::{EthWrapper, new_headers_stream, pipe_from_stream};
 
-pub trait EthWrapper:
-    EthApiServer<
-        RpcTxReq<Self::NetworkTypes>,
-        RpcTransaction<Self::NetworkTypes>,
-        RpcBlock<Self::NetworkTypes>,
-        RpcReceipt<Self::NetworkTypes>,
-        RpcHeader<Self::NetworkTypes>,
-    > + FullEthApiTypes<
-        Primitives = HlPrimitives,
-        NetworkTypes: RpcTypes<TransactionResponse = alloy_rpc_types_eth::Transaction>,
-    > + RpcNodeCoreExt<Provider: BlockReader<Block = HlBlock>>
-    + EthBlocks
-    + EthTransactions
-    + LoadReceipt
-    + 'static
-{
-}
-
-impl<T> EthWrapper for T where
-    T: EthApiServer<
-            RpcTxReq<Self::NetworkTypes>,
-            RpcTransaction<Self::NetworkTypes>,
-            RpcBlock<Self::NetworkTypes>,
-            RpcReceipt<Self::NetworkTypes>,
-            RpcHeader<Self::NetworkTypes>,
-        > + FullEthApiTypes<
-            Primitives = HlPrimitives,
-            NetworkTypes: RpcTypes<TransactionResponse = alloy_rpc_types_eth::Transaction>,
-        > + RpcNodeCoreExt<Provider: BlockReader<Block = HlBlock>>
-        + EthBlocks
-        + EthTransactions
-        + LoadReceipt
-        + 'static
-{
-}
-
 #[rpc(server, namespace = "eth")]
 #[async_trait]

@@ -182,7 +147,7 @@ impl<Eth: EthWrapper> HlSystemTransactionExt<Eth> {
         };
 
         let input = ConvertReceiptInput {
-            receipt: Cow::Borrowed(receipt),
+            receipt: receipt.clone(),
             tx,
             gas_used: receipt.cumulative_gas_used() - gas_used,
             next_log_index,

@@ -383,7 +348,7 @@ where
             )
             .await;
         } else {
-            let _ = pubsub.handle_accepted(sink, kind, params).await;
+            let _ = pipe_from_stream(sink, new_headers_stream::<Eth>(&provider)).await;
         }
     }));
     Ok(())

@@ -408,23 +373,6 @@ fn adjust_log<Eth: EthWrapper>(mut log: Log, provider: &Eth::Provider) -> Option
     Some(log)
 }
 
-async fn pipe_from_stream<T: Serialize, St: Stream<Item = T> + Unpin>(
-    sink: SubscriptionSink,
-    mut stream: St,
-) -> Result<(), ErrorObject<'static>> {
-    loop {
-        tokio::select! {
-            _ = sink.closed() => break Ok(()),
-            maybe_item = stream.next() => {
-                let Some(item) = maybe_item else { break Ok(()) };
-                let msg = SubscriptionMessage::new(sink.method_name(), sink.subscription_id(), &item)
-                    .map_err(SubscriptionSerializeError::from)?;
-                if sink.send(msg).await.is_err() { break Ok(()); }
-            }
-        }
-    }
-}
-
 pub struct HlNodeBlockFilterHttp<Eth: EthWrapper> {
     eth_api: Arc<Eth>,
     _marker: PhantomData<Eth>,

@@ -530,7 +478,7 @@ async fn adjust_block_receipts<Eth: EthWrapper>(
         };
 
         let input = ConvertReceiptInput {
-            receipt: Cow::Borrowed(receipt),
+            receipt: receipt.clone(),
             tx,
             gas_used: receipt.cumulative_gas_used() - gas_used,
             next_log_index,

@@ -575,10 +523,9 @@ async fn adjust_transaction_receipt<Eth: EthWrapper>(
 // This function assumes that `block_id` is already validated by the caller.
 fn system_tx_count_for_block<Eth: EthWrapper>(eth_api: &Eth, block_id: BlockId) -> usize {
     let provider = eth_api.provider();
-    let block = provider.block_by_id(block_id).unwrap().unwrap();
-    let system_tx_count =
-        block.body.transactions().iter().filter(|tx| tx.is_system_transaction()).count();
-    system_tx_count
+    let header = provider.header_by_id(block_id).unwrap().unwrap();
+    header.extras.system_tx_count.try_into().unwrap()
 }
 
 #[async_trait]

@@ -652,6 +599,9 @@ where
         block_id: BlockId,
     ) -> RpcResult<Option<Vec<RpcReceipt<Eth::NetworkTypes>>>> {
         trace!(target: "rpc::eth", ?block_id, "Serving eth_getBlockReceipts");
+        if self.eth_api.provider().block_by_id(block_id).map_err(EthApiError::from)?.is_none() {
+            return Ok(None);
+        }
         let result =
             adjust_block_receipts(block_id, &*self.eth_api).instrument(engine_span!()).await?;
         Ok(result.map(|(_, receipts)| receipts))

@@ -1,3 +1,5 @@
 pub mod call_forwarder;
 pub mod hl_node_compliance;
 pub mod tx_forwarder;
+pub mod subscribe_fixup;
+mod utils;
src/addons/subscribe_fixup.rs (new file, 54 lines)

use crate::addons::utils::{EthWrapper, new_headers_stream, pipe_from_stream};
use alloy_rpc_types::pubsub::{Params, SubscriptionKind};
use async_trait::async_trait;
use jsonrpsee::PendingSubscriptionSink;
use jsonrpsee_types::ErrorObject;
use reth::tasks::TaskSpawner;
use reth_rpc::EthPubSub;
use reth_rpc_convert::RpcTransaction;
use reth_rpc_eth_api::{EthApiTypes, EthPubSubApiServer};
use std::sync::Arc;

pub struct SubscribeFixup<Eth: EthWrapper> {
    pubsub: Arc<EthPubSub<Eth>>,
    provider: Arc<Eth::Provider>,
    subscription_task_spawner: Box<dyn TaskSpawner + 'static>,
}

#[async_trait]
impl<Eth: EthWrapper> EthPubSubApiServer<RpcTransaction<Eth::NetworkTypes>> for SubscribeFixup<Eth>
where
    ErrorObject<'static>: From<<Eth as EthApiTypes>::Error>,
{
    async fn subscribe(
        &self,
        pending: PendingSubscriptionSink,
        kind: SubscriptionKind,
        params: Option<Params>,
    ) -> jsonrpsee::core::SubscriptionResult {
        let sink = pending.accept().await?;
        let (pubsub, provider) = (self.pubsub.clone(), self.provider.clone());
        self.subscription_task_spawner.spawn(Box::pin(async move {
            if kind == SubscriptionKind::NewHeads {
                let _ = pipe_from_stream(sink, new_headers_stream::<Eth>(&provider)).await;
            } else {
                let _ = pubsub.handle_accepted(sink, kind, params).await;
            }
        }));
        Ok(())
    }
}

impl<Eth: EthWrapper> SubscribeFixup<Eth> {
    pub fn new(
        pubsub: Arc<EthPubSub<Eth>>,
        provider: Arc<Eth::Provider>,
        subscription_task_spawner: Box<dyn TaskSpawner + 'static>,
    ) -> Self
    where
        Eth: EthWrapper,
        ErrorObject<'static>: From<Eth::Error>,
    {
        Self { pubsub, provider, subscription_task_spawner }
    }
}
@@ -2,14 +2,14 @@ use std::time::Duration;
 
 use alloy_json_rpc::RpcObject;
 use alloy_network::Ethereum;
-use alloy_primitives::{Bytes, B256};
+use alloy_primitives::{B256, Bytes};
 use alloy_rpc_types::TransactionRequest;
 use jsonrpsee::{
     http_client::{HttpClient, HttpClientBuilder},
     proc_macros::rpc,
-    types::{error::INTERNAL_ERROR_CODE, ErrorObject},
+    types::{ErrorObject, error::INTERNAL_ERROR_CODE},
 };
-use jsonrpsee_core::{async_trait, client::ClientT, ClientError, RpcResult};
+use jsonrpsee_core::{ClientError, RpcResult, async_trait, client::ClientT};
 use reth::rpc::{result::internal_rpc_err, server_types::eth::EthApiError};
 use reth_rpc_eth_api::RpcReceipt;
 
src/addons/utils.rs (new file, 90 lines)

use std::sync::Arc;

use crate::{HlBlock, HlPrimitives};
use alloy_primitives::U256;
use alloy_rpc_types::Header;
use futures::StreamExt;
use jsonrpsee::{SubscriptionMessage, SubscriptionSink};
use jsonrpsee_types::ErrorObject;
use reth_primitives::SealedHeader;
use reth_provider::{BlockReader, CanonStateSubscriptions};
use reth_rpc::{RpcTypes, eth::pubsub::SubscriptionSerializeError};
use reth_rpc_convert::{RpcBlock, RpcHeader, RpcReceipt, RpcTransaction, RpcTxReq};
use reth_rpc_eth_api::{
    EthApiServer, FullEthApiTypes, RpcNodeCoreExt,
    helpers::{EthBlocks, EthTransactions, LoadReceipt},
};
use serde::Serialize;
use tokio_stream::Stream;

pub trait EthWrapper:
    EthApiServer<
        RpcTxReq<Self::NetworkTypes>,
        RpcTransaction<Self::NetworkTypes>,
        RpcBlock<Self::NetworkTypes>,
        RpcReceipt<Self::NetworkTypes>,
        RpcHeader<Self::NetworkTypes>,
    > + FullEthApiTypes<
        Primitives = HlPrimitives,
        NetworkTypes: RpcTypes<TransactionResponse = alloy_rpc_types_eth::Transaction>,
    > + RpcNodeCoreExt<Provider: BlockReader<Block = HlBlock>>
    + EthBlocks
    + EthTransactions
    + LoadReceipt
    + 'static
{
}

impl<T> EthWrapper for T where
    T: EthApiServer<
            RpcTxReq<Self::NetworkTypes>,
            RpcTransaction<Self::NetworkTypes>,
            RpcBlock<Self::NetworkTypes>,
            RpcReceipt<Self::NetworkTypes>,
            RpcHeader<Self::NetworkTypes>,
        > + FullEthApiTypes<
            Primitives = HlPrimitives,
            NetworkTypes: RpcTypes<TransactionResponse = alloy_rpc_types_eth::Transaction>,
        > + RpcNodeCoreExt<Provider: BlockReader<Block = HlBlock>>
        + EthBlocks
        + EthTransactions
        + LoadReceipt
        + 'static
{
}

pub(super) async fn pipe_from_stream<T: Serialize, St: Stream<Item = T> + Unpin>(
    sink: SubscriptionSink,
    mut stream: St,
) -> Result<(), ErrorObject<'static>> {
    loop {
        tokio::select! {
            _ = sink.closed() => break Ok(()),
            maybe_item = stream.next() => {
                let Some(item) = maybe_item else { break Ok(()) };
                let msg = SubscriptionMessage::new(sink.method_name(), sink.subscription_id(), &item)
                    .map_err(SubscriptionSerializeError::from)?;
                if sink.send(msg).await.is_err() { break Ok(()); }
            }
        }
    }
}

pub(super) fn new_headers_stream<Eth: EthWrapper>(
    provider: &Arc<Eth::Provider>,
) -> impl Stream<Item = Header<alloy_consensus::Header>> {
    provider.canonical_state_stream().flat_map(|new_chain| {
        let headers = new_chain
            .committed()
            .blocks_iter()
            .map(|block| {
                Header::from_consensus(
                    SealedHeader::new(block.header().inner.clone(), block.hash()).into(),
                    None,
                    Some(U256::from(block.rlp_length())),
                )
            })
            .collect::<Vec<_>>();
        futures::stream::iter(headers)
    })
}
@ -1,5 +1,5 @@
|
|||||||
use alloy_chains::{Chain, NamedChain};
|
use alloy_chains::{Chain, NamedChain};
|
||||||
use alloy_primitives::{b256, Address, Bytes, B256, B64, U256};
|
use alloy_primitives::{Address, B64, B256, Bytes, U256, b256};
|
||||||
use reth_chainspec::{ChainHardforks, ChainSpec, EthereumHardfork, ForkCondition, Hardfork};
|
use reth_chainspec::{ChainHardforks, ChainSpec, EthereumHardfork, ForkCondition, Hardfork};
|
||||||
use reth_primitives::{Header, SealedHeader};
|
use reth_primitives::{Header, SealedHeader};
|
||||||
use std::sync::LazyLock;
|
use std::sync::LazyLock;
|
||||||
|
|||||||
@ -1,8 +1,7 @@
|
|||||||
pub mod hl;
|
pub mod hl;
|
||||||
pub mod parser;
|
pub mod parser;
|
||||||
|
|
||||||
use crate::hardforks::HlHardforks;
|
use crate::{hardforks::HlHardforks, node::primitives::{header::HlHeaderExtras, HlHeader}};
|
||||||
use alloy_consensus::Header;
|
|
||||||
use alloy_eips::eip7840::BlobParams;
|
use alloy_eips::eip7840::BlobParams;
|
||||||
use alloy_genesis::Genesis;
|
use alloy_genesis::Genesis;
|
||||||
use alloy_primitives::{Address, B256, U256};
|
use alloy_primitives::{Address, B256, U256};
|
||||||
@ -20,10 +19,11 @@ pub const TESTNET_CHAIN_ID: u64 = 998;
|
|||||||
#[derive(Debug, Default, Clone, PartialEq, Eq)]
|
#[derive(Debug, Default, Clone, PartialEq, Eq)]
|
||||||
pub struct HlChainSpec {
|
pub struct HlChainSpec {
|
||||||
pub inner: ChainSpec,
|
pub inner: ChainSpec,
|
||||||
|
pub genesis_header: HlHeader,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl EthChainSpec for HlChainSpec {
|
impl EthChainSpec for HlChainSpec {
|
||||||
type Header = Header;
|
type Header = HlHeader;
|
||||||
|
|
||||||
fn blob_params_at_timestamp(&self, timestamp: u64) -> Option<BlobParams> {
|
fn blob_params_at_timestamp(&self, timestamp: u64) -> Option<BlobParams> {
|
||||||
self.inner.blob_params_at_timestamp(timestamp)
|
self.inner.blob_params_at_timestamp(timestamp)
|
||||||
@ -37,10 +37,6 @@ impl EthChainSpec for HlChainSpec {
|
|||||||
self.inner.chain()
|
self.inner.chain()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn base_fee_params_at_block(&self, block_number: u64) -> BaseFeeParams {
|
|
||||||
self.inner.base_fee_params_at_block(block_number)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn base_fee_params_at_timestamp(&self, timestamp: u64) -> BaseFeeParams {
|
fn base_fee_params_at_timestamp(&self, timestamp: u64) -> BaseFeeParams {
|
||||||
self.inner.base_fee_params_at_timestamp(timestamp)
|
self.inner.base_fee_params_at_timestamp(timestamp)
|
||||||
}
|
}
|
||||||
@ -61,8 +57,8 @@ impl EthChainSpec for HlChainSpec {
|
|||||||
Box::new(self.inner.display_hardforks())
|
Box::new(self.inner.display_hardforks())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn genesis_header(&self) -> &Header {
|
fn genesis_header(&self) -> &HlHeader {
|
||||||
self.inner.genesis_header()
|
&self.genesis_header
|
||||||
}
|
}
|
||||||
|
|
||||||
fn genesis(&self) -> &Genesis {
|
fn genesis(&self) -> &Genesis {
|
||||||
@ -131,4 +127,10 @@ impl HlChainSpec {
|
|||||||
_ => unreachable!("Unreachable since ChainSpecParser won't return other chains"),
|
_ => unreachable!("Unreachable since ChainSpecParser won't return other chains"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn new(inner: ChainSpec) -> Self {
|
||||||
|
let genesis_header =
|
||||||
|
HlHeader { inner: inner.genesis_header().clone(), extras: HlHeaderExtras::default() };
|
||||||
|
Self { inner, genesis_header }
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,4 +1,4 @@
|
|||||||
use crate::chainspec::{hl::hl_testnet, HlChainSpec};
|
use crate::chainspec::{HlChainSpec, hl::hl_testnet};
|
||||||
|
|
||||||
use super::hl::hl_mainnet;
|
use super::hl::hl_mainnet;
|
||||||
use reth_cli::chainspec::ChainSpecParser;
|
use reth_cli::chainspec::ChainSpecParser;
|
||||||
@ -26,8 +26,8 @@ impl ChainSpecParser for HlChainSpecParser {
|
|||||||
/// Currently only mainnet is supported.
|
/// Currently only mainnet is supported.
|
||||||
pub fn chain_value_parser(s: &str) -> eyre::Result<Arc<HlChainSpec>> {
|
pub fn chain_value_parser(s: &str) -> eyre::Result<Arc<HlChainSpec>> {
|
||||||
match s {
|
match s {
|
||||||
"mainnet" => Ok(Arc::new(HlChainSpec { inner: hl_mainnet() })),
|
"mainnet" => Ok(Arc::new(HlChainSpec::new(hl_mainnet()))),
|
||||||
"testnet" => Ok(Arc::new(HlChainSpec { inner: hl_testnet() })),
|
"testnet" => Ok(Arc::new(HlChainSpec::new(hl_testnet()))),
|
||||||
_ => Err(eyre::eyre!("Unsupported chain: {}", s)),
|
_ => Err(eyre::eyre!("Unsupported chain: {}", s)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,4 +1,4 @@
|
|||||||
use alloy_primitives::{BlockNumber, B256};
|
use alloy_primitives::{B256, BlockNumber};
|
||||||
use reth_provider::{BlockNumReader, ProviderError};
|
use reth_provider::{BlockNumReader, ProviderError};
|
||||||
use std::cmp::Ordering;
|
use std::cmp::Ordering;
|
||||||
|
|
||||||
|
|||||||
@ -2,8 +2,8 @@ use super::HlEvmInner;
|
|||||||
use crate::evm::{spec::HlSpecId, transaction::HlTxTr};
|
use crate::evm::{spec::HlSpecId, transaction::HlTxTr};
|
||||||
use reth_revm::context::ContextTr;
|
use reth_revm::context::ContextTr;
|
||||||
use revm::{
|
use revm::{
|
||||||
context::Cfg, context_interface::Block, handler::instructions::EthInstructions,
|
Context, Database, context::Cfg, context_interface::Block,
|
||||||
interpreter::interpreter::EthInterpreter, Context, Database,
|
handler::instructions::EthInstructions, interpreter::interpreter::EthInterpreter,
|
||||||
};
|
};
|
||||||
|
|
||||||
/// Trait that allows for hl HlEvm to be built.
|
/// Trait that allows for hl HlEvm to be built.
|
||||||
|
|||||||
@ -1,8 +1,8 @@
|
|||||||
use crate::evm::{spec::HlSpecId, transaction::HlTxEnv};
|
use crate::evm::{spec::HlSpecId, transaction::HlTxEnv};
|
||||||
use revm::{
|
use revm::{
|
||||||
|
Context, Journal, MainContext,
|
||||||
context::{BlockEnv, CfgEnv, TxEnv},
|
context::{BlockEnv, CfgEnv, TxEnv},
|
||||||
database_interface::EmptyDB,
|
database_interface::EmptyDB,
|
||||||
Context, Journal, MainContext,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
/// Type alias for the default context type of the HlEvm.
|
 /// Type alias for the default context type of the HlEvm.

@@ -1,16 +1,16 @@
 use super::HlEvmInner;
 use crate::evm::{spec::HlSpecId, transaction::HlTxTr};
 use revm::{
-    context::{result::HaltReason, ContextSetters},
-    context_interface::{
-        result::{EVMError, ExecutionResult, ResultAndState},
-        Cfg, ContextTr, Database, JournalTr,
-    },
-    handler::{instructions::EthInstructions, PrecompileProvider},
-    inspector::{InspectCommitEvm, InspectEvm, Inspector, JournalExt},
-    interpreter::{interpreter::EthInterpreter, InterpreterResult},
-    state::EvmState,
     DatabaseCommit, ExecuteCommitEvm, ExecuteEvm,
+    context::{ContextSetters, result::HaltReason},
+    context_interface::{
+        Cfg, ContextTr, Database, JournalTr,
+        result::{EVMError, ExecutionResult, ResultAndState},
+    },
+    handler::{PrecompileProvider, instructions::EthInstructions},
+    inspector::{InspectCommitEvm, InspectEvm, Inspector, JournalExt},
+    interpreter::{InterpreterResult, interpreter::EthInterpreter},
+    state::EvmState,
 };

 // Type alias for HL context

@@ -1,15 +1,15 @@
 use revm::{
+    Inspector,
     bytecode::opcode::BLOCKHASH,
     context::{ContextSetters, Evm, FrameStack},
     context_interface::ContextTr,
     handler::{
+        EthFrame, EthPrecompiles, EvmTr, FrameInitOrResult, FrameTr, PrecompileProvider,
         evm::{ContextDbError, FrameInitResult},
         instructions::{EthInstructions, InstructionProvider},
-        EthFrame, EthPrecompiles, EvmTr, FrameInitOrResult, FrameTr, PrecompileProvider,
     },
     inspector::{InspectorEvmTr, JournalExt},
-    interpreter::{interpreter::EthInterpreter, Instruction, InterpreterResult},
-    Inspector,
+    interpreter::{Instruction, InterpreterResult, interpreter::EthInterpreter},
 };

 use crate::chainspec::MAINNET_CHAIN_ID;

@@ -7,36 +7,12 @@ use alloy_primitives::keccak256;
 use revm::{
     context::Host,
     interpreter::{
-        as_u64_saturated, interpreter_types::StackTr, popn_top, InstructionContext,
-        InterpreterTypes,
+        _count, InstructionContext, InterpreterTypes, as_u64_saturated, interpreter_types::StackTr,
+        popn_top,
     },
     primitives::{BLOCK_HASH_HISTORY, U256},
 };

-#[doc(hidden)]
-#[macro_export]
-#[collapse_debuginfo(yes)]
-macro_rules! _count {
-    (@count) => { 0 };
-    (@count $head:tt $($tail:tt)*) => { 1 + _count!(@count $($tail)*) };
-    ($($arg:tt)*) => { _count!(@count $($arg)*) };
-}
-
-/// Pops n values from the stack and returns the top value. Fails the instruction if n values can't
-/// be popped.
-#[macro_export]
-#[collapse_debuginfo(yes)]
-macro_rules! popn_top {
-    ([ $($x:ident),* ], $top:ident, $interpreter:expr $(,$ret:expr)? ) => {
-        // Workaround for https://github.com/rust-lang/rust/issues/144329.
-        if $interpreter.stack.len() < (1 + $crate::_count!($($x)*)) {
-            $interpreter.halt_underflow();
-            return $($ret)?;
-        }
-        let ([$( $x ),*], $top) = unsafe { $interpreter.stack.popn_top().unwrap_unchecked() };
-    };
-}
-
 /// Implements the BLOCKHASH instruction.
 ///
 /// Gets the hash of one of the 256 most recent complete blocks.
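Aside: the removed `_count!` helper above is the usual token-counting macro recursion, which `popn_top!` used to size its stack-underflow check before popping. A minimal, self-contained sketch of the same pattern (illustrative only, not part of this diff):

    macro_rules! count_tokens {
        (@count) => { 0usize };
        (@count $head:tt $($tail:tt)*) => { 1usize + count_tokens!(@count $($tail)*) };
        ($($arg:tt)*) => { count_tokens!(@count $($arg)*) };
    }

    fn main() {
        // Each token tree contributes 1; this mirrors how popn_top! sized its check.
        assert_eq!(count_tokens!(a b c), 3);
        assert_eq!(count_tokens!(), 0);
    }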
@@ -7,7 +7,7 @@ use reth_primitives_traits::SignerRecoverable;
 use revm::{
     context::TxEnv,
     context_interface::transaction::Transaction,
-    primitives::{Address, Bytes, TxKind, B256, U256},
+    primitives::{Address, B256, Bytes, TxKind, U256},
 };

 #[auto_impl(&, &mut, Box, Arc)]

@@ -2,7 +2,7 @@
 use alloy_chains::{Chain, NamedChain};
 use core::any::Any;
 use reth_chainspec::ForkCondition;
-use reth_ethereum_forks::{hardfork, ChainHardforks, EthereumHardfork, Hardfork};
+use reth_ethereum_forks::{ChainHardforks, EthereumHardfork, Hardfork, hardfork};

 hardfork!(
     /// The name of a hl hardfork.

@@ -5,5 +5,6 @@ mod evm;
 mod hardforks;
 pub mod node;
 pub mod pseudo_peer;
+pub mod version;

-pub use node::primitives::{HlBlock, HlBlockBody, HlPrimitives};
+pub use node::primitives::{HlBlock, HlBlockBody, HlHeader, HlPrimitives};

41 src/main.rs
@@ -1,19 +1,24 @@
 use std::sync::Arc;

 use clap::Parser;
-use reth::builder::{NodeBuilder, NodeHandle, WithLaunchContext};
+use reth::{
+    builder::{NodeBuilder, NodeHandle, WithLaunchContext},
+    rpc::{api::EthPubSubApiServer, eth::RpcNodeCore},
+};
 use reth_db::DatabaseEnv;
 use reth_hl::{
     addons::{
         call_forwarder::{self, CallForwarderApiServer},
         hl_node_compliance::install_hl_node_compliance,
+        subscribe_fixup::SubscribeFixup,
         tx_forwarder::{self, EthForwarderApiServer},
     },
-    chainspec::{parser::HlChainSpecParser, HlChainSpec},
+    chainspec::{HlChainSpec, parser::HlChainSpecParser},
     node::{
-        cli::{Cli, HlNodeArgs},
-        storage::tables::Tables,
         HlNode,
+        cli::{Cli, HlNodeArgs},
+        rpc::precompile::{HlBlockPrecompileApiServer, HlBlockPrecompileExt},
+        storage::tables::Tables,
     },
 };
 use tracing::info;

@@ -26,17 +31,16 @@ static ALLOC: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
 fn main() -> eyre::Result<()> {
     reth_cli_util::sigsegv_handler::install();

-    // Enable backtraces unless a RUST_BACKTRACE value has already been explicitly provided.
-    if std::env::var_os("RUST_BACKTRACE").is_none() {
-        std::env::set_var("RUST_BACKTRACE", "1");
-    }
+    // Initialize custom version metadata before parsing CLI so --version uses reth-hl values
+    reth_hl::version::init_reth_hl_version();

     Cli::<HlChainSpecParser, HlNodeArgs>::parse().run(
         |builder: WithLaunchContext<NodeBuilder<Arc<DatabaseEnv>, HlChainSpec>>,
          ext: HlNodeArgs| async move {
             let default_upstream_rpc_url = builder.config().chain.official_rpc_url();

-            let (node, engine_handle_tx) = HlNode::new(ext.block_source_args.parse().await?);
+            let (node, engine_handle_tx) =
+                HlNode::new(ext.block_source_args.parse().await?, ext.debug_cutoff_height);
             let NodeHandle { node, node_exit_future: exit_future } = builder
                 .node(node)
                 .extend_rpc_modules(move |mut ctx| {

@@ -69,10 +73,25 @@ fn main() -> eyre::Result<()> {
                         info!("eth_getProof is disabled by default");
                     }

+                    // This is a temporary workaround to fix the issue with custom headers
+                    // affects `eth_subscribe[type=newHeads]`
+                    ctx.modules.replace_configured(
+                        SubscribeFixup::new(
+                            Arc::new(ctx.registry.eth_handlers().pubsub.clone()),
+                            Arc::new(ctx.registry.eth_api().provider().clone()),
+                            Box::new(ctx.node().task_executor.clone()),
+                        )
+                        .into_rpc(),
+                    )?;
+
+                    ctx.modules.merge_configured(
+                        HlBlockPrecompileExt::new(ctx.registry.eth_api().clone()).into_rpc(),
+                    )?;
+
                     Ok(())
                 })
-                .apply(|builder| {
-                    builder.db().create_tables_for::<Tables>().expect("create tables");
+                .apply(|mut builder| {
+                    builder.db_mut().create_tables_for::<Tables>().expect("create tables");
                     builder
                 })
                 .launch()
@@ -1,21 +1,24 @@
 use crate::{
-    chainspec::{parser::HlChainSpecParser, HlChainSpec},
-    node::{consensus::HlConsensus, evm::config::HlEvmConfig, storage::tables::Tables, HlNode},
+    chainspec::{HlChainSpec, parser::HlChainSpecParser},
+    node::{
+        HlNode, consensus::HlConsensus, evm::config::HlEvmConfig, migrate::Migrator,
+        storage::tables::Tables,
+    },
     pseudo_peer::BlockSourceArgs,
 };
 use clap::{Args, Parser};
 use reth::{
-    args::LogArgs,
+    CliRunner,
+    args::{DatabaseArgs, DatadirArgs, LogArgs},
     builder::{NodeBuilder, WithLaunchContext},
     cli::Commands,
     prometheus_exporter::install_prometheus_recorder,
     version::version_metadata,
-    CliRunner,
 };
 use reth_chainspec::EthChainSpec;
 use reth_cli::chainspec::ChainSpecParser;
 use reth_cli_commands::{common::EnvironmentArgs, launcher::FnLauncher};
-use reth_db::{init_db, mdbx::init_db_for, DatabaseEnv};
+use reth_db::{DatabaseEnv, init_db, mdbx::init_db_for};
 use reth_tracing::FileWorkerGuard;
 use std::{
     fmt::{self},

@@ -35,6 +38,12 @@ pub struct HlNodeArgs {
     #[command(flatten)]
     pub block_source_args: BlockSourceArgs,

+    /// Debug cutoff height.
+    ///
+    /// This option is used to cut off the block import at a specific height.
+    #[arg(long, env = "DEBUG_CUTOFF_HEIGHT")]
+    pub debug_cutoff_height: Option<u64>,
+
     /// Upstream RPC URL to forward incoming transactions.
     ///
     /// Default to Hyperliquid's RPC URL when not provided (https://rpc.hyperliquid.xyz/evm).

@@ -130,11 +139,14 @@ where
         // Install the prometheus recorder to be sure to record all metrics
         let _ = install_prometheus_recorder();

-        let components =
-            |spec: Arc<C::ChainSpec>| (HlEvmConfig::new(spec.clone()), HlConsensus::new(spec));
+        let components = |spec: Arc<C::ChainSpec>| {
+            (HlEvmConfig::new(spec.clone()), Arc::new(HlConsensus::new(spec)))
+        };

         match self.command {
             Commands::Node(command) => runner.run_command_until_exit(|ctx| {
+                Self::migrate_db(&command.chain, &command.datadir, &command.db)
+                    .expect("Failed to migrate database");
                 command.execute(ctx, FnLauncher::new::<C, Ext>(launcher))
             }),
             Commands::Init(command) => {

@@ -151,9 +163,6 @@ where
                 runner.run_command_until_exit(|ctx| command.execute::<HlNode, _>(ctx, components))
             }
             Commands::Config(command) => runner.run_until_ctrl_c(command.execute()),
-            Commands::Recover(command) => {
-                runner.run_command_until_exit(|ctx| command.execute::<HlNode>(ctx))
-            }
             Commands::Prune(command) => runner.run_until_ctrl_c(command.execute::<HlNode>()),
             Commands::Import(command) => {
                 runner.run_blocking_until_ctrl_c(command.execute::<HlNode, _>(components))

@@ -184,4 +193,13 @@ where
         init_db_for::<_, Tables>(db_path, env.db.database_args())?;
         Ok(())
     }
+
+    fn migrate_db(
+        chain: &HlChainSpec,
+        datadir: &DatadirArgs,
+        db: &DatabaseArgs,
+    ) -> eyre::Result<()> {
+        Migrator::<HlNode>::new(chain.clone(), datadir.clone(), *db)?.migrate_db()?;
+        Ok(())
+    }
 }
@@ -1,9 +1,8 @@
-use crate::{hardforks::HlHardforks, node::HlNode, HlBlock, HlBlockBody, HlPrimitives};
-use alloy_consensus::Header;
+use crate::{hardforks::HlHardforks, node::{primitives::HlHeader, HlNode}, HlBlock, HlBlockBody, HlPrimitives};
 use reth::{
-    api::FullNodeTypes,
+    api::{FullNodeTypes, NodeTypes},
     beacon_consensus::EthBeaconConsensus,
-    builder::{components::ConsensusBuilder, BuilderContext},
+    builder::{BuilderContext, components::ConsensusBuilder},
     consensus::{Consensus, ConsensusError, FullConsensus, HeaderValidator},
     consensus_common::validation::{
         validate_against_parent_4844, validate_against_parent_hash_number,

@@ -24,7 +23,7 @@ impl<Node> ConsensusBuilder<Node> for HlConsensusBuilder
 where
     Node: FullNodeTypes<Types = HlNode>,
 {
-    type Consensus = Arc<dyn FullConsensus<HlPrimitives, Error = ConsensusError>>;
+    type Consensus = Arc<HlConsensus<<Node::Types as NodeTypes>::ChainSpec>>;

     async fn build_consensus(self, ctx: &BuilderContext<Node>) -> eyre::Result<Self::Consensus> {
         Ok(Arc::new(HlConsensus::new(ctx.chain_spec())))

@@ -101,14 +100,14 @@ where

 impl<ChainSpec> Consensus<HlBlock> for HlConsensus<ChainSpec>
 where
-    ChainSpec: EthChainSpec<Header = Header> + HlHardforks,
+    ChainSpec: EthChainSpec<Header = HlHeader> + HlHardforks,
 {
     type Error = ConsensusError;

     fn validate_body_against_header(
         &self,
         body: &HlBlockBody,
-        header: &SealedHeader,
+        header: &SealedHeader<HlHeader>,
     ) -> Result<(), ConsensusError> {
         Consensus::<HlBlock>::validate_body_against_header(&self.inner, body, header)
     }

@@ -148,7 +147,7 @@ mod reth_copy;

 impl<ChainSpec> FullConsensus<HlPrimitives> for HlConsensus<ChainSpec>
 where
-    ChainSpec: EthChainSpec<Header = Header> + HlHardforks,
+    ChainSpec: EthChainSpec<Header = HlHeader> + HlHardforks,
 {
     fn validate_block_post_execution(
         &self,

@@ -1,21 +1,21 @@
 //! Copy of reth codebase.

-use alloy_consensus::{proofs::calculate_receipt_root, BlockHeader, TxReceipt};
+use crate::HlBlock;
+use alloy_consensus::{BlockHeader, TxReceipt, proofs::calculate_receipt_root};
 use alloy_eips::eip7685::Requests;
-use alloy_primitives::{Bloom, B256};
+use alloy_primitives::{B256, Bloom};
 use reth::consensus::ConsensusError;
 use reth_chainspec::EthereumHardforks;
-use reth_primitives::{gas_spent_by_transactions, GotExpected, RecoveredBlock};
-use reth_primitives_traits::{Block, Receipt as ReceiptTrait};
+use reth_primitives::{GotExpected, RecoveredBlock, gas_spent_by_transactions};
+use reth_primitives_traits::Receipt as ReceiptTrait;

-pub fn validate_block_post_execution<B, R, ChainSpec>(
-    block: &RecoveredBlock<B>,
+pub fn validate_block_post_execution<R, ChainSpec>(
+    block: &RecoveredBlock<HlBlock>,
     chain_spec: &ChainSpec,
     receipts: &[R],
     requests: &Requests,
 ) -> Result<(), ConsensusError>
 where
-    B: Block,
     R: ReceiptTrait,
     ChainSpec: EthereumHardforks,
 {

@@ -42,7 +42,7 @@ where
         receipts.iter().filter(|&r| r.cumulative_gas_used() != 0).cloned().collect::<Vec<_>>();
     if let Err(error) = verify_receipts(
         block.header().receipts_root(),
-        block.header().logs_bloom(),
+        block.header().inner.logs_bloom(),
         &receipts_for_root,
     ) {
         tracing::debug!(%error, ?receipts, "receipts verification failed");

@@ -1,8 +1,6 @@
 use crate::{
-    node::evm::config::{HlBlockExecutorFactory, HlEvmConfig},
-    HlBlock,
+    node::evm::config::{HlBlockExecutorFactory, HlEvmConfig}, HlBlock, HlHeader
 };
-use alloy_consensus::Header;
 use reth_evm::{
     block::BlockExecutionError,
     execute::{BlockAssembler, BlockAssemblerInput},

@@ -13,7 +11,7 @@ impl BlockAssembler<HlBlockExecutorFactory> for HlEvmConfig {

     fn assemble_block(
         &self,
-        input: BlockAssemblerInput<'_, '_, HlBlockExecutorFactory, Header>,
+        input: BlockAssemblerInput<'_, '_, HlBlockExecutorFactory, HlHeader>,
     ) -> Result<Self::Block, BlockExecutionError> {
         let HlBlock { header, body } = self.block_assembler.assemble_block(input)?;
         Ok(HlBlock { header, body })

@@ -1,5 +1,6 @@
 use super::{executor::HlBlockExecutor, factory::HlEvmFactory};
 use crate::{
+    HlBlock, HlBlockBody, HlHeader, HlPrimitives,
     chainspec::HlChainSpec,
     evm::{spec::HlSpecId, transaction::HlTxEnv},
     hardforks::HlHardforks,

@@ -9,31 +10,30 @@ use crate::{
         rpc::engine_api::validator::HlExecutionData,
         types::HlExtras,
     },
-    HlBlock, HlBlockBody, HlPrimitives,
 };
-use alloy_consensus::{BlockHeader, Header, Transaction as _, TxReceipt, EMPTY_OMMER_ROOT_HASH};
-use alloy_eips::{merge::BEACON_NONCE, Encodable2718};
+use alloy_consensus::{BlockHeader, EMPTY_OMMER_ROOT_HASH, Header, Transaction as _, TxReceipt};
+use alloy_eips::{Encodable2718, merge::BEACON_NONCE};
 use alloy_primitives::{Log, U256};
 use reth_chainspec::{EthChainSpec, EthereumHardforks, Hardforks};
 use reth_evm::{
-    block::{BlockExecutionError, BlockExecutorFactory, BlockExecutorFor},
-    eth::{receipt_builder::ReceiptBuilder, EthBlockExecutionCtx},
-    execute::{BlockAssembler, BlockAssemblerInput},
-    precompiles::PrecompilesMap,
     ConfigureEngineEvm, ConfigureEvm, EvmEnv, EvmEnvFor, EvmFactory, ExecutableTxIterator,
     ExecutionCtxFor, FromRecoveredTx, FromTxWithEncoded, IntoTxEnv, NextBlockEnvAttributes,
+    block::{BlockExecutionError, BlockExecutorFactory, BlockExecutorFor},
+    eth::{EthBlockExecutionCtx, receipt_builder::ReceiptBuilder},
+    execute::{BlockAssembler, BlockAssemblerInput},
+    precompiles::PrecompilesMap,
 };
 use reth_evm_ethereum::EthBlockAssembler;
 use reth_payload_primitives::NewPayloadError;
-use reth_primitives::{logs_bloom, BlockTy, HeaderTy, Receipt, SealedBlock, SealedHeader};
-use reth_primitives_traits::{proofs, SignerRecoverable, WithEncoded};
+use reth_primitives::{BlockTy, HeaderTy, Receipt, SealedBlock, SealedHeader, logs_bloom};
+use reth_primitives_traits::{SignerRecoverable, WithEncoded, proofs};
 use reth_provider::BlockExecutionResult;
 use reth_revm::State;
 use revm::{
+    Inspector,
     context::{BlockEnv, CfgEnv, TxEnv},
     context_interface::block::BlobExcessGasAndPrice,
     primitives::hardfork::SpecId,
-    Inspector,
 };
 use std::{borrow::Cow, convert::Infallible, sync::Arc};

@@ -45,16 +45,16 @@ pub struct HlBlockAssembler {
 impl<F> BlockAssembler<F> for HlBlockAssembler
 where
     F: for<'a> BlockExecutorFactory<
         ExecutionCtx<'a> = HlBlockExecutionCtx<'a>,
         Transaction = TransactionSigned,
         Receipt = Receipt,
     >,
 {
     type Block = HlBlock;

     fn assemble_block(
         &self,
-        input: BlockAssemblerInput<'_, '_, F>,
+        input: BlockAssemblerInput<'_, '_, F, HlHeader>,
     ) -> Result<Self::Block, BlockExecutionError> {
         // TODO: Copy of EthBlockAssembler::assemble_block
         let inner = &self.inner;

@@ -106,7 +106,10 @@ where
         } else {
             // for the first post-fork block, both parent.blob_gas_used and
             // parent.excess_blob_gas are evaluated as 0
-            Some(alloy_eips::eip7840::BlobParams::cancun().next_block_excess_blob_gas(0, 0))
+            Some(
+                alloy_eips::eip7840::BlobParams::cancun()
+                    .next_block_excess_blob_gas_osaka(0, 0, 0),
+            )
         };
     }

@@ -133,6 +136,9 @@ where
             excess_blob_gas,
             requests_hash,
         };
+        let system_tx_count =
+            transactions.iter().filter(|t| is_system_transaction(t)).count() as u64;
+        let header = HlHeader::from_ethereum_header(header, receipts, system_tx_count);

         Ok(Self::Block {
             header,

@@ -237,9 +243,9 @@ where
     R: ReceiptBuilder<Transaction = TransactionSigned, Receipt: TxReceipt<Log = Log>>,
     Spec: EthereumHardforks + HlHardforks + EthChainSpec + Hardforks + Clone,
     EvmF: EvmFactory<
         Tx: FromRecoveredTx<TransactionSigned> + FromTxWithEncoded<TransactionSigned>,
         Precompiles = PrecompilesMap,
     >,
     R::Transaction: From<TransactionSigned> + Clone,
     Self: 'static,
     HlTxEnv<TxEnv>: IntoTxEnv<<EvmF as EvmFactory>::Tx>,

@@ -266,6 +272,8 @@ where
     }
 }

+static EMPTY_OMMERS: [Header; 0] = [];
+
 impl ConfigureEvm for HlEvmConfig
 where
     Self: Send + Sync + Unpin + Clone + 'static,

@@ -284,7 +292,7 @@ where
         self
     }

-    fn evm_env(&self, header: &Header) -> EvmEnv<HlSpecId> {
+    fn evm_env(&self, header: &HlHeader) -> Result<EvmEnv<HlSpecId>, Self::Error> {
         let blob_params = self.chain_spec().blob_params_at_timestamp(header.timestamp);
         let spec = revm_spec_by_timestamp_and_block_number(
             self.chain_spec().clone(),

@@ -324,12 +332,12 @@ where
             blob_excess_gas_and_price,
         };

-        EvmEnv { cfg_env, block_env }
+        Ok(EvmEnv { cfg_env, block_env })
     }

     fn next_evm_env(
         &self,
-        parent: &Header,
+        parent: &HlHeader,
         attributes: &Self::NextBlockEnvCtx,
     ) -> Result<EvmEnv<HlSpecId>, Self::Error> {
         // ensure we're not missing any timestamp based hardforks

@@ -373,28 +381,28 @@ where
     fn context_for_block<'a>(
         &self,
         block: &'a SealedBlock<BlockTy<Self::Primitives>>,
-    ) -> ExecutionCtxFor<'a, Self> {
+    ) -> Result<ExecutionCtxFor<'a, Self>, Self::Error> {
         let block_body = block.body();
-        HlBlockExecutionCtx {
+        Ok(HlBlockExecutionCtx {
             ctx: EthBlockExecutionCtx {
                 parent_hash: block.header().parent_hash,
                 parent_beacon_block_root: block.header().parent_beacon_block_root,
-                ommers: &block.body().ommers,
+                ommers: &EMPTY_OMMERS,
                 withdrawals: block.body().withdrawals.as_ref().map(Cow::Borrowed),
             },
             extras: HlExtras {
                 read_precompile_calls: block_body.read_precompile_calls.clone(),
                 highest_precompile_address: block_body.highest_precompile_address,
             },
-        }
+        })
     }

     fn context_for_next_block(
         &self,
         parent: &SealedHeader<HeaderTy<Self::Primitives>>,
         attributes: Self::NextBlockEnvCtx,
-    ) -> ExecutionCtxFor<'_, Self> {
-        HlBlockExecutionCtx {
+    ) -> Result<ExecutionCtxFor<'_, Self>, Self::Error> {
+        Ok(HlBlockExecutionCtx {
             ctx: EthBlockExecutionCtx {
                 parent_hash: parent.hash(),
                 parent_beacon_block_root: attributes.parent_beacon_block_root,

@@ -402,13 +410,13 @@ where
                 withdrawals: attributes.withdrawals.map(Cow::Owned),
             },
             extras: HlExtras::default(), // TODO: hacky, double check if this is correct
-        }
+        })
     }
 }

 impl ConfigureEngineEvm<HlExecutionData> for HlEvmConfig {
     fn evm_env_for_payload(&self, payload: &HlExecutionData) -> EvmEnvFor<Self> {
-        self.evm_env(&payload.0.header)
+        self.evm_env(&payload.0.header).unwrap()
     }

     fn context_for_payload<'a>(&self, payload: &'a HlExecutionData) -> ExecutionCtxFor<'a, Self> {

@@ -417,7 +425,7 @@ impl ConfigureEngineEvm<HlExecutionData> for HlEvmConfig {
             ctx: EthBlockExecutionCtx {
                 parent_hash: block.header.parent_hash,
                 parent_beacon_block_root: block.header.parent_beacon_block_root,
-                ommers: &block.body.ommers,
+                ommers: &EMPTY_OMMERS,
                 withdrawals: block.body.withdrawals.as_ref().map(Cow::Borrowed),
             },
             extras: HlExtras {

@@ -8,29 +8,26 @@ use crate::{
     },
 };
 use alloy_consensus::{Transaction, TxReceipt};
-use alloy_eips::{eip7685::Requests, Encodable2718};
+use alloy_eips::{Encodable2718, eip7685::Requests};
 use alloy_evm::{block::ExecutableTx, eth::receipt_builder::ReceiptBuilderCtx};
-use alloy_primitives::{address, hex, Address, Bytes, U160, U256};
+use alloy_primitives::{Address, Bytes, U160, U256, address, hex};
 use reth_chainspec::{EthChainSpec, EthereumHardforks, Hardforks};
 use reth_evm::{
-    block::{BlockValidationError, CommitChanges},
+    Database, Evm, FromRecoveredTx, FromTxWithEncoded, IntoTxEnv, OnStateHook,
+    block::BlockValidationError,
     eth::receipt_builder::ReceiptBuilder,
     execute::{BlockExecutionError, BlockExecutor},
     precompiles::{DynPrecompile, PrecompileInput, PrecompilesMap},
-    Database, Evm, FromRecoveredTx, FromTxWithEncoded, IntoTxEnv, OnStateHook,
 };
 use reth_provider::BlockExecutionResult;
 use reth_revm::State;
 use revm::{
-    context::{
-        result::{ExecutionResult, ResultAndState},
-        TxEnv,
-    },
+    DatabaseCommit,
+    context::{TxEnv, result::ResultAndState},
     interpreter::instructions::utility::IntoU256,
     precompile::{PrecompileError, PrecompileOutput, PrecompileResult},
     primitives::HashMap,
     state::Bytecode,
-    DatabaseCommit,
 };

 pub fn is_system_transaction(tx: &TransactionSigned) -> bool {

@@ -87,12 +84,12 @@ impl<'a, DB, EVM, Spec, R: ReceiptBuilder> HlBlockExecutor<'a, EVM, Spec, R>
 where
     DB: Database + 'a,
     EVM: Evm<
         DB = &'a mut State<DB>,
         Precompiles = PrecompilesMap,
         Tx: FromRecoveredTx<R::Transaction>
             + FromRecoveredTx<TransactionSigned>
             + FromTxWithEncoded<TransactionSigned>,
     >,
     Spec: EthereumHardforks + HlHardforks + EthChainSpec + Hardforks + Clone,
     R: ReceiptBuilder<Transaction = TransactionSigned, Receipt: TxReceipt>,
     <R as ReceiptBuilder>::Transaction: Unpin + From<TransactionSigned>,

@@ -110,7 +107,9 @@ where
         const COREWRITER_ENABLED_BLOCK_NUMBER: u64 = 7578300;
         const COREWRITER_CONTRACT_ADDRESS: Address =
             address!("0x3333333333333333333333333333333333333333");
-        const COREWRITER_CODE: &[u8] = &hex!("608060405234801561000f575f5ffd5b5060043610610029575f3560e01c806317938e131461002d575b5f5ffd5b61004760048036038101906100429190610123565b610049565b005b5f5f90505b61019081101561006557808060010191505061004e565b503373ffffffffffffffffffffffffffffffffffffffff167f8c7f585fb295f7eb1e6aeb8fba61b23a4fe60beda405f0045073b185c74412e383836040516100ae9291906101c8565b60405180910390a25050565b5f5ffd5b5f5ffd5b5f5ffd5b5f5ffd5b5f5ffd5b5f5f83601f8401126100e3576100e26100c2565b5b8235905067ffffffffffffffff811115610100576100ff6100c6565b5b60208301915083600182028301111561011c5761011b6100ca565b5b9250929050565b5f5f60208385031215610139576101386100ba565b5b5f83013567ffffffffffffffff811115610156576101556100be565b5b610162858286016100ce565b92509250509250929050565b5f82825260208201905092915050565b828183375f83830152505050565b5f601f19601f8301169050919050565b5f6101a7838561016e565b93506101b483858461017e565b6101bd8361018c565b840190509392505050565b5f6020820190508181035f8301526101e181848661019c565b9050939250505056fea2646970667358221220f01517e1fbaff8af4bd72cb063cccecbacbb00b07354eea7dd52265d355474fb64736f6c634300081c0033");
+        const COREWRITER_CODE: &[u8] = &hex!(
+            "608060405234801561000f575f5ffd5b5060043610610029575f3560e01c806317938e131461002d575b5f5ffd5b61004760048036038101906100429190610123565b610049565b005b5f5f90505b61019081101561006557808060010191505061004e565b503373ffffffffffffffffffffffffffffffffffffffff167f8c7f585fb295f7eb1e6aeb8fba61b23a4fe60beda405f0045073b185c74412e383836040516100ae9291906101c8565b60405180910390a25050565b5f5ffd5b5f5ffd5b5f5ffd5b5f5ffd5b5f5ffd5b5f5f83601f8401126100e3576100e26100c2565b5b8235905067ffffffffffffffff811115610100576100ff6100c6565b5b60208301915083600182028301111561011c5761011b6100ca565b5b9250929050565b5f5f60208385031215610139576101386100ba565b5b5f83013567ffffffffffffffff811115610156576101556100be565b5b610162858286016100ce565b92509250509250929050565b5f82825260208201905092915050565b828183375f83830152505050565b5f601f19601f8301169050919050565b5f6101a7838561016e565b93506101b483858461017e565b6101bd8361018c565b840190509392505050565b5f6020820190508181035f8301526101e181848661019c565b9050939250505056fea2646970667358221220f01517e1fbaff8af4bd72cb063cccecbacbb00b07354eea7dd52265d355474fb64736f6c634300081c0033"
+        );

         if self.evm.block().number != U256::from(COREWRITER_ENABLED_BLOCK_NUMBER) {
             return Ok(());

@@ -137,12 +136,12 @@ impl<'a, DB, E, Spec, R> BlockExecutor for HlBlockExecutor<'a, E, Spec, R>
 where
     DB: Database + 'a,
     E: Evm<
         DB = &'a mut State<DB>,
         Tx: FromRecoveredTx<R::Transaction>
             + FromRecoveredTx<TransactionSigned>
             + FromTxWithEncoded<TransactionSigned>,
         Precompiles = PrecompilesMap,
     >,
     Spec: EthereumHardforks + HlHardforks + EthChainSpec + Hardforks,
     R: ReceiptBuilder<Transaction = TransactionSigned, Receipt: TxReceipt>,
     <R as ReceiptBuilder>::Transaction: Unpin + From<TransactionSigned>,

@@ -161,11 +160,10 @@ where
         Ok(())
     }

-    fn execute_transaction_with_commit_condition(
+    fn execute_transaction_without_commit(
         &mut self,
         tx: impl ExecutableTx<Self>,
-        f: impl FnOnce(&ExecutionResult<<Self::Evm as Evm>::HaltReason>) -> CommitChanges,
-    ) -> Result<Option<u64>, BlockExecutionError> {
+    ) -> Result<ResultAndState<<Self::Evm as Evm>::HaltReason>, BlockExecutionError> {
         // The sum of the transaction's gas limit, Tg, and the gas utilized in this block prior,
         // must be no greater than the block's gasLimit.
         let block_available_gas = self.evm.block().gas_limit - self.gas_used;

@@ -178,15 +176,19 @@ where
             .into());
         }

-        // Execute transaction.
-        let ResultAndState { result, mut state } = self
-            .evm
-            .transact(&tx)
-            .map_err(|err| BlockExecutionError::evm(err, tx.tx().trie_hash()))?;
+        // Execute transaction and return the result
+        self.evm.transact(&tx).map_err(|err| {
+            let hash = tx.tx().trie_hash();
+            BlockExecutionError::evm(err, hash)
+        })
+    }

-        if !f(&result).should_commit() {
-            return Ok(None);
-        }
+    fn commit_transaction(
+        &mut self,
+        output: ResultAndState<<Self::Evm as Evm>::HaltReason>,
+        tx: impl ExecutableTx<Self>,
+    ) -> Result<u64, BlockExecutionError> {
+        let ResultAndState { result, mut state } = output;

         let gas_used = result.gas_used();

@@ -215,7 +217,7 @@ where
         // Commit the state changes.
         self.evm.db_mut().commit(state);

-        Ok(Some(gas_used))
+        Ok(gas_used)
     }

     fn finish(self) -> Result<(Self::Evm, BlockExecutionResult<R::Receipt>), BlockExecutionError> {
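Aside: the executor hunks above replace `execute_transaction_with_commit_condition` with an explicit execute step (`execute_transaction_without_commit`) and a separate `commit_transaction` step. A self-contained sketch of that two-phase shape (the names and types below are illustrative assumptions, not the reth-evm trait):

    struct Outcome {
        gas_used: u64,
    }

    struct Executor {
        committed_gas: u64,
    }

    impl Executor {
        // Analogue of execute_transaction_without_commit: produce a result, touch no state.
        fn execute(&self) -> Outcome {
            Outcome { gas_used: 21_000 }
        }

        // Analogue of commit_transaction: apply a previously produced result.
        fn commit(&mut self, out: Outcome) -> u64 {
            self.committed_gas += out.gas_used;
            out.gas_used
        }
    }

    fn main() {
        let mut exec = Executor { committed_gas: 0 };
        let out = exec.execute();
        // The caller can inspect `out` here and decide whether to commit at all.
        let gas = exec.commit(out);
        assert_eq!(gas, 21_000);
        assert_eq!(exec.committed_gas, 21_000);
    }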
@@ -7,16 +7,16 @@ use crate::evm::{
     spec::HlSpecId,
     transaction::HlTxEnv,
 };
-use reth_evm::{precompiles::PrecompilesMap, Database, EvmEnv, EvmFactory};
+use reth_evm::{Database, EvmEnv, EvmFactory, precompiles::PrecompilesMap};
 use reth_revm::Context;
 use revm::{
+    Inspector,
     context::{
-        result::{EVMError, HaltReason},
         TxEnv,
+        result::{EVMError, HaltReason},
     },
     inspector::NoOpInspector,
     precompile::{PrecompileSpecId, Precompiles},
-    Inspector,
 };

 /// Factory producing [`HlEvm`].

@@ -1,6 +1,6 @@
 use crate::{
     evm::{
-        api::{ctx::HlContext, HlEvmInner},
+        api::{HlEvmInner, ctx::HlContext},
         spec::HlSpecId,
         transaction::HlTxEnv,
     },

@@ -10,18 +10,18 @@ use alloy_primitives::{Address, Bytes};
 use config::HlEvmConfig;
 use reth::{
     api::FullNodeTypes,
-    builder::{components::ExecutorBuilder, BuilderContext},
+    builder::{BuilderContext, components::ExecutorBuilder},
 };
 use reth_evm::{Database, Evm, EvmEnv};
 use revm::{
-    context::{
-        result::{EVMError, ExecutionResult, HaltReason, Output, ResultAndState, SuccessReason},
-        BlockEnv, TxEnv,
-    },
-    handler::{instructions::EthInstructions, EthPrecompiles, PrecompileProvider},
-    interpreter::{interpreter::EthInterpreter, InterpreterResult},
-    state::EvmState,
     Context, ExecuteEvm, InspectEvm, Inspector,
+    context::{
+        BlockEnv, TxEnv,
+        result::{EVMError, ExecutionResult, HaltReason, Output, ResultAndState, SuccessReason},
+    },
+    handler::{EthPrecompiles, PrecompileProvider, instructions::EthInstructions},
+    interpreter::{InterpreterResult, interpreter::EthInterpreter},
+    state::EvmState,
 };
 use std::ops::{Deref, DerefMut};

@@ -98,11 +98,7 @@ where
         &mut self,
         tx: Self::Tx,
     ) -> Result<ResultAndState<Self::HaltReason>, Self::Error> {
-        if self.inspect {
-            self.inner.inspect_tx(tx)
-        } else {
-            self.inner.transact(tx)
-        }
+        if self.inspect { self.inner.inspect_tx(tx) } else { self.inner.transact(tx) }
     }

     fn transact_system_call(

@@ -1,4 +1,4 @@
-use alloy_primitives::{address, Address};
+use alloy_primitives::{Address, address};
 use reth_evm::block::BlockExecutionError;
 use revm::{primitives::HashMap, state::Account};

@@ -1,5 +1,6 @@
 use crate::node::primitives::TransactionSigned;
 use alloy_evm::eth::receipt_builder::{ReceiptBuilder, ReceiptBuilderCtx};
+use reth_codecs::alloy::transaction::Envelope;
 use reth_evm::Evm;
 use reth_primitives::Receipt;

429 src/node/migrate.rs Normal file
@@ -0,0 +1,429 @@
+use alloy_consensus::Header;
+use alloy_primitives::{B256, BlockHash, Bytes, U256, b256, hex::ToHexExt};
+use reth::{
+    api::NodeTypesWithDBAdapter,
+    args::{DatabaseArgs, DatadirArgs},
+    dirs::{ChainPath, DataDirPath},
+};
+use reth_chainspec::EthChainSpec;
+use reth_db::{
+    DatabaseEnv,
+    mdbx::{RO, tx::Tx},
+    models::CompactU256,
+    static_file::iter_static_files,
+    table::Decompress,
+    tables,
+};
+use reth_db_api::{
+    cursor::{DbCursorRO, DbCursorRW},
+    transaction::{DbTx, DbTxMut},
+};
+use reth_errors::ProviderResult;
+use reth_ethereum_primitives::EthereumReceipt;
+use reth_provider::{
+    DatabaseProvider, ProviderFactory, ReceiptProvider, StaticFileProviderFactory,
+    StaticFileSegment, StaticFileWriter,
+    providers::{NodeTypesForProvider, StaticFileProvider},
+    static_file::SegmentRangeInclusive,
+};
+use std::{fs::File, io::Write, path::PathBuf, sync::Arc};
+use tracing::{info, warn};
+
+use crate::{HlHeader, HlPrimitives, chainspec::HlChainSpec};
+
+pub(crate) trait HlNodeType:
+    NodeTypesForProvider<ChainSpec = HlChainSpec, Primitives = HlPrimitives>
+{
+}
+impl<N: NodeTypesForProvider<ChainSpec = HlChainSpec, Primitives = HlPrimitives>> HlNodeType for N {}
+
+pub(super) struct Migrator<N: HlNodeType> {
+    data_dir: ChainPath<DataDirPath>,
+    provider_factory: ProviderFactory<NodeTypesWithDBAdapter<N, Arc<DatabaseEnv>>>,
+}
+
+impl<N: HlNodeType> Migrator<N> {
+    const MIGRATION_PATH_SUFFIX: &'static str = "migration-tmp";
+
+    pub fn new(
+        chain_spec: HlChainSpec,
+        datadir: DatadirArgs,
+        database_args: DatabaseArgs,
+    ) -> eyre::Result<Self> {
+        let data_dir = datadir.clone().resolve_datadir(chain_spec.chain());
+        let provider_factory = Self::provider_factory(chain_spec, datadir, database_args)?;
+        Ok(Self { data_dir, provider_factory })
+    }
+
+    pub fn sf_provider(&self) -> StaticFileProvider<HlPrimitives> {
+        self.provider_factory.static_file_provider()
+    }
+
+    pub fn migrate_db(&self) -> eyre::Result<()> {
+        let is_empty = Self::highest_block_number(&self.sf_provider()).is_none();
+
+        if is_empty {
+            return Ok(());
+        }
+
+        self.migrate_db_inner()
+    }
+
+    fn highest_block_number(sf_provider: &StaticFileProvider<HlPrimitives>) -> Option<u64> {
+        sf_provider.get_highest_static_file_block(StaticFileSegment::Headers)
+    }
+
+    fn migrate_db_inner(&self) -> eyre::Result<()> {
+        let migrated_mdbx = MigratorMdbx::<N>(self).migrate_mdbx()?;
+        let migrated_static_files = MigrateStaticFiles::<N>(self).migrate_static_files()?;
+
+        if migrated_mdbx || migrated_static_files {
+            info!("Database migrated successfully");
+        }
+        Ok(())
+    }
+
+    fn conversion_tmp_dir(&self) -> PathBuf {
+        self.data_dir.data_dir().join(Self::MIGRATION_PATH_SUFFIX)
+    }
+
+    fn provider_factory(
+        chain_spec: HlChainSpec,
+        datadir: DatadirArgs,
+        database_args: DatabaseArgs,
+    ) -> eyre::Result<ProviderFactory<NodeTypesWithDBAdapter<N, Arc<DatabaseEnv>>>> {
+        let data_dir = datadir.clone().resolve_datadir(chain_spec.chain());
+        let db_env = reth_db::init_db(data_dir.db(), database_args.database_args())?;
+        let static_file_provider = StaticFileProvider::read_only(data_dir.static_files(), false)?;
+        let db = Arc::new(db_env);
+        Ok(ProviderFactory::new(db, Arc::new(chain_spec), static_file_provider))
+    }
+}
+
+struct MigratorMdbx<'a, N: HlNodeType>(&'a Migrator<N>);
+
+impl<'a, N: HlNodeType> MigratorMdbx<'a, N> {
+    fn migrate_mdbx(&self) -> eyre::Result<bool> {
+        // if any header is in old format, we need to migrate it, so we pick the first and last one
+        let db_env = self.0.provider_factory.provider()?;
+        let mut cursor = db_env.tx_ref().cursor_read::<tables::Headers<Bytes>>()?;
+
+        let migration_needed = {
+            let first_is_old = match cursor.first()? {
+                Some((number, header)) => using_old_header(number, &header),
+                None => false,
+            };
+            let last_is_old = match cursor.last()? {
+                Some((number, header)) => using_old_header(number, &header),
+                None => false,
+            };
+            first_is_old || last_is_old
+        };
+
+        if !migration_needed {
+            return Ok(false);
+        }
+
+        check_if_migration_enabled()?;
+
+        self.migrate_mdbx_inner()?;
+        Ok(true)
+    }
+
+    fn migrate_mdbx_inner(&self) -> eyre::Result<()> {
+        // There shouldn't be many headers in mdbx, but using file for safety
+        info!("Old database detected, migrating mdbx...");
+        let conversion_tmp = self.0.conversion_tmp_dir();
+        let tmp_path = conversion_tmp.join("headers.rmp");
+
+        if conversion_tmp.exists() {
+            std::fs::remove_dir_all(&conversion_tmp)?;
+        }
+        std::fs::create_dir_all(&conversion_tmp)?;
+
+        let count = self.export_old_headers(&tmp_path)?;
+        self.import_new_headers(tmp_path, count)?;
+        Ok(())
+    }
+
+    fn export_old_headers(&self, tmp_path: &PathBuf) -> Result<i32, eyre::Error> {
+        let db_env = self.0.provider_factory.provider()?;
+        let mut cursor_read = db_env.tx_ref().cursor_read::<tables::Headers<Bytes>>()?;
+        let mut tmp_writer = File::create(tmp_path)?;
+        let mut count = 0;
+        let old_headers = cursor_read.walk(None)?.filter_map(|row| {
+            let (block_number, header) = row.ok()?;
+            if !using_old_header(block_number, &header) {
+                None
+            } else {
+                Some((block_number, Header::decompress(&header).ok()?))
+            }
+        });
+        for (block_number, header) in old_headers {
+            let receipt =
+                db_env.receipts_by_block(block_number.into())?.expect("Receipt not found");
+            let new_header = to_hl_header(receipt, header);
+            tmp_writer.write_all(&rmp_serde::to_vec(&(block_number, new_header))?)?;
+            count += 1;
+        }
+        Ok(count)
+    }
+
+    fn import_new_headers(&self, tmp_path: PathBuf, count: i32) -> Result<(), eyre::Error> {
+        let mut tmp_reader = File::open(tmp_path)?;
+        let db_env = self.0.provider_factory.provider_rw()?;
+        let mut cursor_write = db_env.tx_ref().cursor_write::<tables::Headers<Bytes>>()?;
+        for _ in 0..count {
+            let (number, header) = rmp_serde::from_read::<_, (u64, HlHeader)>(&mut tmp_reader)?;
+            cursor_write.upsert(number, &rmp_serde::to_vec(&header)?.into())?;
+        }
+        db_env.commit()?;
+        Ok(())
+    }
+}
+
+fn check_if_migration_enabled() -> Result<(), eyre::Error> {
+    if std::env::var("EXPERIMENTAL_MIGRATE_DB").is_err() {
+        let err_msg = concat!(
+            "Detected an old database format but experimental database migration is currently disabled. ",
+            "To enable migration, set EXPERIMENTAL_MIGRATE_DB=1, or alternatively, resync your node (safest option)."
+        );
+        warn!("{}", err_msg);
+        return Err(eyre::eyre!("{}", err_msg));
+    }
+    Ok(())
+}
+
+struct MigrateStaticFiles<'a, N: HlNodeType>(&'a Migrator<N>);
+
+impl<'a, N: HlNodeType> MigrateStaticFiles<'a, N> {
+    fn iterate_files_for_segment(
+        &self,
+        block_range: SegmentRangeInclusive,
+        dir: &PathBuf,
+    ) -> eyre::Result<Vec<(PathBuf, String)>> {
+        let prefix = StaticFileSegment::Headers.filename(&block_range);
+
+        let entries = std::fs::read_dir(dir)?
+            .map(|res| res.map(|e| e.path()))
+            .collect::<Result<Vec<_>, _>>()?;
+
+        Ok(entries
+            .into_iter()
+            .filter_map(|path| {
+                let file_name = path.file_name().and_then(|f| f.to_str())?;
+                if file_name.starts_with(&prefix) {
+                    Some((path.clone(), file_name.to_string()))
+                } else {
+                    None
+                }
+            })
+            .collect())
+    }
+
+    fn create_placeholder(&self, block_range: SegmentRangeInclusive) -> eyre::Result<()> {
+        // The direction is opposite here
+        let src = self.0.data_dir.static_files();
+        let dst = self.0.conversion_tmp_dir();
+
+        for (src_path, file_name) in self.iterate_files_for_segment(block_range, &src)? {
+            let dst_path = dst.join(file_name);
+            if dst_path.exists() {
+                std::fs::remove_file(&dst_path)?;
+            }
+            std::os::unix::fs::symlink(src_path, dst_path)?;
+        }
+
+        Ok(())
+    }
+
+    fn move_static_files_for_segment(
+        &self,
+        block_range: SegmentRangeInclusive,
+    ) -> eyre::Result<()> {
+        let src = self.0.conversion_tmp_dir();
+        let dst = self.0.data_dir.static_files();
+
+        for (src_path, file_name) in self.iterate_files_for_segment(block_range, &src)? {
+            let dst_path = dst.join(file_name);
+            std::fs::remove_file(&dst_path)?;
+            std::fs::rename(&src_path, &dst_path)?;
+        }
+
+        // Still StaticFileProvider needs the file to exist, so we create a symlink
+        self.create_placeholder(block_range)
+    }
+
+    fn migrate_static_files(&self) -> eyre::Result<bool> {
+        let conversion_tmp = self.0.conversion_tmp_dir();
+        let old_path = self.0.data_dir.static_files();
+
+        if conversion_tmp.exists() {
+            std::fs::remove_dir_all(&conversion_tmp)?;
+        }
+        std::fs::create_dir_all(&conversion_tmp)?;
+
+        let mut all_static_files = iter_static_files(&old_path)?;
+        let all_static_files =
+            all_static_files.remove(&StaticFileSegment::Headers).unwrap_or_default();
+
+        let mut first = true;
+
+        for (block_range, _tx_ranges) in all_static_files {
+            let migration_needed = self.using_old_header(block_range.start())?
+                || self.using_old_header(block_range.end())?;
+            if !migration_needed {
+                // Create a placeholder symlink
+                self.create_placeholder(block_range)?;
+                continue;
+            }
+
+            if first {
+                check_if_migration_enabled()?;
+
+                info!("Old database detected, migrating static files...");
+                first = false;
+            }
+
+            let sf_provider = self.0.sf_provider();
+            let sf_tmp_provider = StaticFileProvider::<HlPrimitives>::read_write(&conversion_tmp)?;
+            let provider = self.0.provider_factory.provider()?;
+            let block_range_for_filename = sf_provider.find_fixed_range(block_range.start());
+            migrate_single_static_file(&sf_tmp_provider, &sf_provider, &provider, block_range)?;
+
+            self.move_static_files_for_segment(block_range_for_filename)?;
+        }
+
+        Ok(!first)
+    }
+
+    fn using_old_header(&self, number: u64) -> eyre::Result<bool> {
+        let sf_provider = self.0.sf_provider();
+        let content = old_headers_range(&sf_provider, number..=number)?;
+
+        let &[row] = &content.as_slice() else {
+            warn!("No header found for block {}", number);
+            return Ok(false);
+        };
+
+        Ok(using_old_header(number, &row[0]))
+    }
+}
+
+// Problem is that decompress just panics when the header is not valid
+// So we need heuristics...
+fn is_old_header(header: &[u8]) -> bool {
+    const SHA3_UNCLE_OFFSET: usize = 0x24;
+    const SHA3_UNCLE_HASH: B256 =
+        b256!("1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347");
+    const GENESIS_PREFIX: [u8; 4] = [0x01, 0x20, 0x00, 0xf8];
+    let Some(sha3_uncle_hash) = header.get(SHA3_UNCLE_OFFSET..SHA3_UNCLE_OFFSET + 32) else {
+        return false;
+    };
+    if sha3_uncle_hash == SHA3_UNCLE_HASH {
+        return true;
+    }
+
+    // genesis block might be different
+    if header.starts_with(&GENESIS_PREFIX) {
+        return true;
+    }
+
+    false
+}
+
+fn is_new_header(header: &[u8]) -> bool {
+    rmp_serde::from_slice::<HlHeader>(header).is_ok()
+}
+
+fn migrate_single_static_file<N: HlNodeType>(
+    sf_out: &StaticFileProvider<HlPrimitives>,
+    sf_in: &StaticFileProvider<HlPrimitives>,
+    provider: &DatabaseProvider<Tx<RO>, NodeTypesWithDBAdapter<N, Arc<DatabaseEnv>>>,
+    block_range: SegmentRangeInclusive,
+) -> Result<(), eyre::Error> {
+    info!("Migrating block range {}...", block_range);
+
+    // block_ranges into chunks of 50000 blocks
+    const CHUNK_SIZE: u64 = 50000;
+    for chunk in (0..=block_range.end()).step_by(CHUNK_SIZE as usize) {
+        let end = std::cmp::min(chunk + CHUNK_SIZE - 1, block_range.end());
+        let block_range = chunk..=end;
+        let headers = old_headers_range(sf_in, block_range.clone())?;
+        let receipts = provider.receipts_by_block_range(block_range.clone())?;
+        assert_eq!(headers.len(), receipts.len());
+        let mut writer = sf_out.get_writer(*block_range.start(), StaticFileSegment::Headers)?;
+        let new_headers = std::iter::zip(headers, receipts)
+            .map(|(header, receipts)| {
+                let eth_header = Header::decompress(&header[0]).unwrap();
+                let hl_header = to_hl_header(receipts, eth_header);
+
+                let difficulty: U256 = CompactU256::decompress(&header[1]).unwrap().into();
+                let hash = BlockHash::decompress(&header[2]).unwrap();
+                (hl_header, difficulty, hash)
+            })
+            .collect::<Vec<_>>();
+        for header in new_headers {
+            writer.append_header(&header.0, header.1, &header.2)?;
+        }
+        writer.commit().unwrap();
+        info!("Migrated block range {:?}...", block_range);
+    }
+    Ok(())
+}
+
+fn to_hl_header(receipts: Vec<EthereumReceipt>, eth_header: Header) -> HlHeader {
+    let system_tx_count = receipts.iter().filter(|r| r.cumulative_gas_used == 0).count();
+    HlHeader::from_ethereum_header(eth_header, &receipts, system_tx_count as u64)
+}
+
+fn old_headers_range(
+    provider: &StaticFileProvider<HlPrimitives>,
+    block_range: impl std::ops::RangeBounds<u64>,
+) -> ProviderResult<Vec<Vec<Vec<u8>>>> {
+    Ok(provider
+        .fetch_range_with_predicate(
+            StaticFileSegment::Headers,
+            to_range(block_range),
+            |cursor, number| {
+                cursor.get(number.into(), 0b111).map(|rows| {
+                    rows.map(|columns| columns.into_iter().map(|column| column.to_vec()).collect())
+                })
+            },
+            |_| true,
+        )?
+        .into_iter()
+        .collect())
+}
+
+// Copied from reth
+fn to_range<R: std::ops::RangeBounds<u64>>(bounds: R) -> std::ops::Range<u64> {
+    let start = match bounds.start_bound() {
+        std::ops::Bound::Included(&v) => v,
+        std::ops::Bound::Excluded(&v) => v + 1,
+        std::ops::Bound::Unbounded => 0,
+    };
+
+    let end = match bounds.end_bound() {
+        std::ops::Bound::Included(&v) => v + 1,
+        std::ops::Bound::Excluded(&v) => v,
+        std::ops::Bound::Unbounded => u64::MAX,
+    };
+
+    start..end
+}
|
|
||||||
|
fn using_old_header(number: u64, header: &[u8]) -> bool {
|
||||||
|
let deserialized_old = is_old_header(header);
|
||||||
|
let deserialized_new = is_new_header(header);
|
||||||
|
|
||||||
|
assert!(
|
||||||
|
deserialized_old ^ deserialized_new,
|
||||||
|
"Header is not valid: {} {}\ndeserialized_old: {}\ndeserialized_new: {}",
|
||||||
|
number,
|
||||||
|
header.encode_hex(),
|
||||||
|
deserialized_old,
|
||||||
|
deserialized_new
|
||||||
|
);
|
||||||
|
deserialized_old && !deserialized_new
|
||||||
|
}
|
||||||
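A minimal illustration of how the inclusive-to-half-open bound conversion in the `to_range` helper above behaves. This is a standalone sketch, not part of the repository: the helper is duplicated here under the hypothetical name `to_range_demo` so the example compiles on its own.

fn main() {
    assert_eq!(to_range_demo(5..=5), 5..6); // single block, as used by `using_old_header`
    assert_eq!(to_range_demo(0..10), 0..10); // already half-open, passed through unchanged
    assert_eq!(to_range_demo(..), 0..u64::MAX); // unbounded on both sides
}

// Same logic as the `to_range` helper above, repeated so the sketch is self-contained.
fn to_range_demo<R: std::ops::RangeBounds<u64>>(bounds: R) -> std::ops::Range<u64> {
    let start = match bounds.start_bound() {
        std::ops::Bound::Included(&v) => v,
        std::ops::Bound::Excluded(&v) => v + 1,
        std::ops::Bound::Unbounded => 0,
    };
    let end = match bounds.end_bound() {
        std::ops::Bound::Included(&v) => v + 1,
        std::ops::Bound::Excluded(&v) => v,
        std::ops::Bound::Unbounded => u64::MAX,
    };
    start..end
}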
@ -4,11 +4,11 @@ use crate::{
         pool::HlPoolBuilder,
         primitives::{HlBlock, HlPrimitives},
         rpc::{
+            HlEthApiBuilder,
             engine_api::{
                 builder::HlEngineApiBuilder, payload::HlPayloadTypes,
                 validator::HlPayloadValidatorBuilder,
             },
-            HlEthApiBuilder,
         },
         storage::HlStorage,
     },

@ -20,19 +20,20 @@ use network::HlNetworkBuilder;
 use reth::{
     api::{FullNodeTypes, NodeTypes},
     builder::{
+        Node, NodeAdapter,
         components::{ComponentsBuilder, NoopPayloadServiceBuilder},
         rpc::RpcAddOns,
-        Node, NodeAdapter,
     },
 };
 use reth_engine_primitives::ConsensusEngineHandle;
 use std::{marker::PhantomData, sync::Arc};
-use tokio::sync::{oneshot, Mutex};
+use tokio::sync::{Mutex, oneshot};
 
 pub mod cli;
 pub mod consensus;
 pub mod engine;
 pub mod evm;
+pub mod migrate;
 pub mod network;
 pub mod primitives;
 pub mod rpc;

@ -49,14 +50,23 @@ pub type HlNodeAddOns<N> =
 pub struct HlNode {
     engine_handle_rx: Arc<Mutex<Option<oneshot::Receiver<ConsensusEngineHandle<HlPayloadTypes>>>>>,
     block_source_config: BlockSourceConfig,
+    debug_cutoff_height: Option<u64>,
 }
 
 impl HlNode {
     pub fn new(
         block_source_config: BlockSourceConfig,
+        debug_cutoff_height: Option<u64>,
     ) -> (Self, oneshot::Sender<ConsensusEngineHandle<HlPayloadTypes>>) {
         let (tx, rx) = oneshot::channel();
-        (Self { engine_handle_rx: Arc::new(Mutex::new(Some(rx))), block_source_config }, tx)
+        (
+            Self {
+                engine_handle_rx: Arc::new(Mutex::new(Some(rx))),
+                block_source_config,
+                debug_cutoff_height,
+            },
+            tx,
+        )
     }
 }
 
@ -84,6 +94,7 @@ impl HlNode {
             .network(HlNetworkBuilder {
                 engine_handle_rx: self.engine_handle_rx.clone(),
                 block_source_config: self.block_source_config.clone(),
+                debug_cutoff_height: self.debug_cutoff_height,
             })
             .consensus(HlConsensusBuilder::default())
     }
@ -8,7 +8,7 @@ use reth_primitives::NodePrimitives;
 use service::{BlockMsg, ImportEvent, Outcome};
 use std::{
     fmt,
-    task::{ready, Context, Poll},
+    task::{Context, Poll, ready},
 };
 
 use crate::node::network::HlNewBlock;
@ -1,17 +1,17 @@
 use super::handle::ImportHandle;
 use crate::{
+    HlBlock, HlBlockBody,
     consensus::HlConsensus,
     node::{
         network::HlNewBlock,
         rpc::engine_api::payload::HlPayloadTypes,
         types::{BlockAndReceipts, EvmBlock},
     },
-    HlBlock, HlBlockBody,
 };
 use alloy_consensus::{BlockBody, Header};
 use alloy_primitives::U128;
 use alloy_rpc_types::engine::{ForkchoiceState, PayloadStatusEnum};
-use futures::{future::Either, stream::FuturesUnordered, StreamExt};
+use futures::{StreamExt, future::Either, stream::FuturesUnordered};
 use reth_engine_primitives::{ConsensusEngineHandle, EngineTypes};
 use reth_eth_wire::NewBlock;
 use reth_network::{

@ -179,7 +179,7 @@ where
 
 #[cfg(test)]
 mod tests {
-    use crate::chainspec::hl::hl_mainnet;
+    use crate::{chainspec::hl::hl_mainnet, HlHeader};
 
     use super::*;
     use alloy_primitives::{B256, U128};

@ -355,7 +355,7 @@ mod tests {
     /// Creates a test block message
     fn create_test_block() -> NewBlockMessage<HlNewBlock> {
         let block = HlBlock {
-            header: Header::default(),
+            header: HlHeader::default(),
             body: HlBlockBody {
                 inner: BlockBody {
                     transactions: Vec::new(),
@ -1,20 +1,20 @@
 #![allow(clippy::owned_cow)]
 use crate::{
+    HlBlock,
     consensus::HlConsensus,
     node::{
-        network::block_import::{handle::ImportHandle, service::ImportService, HlBlockImport},
+        HlNode,
+        network::block_import::{HlBlockImport, handle::ImportHandle, service::ImportService},
         primitives::HlPrimitives,
         rpc::engine_api::payload::HlPayloadTypes,
         types::ReadPrecompileCalls,
-        HlNode,
     },
-    pseudo_peer::{start_pseudo_peer, BlockSourceConfig},
-    HlBlock,
+    pseudo_peer::{BlockSourceConfig, start_pseudo_peer},
 };
 use alloy_rlp::{Decodable, Encodable};
 use reth::{
     api::{FullNodeTypes, TxTy},
-    builder::{components::NetworkBuilder, BuilderContext},
+    builder::{BuilderContext, components::NetworkBuilder},
     transaction_pool::{PoolTransaction, TransactionPool},
 };
 use reth_discv4::NodeRecord;

@ -26,7 +26,7 @@ use reth_network_api::PeersInfo;
 use reth_provider::StageCheckpointReader;
 use reth_stages_types::StageId;
 use std::sync::Arc;
-use tokio::sync::{mpsc, oneshot, Mutex};
+use tokio::sync::{Mutex, mpsc, oneshot};
 use tracing::info;
 
 pub mod block_import;

@ -38,10 +38,10 @@ pub struct HlNewBlock(pub NewBlock<HlBlock>);
 mod rlp {
     use super::*;
     use crate::{
+        HlBlockBody, HlHeader,
         node::primitives::{BlockBody, TransactionSigned},
-        HlBlockBody,
     };
-    use alloy_consensus::{BlobTransactionSidecar, Header};
+    use alloy_consensus::BlobTransactionSidecar;
     use alloy_primitives::{Address, U128};
     use alloy_rlp::{RlpDecodable, RlpEncodable};
     use alloy_rpc_types::Withdrawals;

@ -50,9 +50,9 @@ mod rlp {
     #[derive(RlpEncodable, RlpDecodable)]
     #[rlp(trailing)]
     struct BlockHelper<'a> {
-        header: Cow<'a, Header>,
+        header: Cow<'a, HlHeader>,
         transactions: Cow<'a, Vec<TransactionSigned>>,
-        ommers: Cow<'a, Vec<Header>>,
+        ommers: Cow<'a, Vec<HlHeader>>,
         withdrawals: Option<Cow<'a, Withdrawals>>,
     }
 
@ -142,6 +142,8 @@ pub struct HlNetworkBuilder {
         Arc<Mutex<Option<oneshot::Receiver<ConsensusEngineHandle<HlPayloadTypes>>>>>,
 
     pub(crate) block_source_config: BlockSourceConfig,
+
+    pub(crate) debug_cutoff_height: Option<u64>,
 }
 
 impl HlNetworkBuilder {

@ -203,6 +205,7 @@ where
         pool: Pool,
     ) -> eyre::Result<Self::Network> {
         let block_source_config = self.block_source_config.clone();
+        let debug_cutoff_height = self.debug_cutoff_height;
         let handle =
             ctx.start_network(NetworkManager::builder(self.network_config(ctx)?).await?, pool);
         let local_node_record = handle.local_node_record();

@ -223,6 +226,7 @@ where
                 block_source_config
                     .create_cached_block_source((*chain_spec).clone(), next_block_number)
                     .await,
+                debug_cutoff_height,
             )
             .await
             .unwrap();
@ -6,12 +6,12 @@
 //! Ethereum transaction pool only supports TransactionSigned (EthereumTxEnvelope<TxEip4844>),
 //! hence this placeholder for the transaction pool.
 
-use crate::node::{primitives::TransactionSigned, HlNode};
+use crate::node::{HlNode, primitives::TransactionSigned};
 use alloy_consensus::{
-    error::ValueError, EthereumTxEnvelope, Transaction as TransactionTrait, TxEip4844,
+    EthereumTxEnvelope, Transaction as TransactionTrait, TxEip4844, error::ValueError,
 };
-use alloy_eips::{eip7702::SignedAuthorization, Typed2718};
+use alloy_eips::{Typed2718, eip7702::SignedAuthorization};
-use alloy_primitives::{Address, Bytes, ChainId, TxHash, TxKind, B256, U256};
+use alloy_primitives::{Address, B256, Bytes, ChainId, TxHash, TxKind, U256};
 use alloy_rpc_types::AccessList;
 use reth::{
     api::FullNodeTypes, builder::components::PoolBuilder, transaction_pool::PoolTransaction,

@ -19,7 +19,7 @@ use reth::{
 use reth_ethereum_primitives::PooledTransactionVariant;
 use reth_primitives::Recovered;
 use reth_primitives_traits::InMemorySize;
-use reth_transaction_pool::{noop::NoopTransactionPool, EthPoolTransaction};
+use reth_transaction_pool::{EthPoolTransaction, noop::NoopTransactionPool};
 use std::sync::Arc;
 
 pub struct HlPoolBuilder;
49 src/node/primitives/block.rs Normal file
@ -0,0 +1,49 @@
use super::{HlBlockBody, HlHeader, rlp};
use alloy_rlp::Encodable;
use reth_primitives_traits::{Block, InMemorySize};
use serde::{Deserialize, Serialize};
use std::borrow::Cow;

/// Block for HL
#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)]
pub struct HlBlock {
    pub header: HlHeader,
    pub body: HlBlockBody,
}

impl InMemorySize for HlBlock {
    fn size(&self) -> usize {
        self.header.size() + self.body.size()
    }
}

impl Block for HlBlock {
    type Header = HlHeader;
    type Body = HlBlockBody;

    fn new(header: Self::Header, body: Self::Body) -> Self {
        Self { header, body }
    }
    fn header(&self) -> &Self::Header {
        &self.header
    }
    fn body(&self) -> &Self::Body {
        &self.body
    }
    fn split(self) -> (Self::Header, Self::Body) {
        (self.header, self.body)
    }

    fn rlp_length(header: &Self::Header, body: &Self::Body) -> usize {
        rlp::BlockHelper {
            header: Cow::Borrowed(header),
            transactions: Cow::Borrowed(&body.inner.transactions),
            ommers: Cow::Borrowed(&body.inner.ommers),
            withdrawals: body.inner.withdrawals.as_ref().map(Cow::Borrowed),
            sidecars: body.sidecars.as_ref().map(Cow::Borrowed),
            read_precompile_calls: body.read_precompile_calls.as_ref().map(Cow::Borrowed),
            highest_precompile_address: body.highest_precompile_address.as_ref().map(Cow::Borrowed),
        }
        .length()
    }
}
77 src/node/primitives/body.rs Normal file
@ -0,0 +1,77 @@
use alloy_consensus::BlobTransactionSidecar;
use alloy_primitives::Address;
use reth_primitives_traits::{BlockBody as BlockBodyTrait, InMemorySize};
use serde::{Deserialize, Serialize};

use crate::node::types::{ReadPrecompileCall, ReadPrecompileCalls};
use crate::{HlHeader, node::primitives::TransactionSigned};

/// Block body for HL. It is equivalent to Ethereum [`BlockBody`] but additionally stores sidecars
/// for blob transactions.
#[derive(
    Debug,
    Clone,
    Default,
    PartialEq,
    Eq,
    Serialize,
    Deserialize,
    derive_more::Deref,
    derive_more::DerefMut,
)]
pub struct HlBlockBody {
    #[serde(flatten)]
    #[deref]
    #[deref_mut]
    pub inner: BlockBody,
    pub sidecars: Option<Vec<BlobTransactionSidecar>>,
    pub read_precompile_calls: Option<ReadPrecompileCalls>,
    pub highest_precompile_address: Option<Address>,
}

pub type BlockBody = alloy_consensus::BlockBody<TransactionSigned, HlHeader>;

impl InMemorySize for HlBlockBody {
    fn size(&self) -> usize {
        self.inner.size()
            + self
                .sidecars
                .as_ref()
                .map_or(0, |s| s.capacity() * core::mem::size_of::<BlobTransactionSidecar>())
            + self
                .read_precompile_calls
                .as_ref()
                .map_or(0, |s| s.0.capacity() * core::mem::size_of::<ReadPrecompileCall>())
    }
}

impl BlockBodyTrait for HlBlockBody {
    type Transaction = TransactionSigned;
    type OmmerHeader = super::HlHeader;

    fn transactions(&self) -> &[Self::Transaction] {
        BlockBodyTrait::transactions(&self.inner)
    }
    fn into_ethereum_body(self) -> BlockBody {
        self.inner
    }
    fn into_transactions(self) -> Vec<Self::Transaction> {
        self.inner.into_transactions()
    }
    fn withdrawals(&self) -> Option<&alloy_rpc_types::Withdrawals> {
        self.inner.withdrawals()
    }
    fn ommers(&self) -> Option<&[Self::OmmerHeader]> {
        self.inner.ommers()
    }

    fn calculate_tx_root(&self) -> alloy_primitives::B256 {
        alloy_consensus::proofs::calculate_transaction_root(
            &self
                .transactions()
                .iter()
                .filter(|tx| !tx.is_system_transaction())
                .collect::<Vec<_>>(),
        )
    }
}
241 src/node/primitives/header.rs Normal file
@ -0,0 +1,241 @@
use alloy_consensus::Header;
use alloy_primitives::{Address, B64, B256, BlockNumber, Bloom, Bytes, Sealable, U256};
use alloy_rlp::{RlpDecodable, RlpEncodable};
use reth_cli_commands::common::CliHeader;
use reth_codecs::Compact;
use reth_ethereum_primitives::EthereumReceipt;
use reth_primitives::{SealedHeader, logs_bloom};
use reth_primitives_traits::{BlockHeader, InMemorySize, serde_bincode_compat::RlpBincode};
use reth_rpc_convert::transaction::FromConsensusHeader;
use serde::{Deserialize, Serialize};

/// The header type of this node
///
/// This type extends the regular ethereum header with an extension.
#[derive(
    Clone,
    Debug,
    PartialEq,
    Eq,
    Hash,
    derive_more::AsRef,
    derive_more::Deref,
    Default,
    RlpEncodable,
    RlpDecodable,
    Serialize,
    Deserialize,
)]
#[serde(rename_all = "camelCase")]
pub struct HlHeader {
    /// The regular eth header
    #[as_ref]
    #[deref]
    pub inner: Header,
    /// The extended header fields that are not part of the block hash
    pub extras: HlHeaderExtras,
}

#[derive(
    Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize, RlpEncodable, RlpDecodable, Hash,
)]
pub struct HlHeaderExtras {
    pub logs_bloom_with_system_txs: Bloom,
    pub system_tx_count: u64,
}

impl HlHeader {
    pub(crate) fn from_ethereum_header(
        header: Header,
        receipts: &[EthereumReceipt],
        system_tx_count: u64,
    ) -> HlHeader {
        let logs_bloom = logs_bloom(receipts.iter().flat_map(|r| &r.logs));
        HlHeader {
            inner: header,
            extras: HlHeaderExtras { logs_bloom_with_system_txs: logs_bloom, system_tx_count },
        }
    }
}

impl From<Header> for HlHeader {
    fn from(_value: Header) -> Self {
        unreachable!()
    }
}

impl AsRef<Self> for HlHeader {
    fn as_ref(&self) -> &Self {
        self
    }
}

impl Sealable for HlHeader {
    fn hash_slow(&self) -> B256 {
        self.inner.hash_slow()
    }
}

impl alloy_consensus::BlockHeader for HlHeader {
    fn parent_hash(&self) -> B256 {
        self.inner.parent_hash()
    }

    fn ommers_hash(&self) -> B256 {
        self.inner.ommers_hash()
    }

    fn beneficiary(&self) -> Address {
        self.inner.beneficiary()
    }

    fn state_root(&self) -> B256 {
        self.inner.state_root()
    }

    fn transactions_root(&self) -> B256 {
        self.inner.transactions_root()
    }

    fn receipts_root(&self) -> B256 {
        self.inner.receipts_root()
    }

    fn withdrawals_root(&self) -> Option<B256> {
        self.inner.withdrawals_root()
    }

    fn logs_bloom(&self) -> Bloom {
        self.extras.logs_bloom_with_system_txs
    }

    fn difficulty(&self) -> U256 {
        self.inner.difficulty()
    }

    fn number(&self) -> BlockNumber {
        self.inner.number()
    }

    fn gas_limit(&self) -> u64 {
        self.inner.gas_limit()
    }

    fn gas_used(&self) -> u64 {
        self.inner.gas_used()
    }

    fn timestamp(&self) -> u64 {
        self.inner.timestamp()
    }

    fn mix_hash(&self) -> Option<B256> {
        self.inner.mix_hash()
    }

    fn nonce(&self) -> Option<B64> {
        self.inner.nonce()
    }

    fn base_fee_per_gas(&self) -> Option<u64> {
        self.inner.base_fee_per_gas()
    }

    fn blob_gas_used(&self) -> Option<u64> {
        self.inner.blob_gas_used()
    }

    fn excess_blob_gas(&self) -> Option<u64> {
        self.inner.excess_blob_gas()
    }

    fn parent_beacon_block_root(&self) -> Option<B256> {
        self.inner.parent_beacon_block_root()
    }

    fn requests_hash(&self) -> Option<B256> {
        self.inner.requests_hash()
    }

    fn extra_data(&self) -> &Bytes {
        self.inner.extra_data()
    }

    fn is_empty(&self) -> bool {
        self.extras.system_tx_count == 0 && self.inner.is_empty()
    }
}

impl InMemorySize for HlHeader {
    fn size(&self) -> usize {
        self.inner.size() + self.extras.size()
    }
}

impl InMemorySize for HlHeaderExtras {
    fn size(&self) -> usize {
        self.logs_bloom_with_system_txs.data().len() + self.system_tx_count.size()
    }
}

impl reth_codecs::Compact for HlHeader {
    fn to_compact<B>(&self, buf: &mut B) -> usize
    where
        B: alloy_rlp::bytes::BufMut + AsMut<[u8]>,
    {
        // Because Header ends with extra_data, which is `Bytes`, we can't use to_compact for the
        // extras: the Compact trait requires the Bytes field to be placed at the end of the
        // struct, and Bytes::from_compact just reads all trailing data as the Bytes field.
        //
        // Hence we need another form of serialization, since the extra header fields are not
        // Compact-compatible. We simply treat all header fields as one rmp-serialized `Bytes`
        // field.
        let result: Bytes = rmp_serde::to_vec(&self).unwrap().into();
        result.to_compact(buf)
    }

    fn from_compact(buf: &[u8], len: usize) -> (Self, &[u8]) {
        let (bytes, remaining) = Bytes::from_compact(buf, len);
        let header: HlHeader = rmp_serde::from_slice(&bytes).unwrap();
        (header, remaining)
    }
}

impl reth_db_api::table::Compress for HlHeader {
    type Compressed = Vec<u8>;

    fn compress_to_buf<B: alloy_primitives::bytes::BufMut + AsMut<[u8]>>(&self, buf: &mut B) {
        let _ = Compact::to_compact(self, buf);
    }
}

impl reth_db_api::table::Decompress for HlHeader {
    fn decompress(value: &[u8]) -> Result<Self, reth_db_api::DatabaseError> {
        let (obj, _) = Compact::from_compact(value, value.len());
        Ok(obj)
    }
}

impl BlockHeader for HlHeader {}

impl RlpBincode for HlHeader {}

impl CliHeader for HlHeader {
    fn set_number(&mut self, number: u64) {
        self.inner.set_number(number);
    }
}

impl From<HlHeader> for Header {
    fn from(value: HlHeader) -> Self {
        value.inner
    }
}

pub fn to_ethereum_ommers(ommers: &[HlHeader]) -> Vec<Header> {
    ommers.iter().map(|ommer| ommer.clone().into()).collect()
}

impl FromConsensusHeader<HlHeader> for alloy_rpc_types::Header {
    fn from_consensus_header(header: SealedHeader<HlHeader>, block_size: usize) -> Self {
        FromConsensusHeader::<Header>::from_consensus_header(
            SealedHeader::<Header>::new(header.inner.clone(), header.hash()),
            block_size,
        )
    }
}
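A small sketch of the serialization trick used by the `Compact` impl above: the whole header is tunneled through one opaque rmp-encoded byte blob so the trailing-`Bytes` restriction never applies to the individual fields. The `Extras` struct below is hypothetical (a stand-in for `HlHeaderExtras`), the sketch assumes the `serde` and `rmp_serde` crates, and it is not part of the repository.

use serde::{Deserialize, Serialize};

#[derive(Debug, PartialEq, Serialize, Deserialize)]
struct Extras {
    logs_bloom_with_system_txs: [u8; 4], // stand-in for the real 256-byte Bloom
    system_tx_count: u64,
}

fn main() {
    let extras = Extras { logs_bloom_with_system_txs: [1, 2, 3, 4], system_tx_count: 7 };
    // Serialize the whole struct into a single byte blob...
    let blob: Vec<u8> = rmp_serde::to_vec(&extras).expect("serialize");
    // ...which can then be stored as one trailing byte field and decoded back losslessly.
    let decoded: Extras = rmp_serde::from_slice(&blob).expect("deserialize");
    assert_eq!(extras, decoded);
}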
@ -1,17 +1,18 @@
-#![allow(clippy::owned_cow)]
-use alloy_consensus::{BlobTransactionSidecar, Header};
-use alloy_primitives::Address;
-use alloy_rlp::{Encodable, RlpDecodable, RlpEncodable};
 use reth_ethereum_primitives::Receipt;
 use reth_primitives::NodePrimitives;
-use reth_primitives_traits::{Block, BlockBody as BlockBodyTrait, InMemorySize};
-use serde::{Deserialize, Serialize};
-use std::borrow::Cow;
 
-use crate::node::types::{ReadPrecompileCall, ReadPrecompileCalls};
+pub mod transaction;
+pub use transaction::TransactionSigned;
 
-pub mod tx_wrapper;
+pub mod block;
-pub use tx_wrapper::{BlockBody, TransactionSigned};
+pub use block::HlBlock;
+pub mod body;
+pub use body::{BlockBody, HlBlockBody};
+pub mod header;
+pub use header::HlHeader;
+
+pub mod rlp;
+pub mod serde_bincode_compat;
 
 /// Primitive types for HyperEVM.
 #[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
@ -20,321 +21,8 @@ pub struct HlPrimitives;
 
 impl NodePrimitives for HlPrimitives {
     type Block = HlBlock;
-    type BlockHeader = Header;
+    type BlockHeader = HlHeader;
     type BlockBody = HlBlockBody;
     type SignedTx = TransactionSigned;
     type Receipt = Receipt;
 }
-
-/// Block body for HL. It is equivalent to Ethereum [`BlockBody`] but additionally stores sidecars
-/// for blob transactions.
-#[derive(
-    Debug,
-    Clone,
-    Default,
-    PartialEq,
-    Eq,
-    Serialize,
-    Deserialize,
-    derive_more::Deref,
-    derive_more::DerefMut,
-)]
-pub struct HlBlockBody {
-    #[serde(flatten)]
-    #[deref]
-    #[deref_mut]
-    pub inner: BlockBody,
-    pub sidecars: Option<Vec<BlobTransactionSidecar>>,
-    pub read_precompile_calls: Option<ReadPrecompileCalls>,
-    pub highest_precompile_address: Option<Address>,
-}
-
-impl InMemorySize for HlBlockBody {
-    fn size(&self) -> usize {
-        self.inner.size() +
-            self.sidecars
-                .as_ref()
-                .map_or(0, |s| s.capacity() * core::mem::size_of::<BlobTransactionSidecar>()) +
-            self.read_precompile_calls
-                .as_ref()
-                .map_or(0, |s| s.0.capacity() * core::mem::size_of::<ReadPrecompileCall>())
-    }
-}
-
-impl BlockBodyTrait for HlBlockBody {
-    type Transaction = TransactionSigned;
-    type OmmerHeader = Header;
-
-    fn transactions(&self) -> &[Self::Transaction] {
-        BlockBodyTrait::transactions(&self.inner)
-    }
-    fn into_ethereum_body(self) -> BlockBody {
-        self.inner
-    }
-    fn into_transactions(self) -> Vec<Self::Transaction> {
-        self.inner.into_transactions()
-    }
-    fn withdrawals(&self) -> Option<&alloy_rpc_types::Withdrawals> {
-        self.inner.withdrawals()
-    }
-    fn ommers(&self) -> Option<&[Self::OmmerHeader]> {
-        self.inner.ommers()
-    }
-
-    fn calculate_tx_root(&self) -> alloy_primitives::B256 {
-        alloy_consensus::proofs::calculate_transaction_root(
-            &self
-                .transactions()
-                .iter()
-                .filter(|tx| !tx.is_system_transaction())
-                .collect::<Vec<_>>(),
-        )
-    }
-}
-
-/// Block for HL
-#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)]
-pub struct HlBlock {
-    pub header: Header,
-    pub body: HlBlockBody,
-}
-
-impl InMemorySize for HlBlock {
-    fn size(&self) -> usize {
-        self.header.size() + self.body.size()
-    }
-}
-
-impl Block for HlBlock {
-    type Header = Header;
-    type Body = HlBlockBody;
-
-    fn new(header: Self::Header, body: Self::Body) -> Self {
-        Self { header, body }
-    }
-    fn header(&self) -> &Self::Header {
-        &self.header
-    }
-    fn body(&self) -> &Self::Body {
-        &self.body
-    }
-    fn split(self) -> (Self::Header, Self::Body) {
-        (self.header, self.body)
-    }
-
-    fn rlp_length(header: &Self::Header, body: &Self::Body) -> usize {
-        rlp::BlockHelper {
-            header: Cow::Borrowed(header),
-            transactions: Cow::Borrowed(&body.inner.transactions),
-            ommers: Cow::Borrowed(&body.inner.ommers),
-            withdrawals: body.inner.withdrawals.as_ref().map(Cow::Borrowed),
-            sidecars: body.sidecars.as_ref().map(Cow::Borrowed),
-            read_precompile_calls: body.read_precompile_calls.as_ref().map(Cow::Borrowed),
-            highest_precompile_address: body.highest_precompile_address.as_ref().map(Cow::Borrowed),
-        }
-        .length()
-    }
-}
-
-mod rlp {
-    use super::*;
-    use alloy_eips::eip4895::Withdrawals;
-    use alloy_rlp::Decodable;
-
-    #[derive(RlpEncodable, RlpDecodable)]
-    #[rlp(trailing)]
-    struct BlockBodyHelper<'a> {
-        transactions: Cow<'a, Vec<TransactionSigned>>,
-        ommers: Cow<'a, Vec<Header>>,
-        withdrawals: Option<Cow<'a, Withdrawals>>,
-        sidecars: Option<Cow<'a, Vec<BlobTransactionSidecar>>>,
-        read_precompile_calls: Option<Cow<'a, ReadPrecompileCalls>>,
-        highest_precompile_address: Option<Cow<'a, Address>>,
-    }
-
-    #[derive(RlpEncodable, RlpDecodable)]
-    #[rlp(trailing)]
-    pub(crate) struct BlockHelper<'a> {
-        pub(crate) header: Cow<'a, Header>,
-        pub(crate) transactions: Cow<'a, Vec<TransactionSigned>>,
-        pub(crate) ommers: Cow<'a, Vec<Header>>,
-        pub(crate) withdrawals: Option<Cow<'a, Withdrawals>>,
-        pub(crate) sidecars: Option<Cow<'a, Vec<BlobTransactionSidecar>>>,
-        pub(crate) read_precompile_calls: Option<Cow<'a, ReadPrecompileCalls>>,
-        pub(crate) highest_precompile_address: Option<Cow<'a, Address>>,
-    }
-
-    impl<'a> From<&'a HlBlockBody> for BlockBodyHelper<'a> {
-        fn from(value: &'a HlBlockBody) -> Self {
-            let HlBlockBody {
-                inner: BlockBody { transactions, ommers, withdrawals },
-                sidecars,
-                read_precompile_calls,
-                highest_precompile_address,
-            } = value;
-            Self {
-                transactions: Cow::Borrowed(transactions),
-                ommers: Cow::Borrowed(ommers),
-                withdrawals: withdrawals.as_ref().map(Cow::Borrowed),
-                sidecars: sidecars.as_ref().map(Cow::Borrowed),
-                read_precompile_calls: read_precompile_calls.as_ref().map(Cow::Borrowed),
-                highest_precompile_address: highest_precompile_address.as_ref().map(Cow::Borrowed),
-            }
-        }
-    }
-
-    impl<'a> From<&'a HlBlock> for BlockHelper<'a> {
-        fn from(value: &'a HlBlock) -> Self {
-            let HlBlock {
-                header,
-                body:
-                    HlBlockBody {
-                        inner: BlockBody { transactions, ommers, withdrawals },
-                        sidecars,
-                        read_precompile_calls,
-                        highest_precompile_address,
-                    },
-            } = value;
-            Self {
-                header: Cow::Borrowed(header),
-                transactions: Cow::Borrowed(transactions),
-                ommers: Cow::Borrowed(ommers),
-                withdrawals: withdrawals.as_ref().map(Cow::Borrowed),
-                sidecars: sidecars.as_ref().map(Cow::Borrowed),
-                read_precompile_calls: read_precompile_calls.as_ref().map(Cow::Borrowed),
-                highest_precompile_address: highest_precompile_address.as_ref().map(Cow::Borrowed),
-            }
-        }
-    }
-
-    impl Encodable for HlBlockBody {
-        fn encode(&self, out: &mut dyn bytes::BufMut) {
-            BlockBodyHelper::from(self).encode(out);
-        }
-        fn length(&self) -> usize {
-            BlockBodyHelper::from(self).length()
-        }
-    }
-
-    impl Decodable for HlBlockBody {
-        fn decode(buf: &mut &[u8]) -> alloy_rlp::Result<Self> {
-            let BlockBodyHelper {
-                transactions,
-                ommers,
-                withdrawals,
-                sidecars,
-                read_precompile_calls,
-                highest_precompile_address,
-            } = BlockBodyHelper::decode(buf)?;
-            Ok(Self {
-                inner: BlockBody {
-                    transactions: transactions.into_owned(),
-                    ommers: ommers.into_owned(),
-                    withdrawals: withdrawals.map(|w| w.into_owned()),
-                },
-                sidecars: sidecars.map(|s| s.into_owned()),
-                read_precompile_calls: read_precompile_calls.map(|s| s.into_owned()),
-                highest_precompile_address: highest_precompile_address.map(|s| s.into_owned()),
-            })
-        }
-    }
-
-    impl Encodable for HlBlock {
-        fn encode(&self, out: &mut dyn bytes::BufMut) {
-            BlockHelper::from(self).encode(out);
-        }
-        fn length(&self) -> usize {
-            BlockHelper::from(self).length()
-        }
-    }
-
-    impl Decodable for HlBlock {
-        fn decode(buf: &mut &[u8]) -> alloy_rlp::Result<Self> {
-            let BlockHelper {
-                header,
-                transactions,
-                ommers,
-                withdrawals,
-                sidecars,
-                read_precompile_calls,
-                highest_precompile_address,
-            } = BlockHelper::decode(buf)?;
-            Ok(Self {
-                header: header.into_owned(),
-                body: HlBlockBody {
-                    inner: BlockBody {
-                        transactions: transactions.into_owned(),
-                        ommers: ommers.into_owned(),
-                        withdrawals: withdrawals.map(|w| w.into_owned()),
-                    },
-                    sidecars: sidecars.map(|s| s.into_owned()),
-                    read_precompile_calls: read_precompile_calls.map(|s| s.into_owned()),
-                    highest_precompile_address: highest_precompile_address.map(|s| s.into_owned()),
-                },
-            })
-        }
-    }
-}
-
-pub mod serde_bincode_compat {
-    use super::*;
-    use reth_primitives_traits::serde_bincode_compat::{BincodeReprFor, SerdeBincodeCompat};
-
-    #[derive(Debug, Serialize, Deserialize)]
-    pub struct HlBlockBodyBincode<'a> {
-        inner: BincodeReprFor<'a, BlockBody>,
-        sidecars: Option<Cow<'a, Vec<BlobTransactionSidecar>>>,
-        read_precompile_calls: Option<Cow<'a, ReadPrecompileCalls>>,
-        highest_precompile_address: Option<Cow<'a, Address>>,
-    }
-
-    #[derive(Debug, Serialize, Deserialize)]
-    pub struct HlBlockBincode<'a> {
-        header: BincodeReprFor<'a, Header>,
-        body: BincodeReprFor<'a, HlBlockBody>,
-    }
-
-    impl SerdeBincodeCompat for HlBlockBody {
-        type BincodeRepr<'a> = HlBlockBodyBincode<'a>;
-
-        fn as_repr(&self) -> Self::BincodeRepr<'_> {
-            HlBlockBodyBincode {
-                inner: self.inner.as_repr(),
-                sidecars: self.sidecars.as_ref().map(Cow::Borrowed),
-                read_precompile_calls: self.read_precompile_calls.as_ref().map(Cow::Borrowed),
-                highest_precompile_address: self
-                    .highest_precompile_address
-                    .as_ref()
-                    .map(Cow::Borrowed),
-            }
-        }
-
-        fn from_repr(repr: Self::BincodeRepr<'_>) -> Self {
-            let HlBlockBodyBincode {
-                inner,
-                sidecars,
-                read_precompile_calls,
-                highest_precompile_address,
-            } = repr;
-            Self {
-                inner: BlockBody::from_repr(inner),
-                sidecars: sidecars.map(|s| s.into_owned()),
-                read_precompile_calls: read_precompile_calls.map(|s| s.into_owned()),
-                highest_precompile_address: highest_precompile_address.map(|s| s.into_owned()),
-            }
-        }
-    }
-
-    impl SerdeBincodeCompat for HlBlock {
-        type BincodeRepr<'a> = HlBlockBincode<'a>;
-
-        fn as_repr(&self) -> Self::BincodeRepr<'_> {
-            HlBlockBincode { header: self.header.as_repr(), body: self.body.as_repr() }
-        }
-
-        fn from_repr(repr: Self::BincodeRepr<'_>) -> Self {
-            let HlBlockBincode { header, body } = repr;
-            Self { header: Header::from_repr(header), body: HlBlockBody::from_repr(body) }
-        }
-    }
-}
142 src/node/primitives/rlp.rs Normal file
@ -0,0 +1,142 @@
#![allow(clippy::owned_cow)]
use super::{HlBlock, HlBlockBody, TransactionSigned};
use crate::{node::types::ReadPrecompileCalls, HlHeader};
use alloy_consensus::{BlobTransactionSidecar, BlockBody};
use alloy_eips::eip4895::Withdrawals;
use alloy_primitives::Address;
use alloy_rlp::{Decodable, Encodable, RlpDecodable, RlpEncodable};
use std::borrow::Cow;

#[derive(RlpEncodable, RlpDecodable)]
#[rlp(trailing)]
struct BlockBodyHelper<'a> {
    transactions: Cow<'a, Vec<TransactionSigned>>,
    ommers: Cow<'a, Vec<HlHeader>>,
    withdrawals: Option<Cow<'a, Withdrawals>>,
    sidecars: Option<Cow<'a, Vec<BlobTransactionSidecar>>>,
    read_precompile_calls: Option<Cow<'a, ReadPrecompileCalls>>,
    highest_precompile_address: Option<Cow<'a, Address>>,
}

#[derive(RlpEncodable, RlpDecodable)]
#[rlp(trailing)]
pub(crate) struct BlockHelper<'a> {
    pub(crate) header: Cow<'a, HlHeader>,
    pub(crate) transactions: Cow<'a, Vec<TransactionSigned>>,
    pub(crate) ommers: Cow<'a, Vec<HlHeader>>,
    pub(crate) withdrawals: Option<Cow<'a, Withdrawals>>,
    pub(crate) sidecars: Option<Cow<'a, Vec<BlobTransactionSidecar>>>,
    pub(crate) read_precompile_calls: Option<Cow<'a, ReadPrecompileCalls>>,
    pub(crate) highest_precompile_address: Option<Cow<'a, Address>>,
}

impl<'a> From<&'a HlBlockBody> for BlockBodyHelper<'a> {
    fn from(value: &'a HlBlockBody) -> Self {
        let HlBlockBody {
            inner: BlockBody { transactions, ommers, withdrawals },
            sidecars,
            read_precompile_calls,
            highest_precompile_address,
        } = value;
        Self {
            transactions: Cow::Borrowed(transactions),
            ommers: Cow::Borrowed(ommers),
            withdrawals: withdrawals.as_ref().map(Cow::Borrowed),
            sidecars: sidecars.as_ref().map(Cow::Borrowed),
            read_precompile_calls: read_precompile_calls.as_ref().map(Cow::Borrowed),
            highest_precompile_address: highest_precompile_address.as_ref().map(Cow::Borrowed),
        }
    }
}

impl<'a> From<&'a HlBlock> for BlockHelper<'a> {
    fn from(value: &'a HlBlock) -> Self {
        let HlBlock {
            header,
            body:
                HlBlockBody {
                    inner: BlockBody { transactions, ommers, withdrawals },
                    sidecars,
                    read_precompile_calls,
                    highest_precompile_address,
                },
        } = value;
        Self {
            header: Cow::Borrowed(header),
            transactions: Cow::Borrowed(transactions),
            ommers: Cow::Borrowed(ommers),
            withdrawals: withdrawals.as_ref().map(Cow::Borrowed),
            sidecars: sidecars.as_ref().map(Cow::Borrowed),
            read_precompile_calls: read_precompile_calls.as_ref().map(Cow::Borrowed),
            highest_precompile_address: highest_precompile_address.as_ref().map(Cow::Borrowed),
        }
    }
}

impl Encodable for HlBlockBody {
    fn encode(&self, out: &mut dyn bytes::BufMut) {
        BlockBodyHelper::from(self).encode(out);
    }
    fn length(&self) -> usize {
        BlockBodyHelper::from(self).length()
    }
}

impl Decodable for HlBlockBody {
    fn decode(buf: &mut &[u8]) -> alloy_rlp::Result<Self> {
        let BlockBodyHelper {
            transactions,
            ommers,
            withdrawals,
            sidecars,
            read_precompile_calls,
            highest_precompile_address,
        } = BlockBodyHelper::decode(buf)?;
        Ok(Self {
            inner: BlockBody {
                transactions: transactions.into_owned(),
                ommers: ommers.into_owned(),
                withdrawals: withdrawals.map(|w| w.into_owned()),
            },
            sidecars: sidecars.map(|s| s.into_owned()),
            read_precompile_calls: read_precompile_calls.map(|s| s.into_owned()),
            highest_precompile_address: highest_precompile_address.map(|s| s.into_owned()),
        })
    }
}

impl Encodable for HlBlock {
    fn encode(&self, out: &mut dyn bytes::BufMut) {
        BlockHelper::from(self).encode(out);
    }
    fn length(&self) -> usize {
        BlockHelper::from(self).length()
    }
}

impl Decodable for HlBlock {
    fn decode(buf: &mut &[u8]) -> alloy_rlp::Result<Self> {
        let BlockHelper {
            header,
            transactions,
            ommers,
            withdrawals,
            sidecars,
            read_precompile_calls,
            highest_precompile_address,
        } = BlockHelper::decode(buf)?;
        Ok(Self {
            header: header.into_owned(),
            body: HlBlockBody {
                inner: BlockBody {
                    transactions: transactions.into_owned(),
                    ommers: ommers.into_owned(),
                    withdrawals: withdrawals.map(|w| w.into_owned()),
                },
                sidecars: sidecars.map(|s| s.into_owned()),
                read_precompile_calls: read_precompile_calls.map(|s| s.into_owned()),
                highest_precompile_address: highest_precompile_address.map(|s| s.into_owned()),
            },
        })
    }
}
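For context, a minimal sketch of how `#[rlp(trailing)]` behaves on optional tail fields like the ones in the helpers above: when the trailing options are `None`, they are simply omitted from the encoding, which keeps the format backward-compatible with bodies that lack the HL-specific fields. The `Demo` struct is hypothetical, the sketch assumes the `alloy-rlp` crate with its derive feature enabled, and it is not taken from the repository.

use alloy_rlp::{Decodable, Encodable, RlpDecodable, RlpEncodable};

#[derive(Debug, PartialEq, RlpEncodable, RlpDecodable)]
#[rlp(trailing)]
struct Demo {
    required: u64,
    optional_tail: Option<u64>, // omitted from the encoding when `None`
}

fn main() {
    let without_tail = Demo { required: 42, optional_tail: None };
    let mut buf = Vec::new();
    without_tail.encode(&mut buf);
    // Decoding the shorter payload still succeeds; the missing field comes back as `None`.
    let decoded = Demo::decode(&mut buf.as_slice()).expect("decode");
    assert_eq!(decoded, without_tail);
}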
64 src/node/primitives/serde_bincode_compat.rs Normal file
@ -0,0 +1,64 @@
#![allow(clippy::owned_cow)]
use alloy_consensus::BlobTransactionSidecar;
use alloy_primitives::Address;
use reth_primitives_traits::serde_bincode_compat::{BincodeReprFor, SerdeBincodeCompat};
use serde::{Deserialize, Serialize};
use std::borrow::Cow;

use super::{HlBlock, HlBlockBody};
use crate::{node::{primitives::BlockBody, types::ReadPrecompileCalls}, HlHeader};

#[derive(Debug, Serialize, Deserialize)]
pub struct HlBlockBodyBincode<'a> {
    inner: BincodeReprFor<'a, BlockBody>,
    sidecars: Option<Cow<'a, Vec<BlobTransactionSidecar>>>,
    read_precompile_calls: Option<Cow<'a, ReadPrecompileCalls>>,
    highest_precompile_address: Option<Cow<'a, Address>>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct HlBlockBincode<'a> {
    header: BincodeReprFor<'a, HlHeader>,
    body: BincodeReprFor<'a, HlBlockBody>,
}

impl SerdeBincodeCompat for HlBlockBody {
    type BincodeRepr<'a> = HlBlockBodyBincode<'a>;

    fn as_repr(&self) -> Self::BincodeRepr<'_> {
        HlBlockBodyBincode {
            inner: self.inner.as_repr(),
            sidecars: self.sidecars.as_ref().map(Cow::Borrowed),
            read_precompile_calls: self.read_precompile_calls.as_ref().map(Cow::Borrowed),
            highest_precompile_address: self.highest_precompile_address.as_ref().map(Cow::Borrowed),
        }
    }

    fn from_repr(repr: Self::BincodeRepr<'_>) -> Self {
        let HlBlockBodyBincode {
            inner,
            sidecars,
            read_precompile_calls,
            highest_precompile_address,
        } = repr;
        Self {
            inner: BlockBody::from_repr(inner),
            sidecars: sidecars.map(|s| s.into_owned()),
            read_precompile_calls: read_precompile_calls.map(|s| s.into_owned()),
            highest_precompile_address: highest_precompile_address.map(|s| s.into_owned()),
        }
    }
}

impl SerdeBincodeCompat for HlBlock {
    type BincodeRepr<'a> = HlBlockBincode<'a>;

    fn as_repr(&self) -> Self::BincodeRepr<'_> {
        HlBlockBincode { header: self.header.as_repr(), body: self.body.as_repr() }
    }

    fn from_repr(repr: Self::BincodeRepr<'_>) -> Self {
        let HlBlockBincode { header, body } = repr;
        Self { header: HlHeader::from_repr(header), body: HlBlockBody::from_repr(body) }
    }
}
@ -1,33 +1,35 @@
|
|||||||
//! HlNodePrimitives::TransactionSigned; it's the same as ethereum transaction type,
|
//! HlNodePrimitives::TransactionSigned; it's the same as ethereum transaction type,
|
||||||
//! except that it supports pseudo signer for system transactions.
|
//! except that it supports pseudo signer for system transactions.
|
||||||
|
use std::convert::Infallible;
|
||||||
|
|
||||||
|
use crate::evm::transaction::HlTxEnv;
|
||||||
use alloy_consensus::{
|
use alloy_consensus::{
|
||||||
crypto::RecoveryError, error::ValueError, EthereumTxEnvelope, EthereumTypedTransaction,
|
|
||||||
SignableTransaction, Signed, Transaction as TransactionTrait, TransactionEnvelope, TxEip1559,
|
SignableTransaction, Signed, Transaction as TransactionTrait, TransactionEnvelope, TxEip1559,
|
||||||
TxEip2930, TxEip4844, TxEip4844WithSidecar, TxEip7702, TxLegacy, TxType, TypedTransaction,
|
TxEip2930, TxEip4844, TxEip7702, TxLegacy, TxType, TypedTransaction, crypto::RecoveryError,
|
||||||
|
error::ValueError, transaction::TxHashRef,
|
||||||
};
|
};
|
||||||
use alloy_eips::{eip7594::BlobTransactionSidecarVariant, Encodable2718};
|
use alloy_eips::Encodable2718;
|
||||||
use alloy_network::TxSigner;
|
use alloy_network::TxSigner;
|
||||||
use alloy_primitives::{address, Address, TxHash, U256};
|
use alloy_primitives::{Address, TxHash, U256, address};
|
||||||
use alloy_rpc_types::{Transaction, TransactionInfo, TransactionRequest};
|
use alloy_rpc_types::{Transaction, TransactionInfo, TransactionRequest};
|
||||||
use alloy_signer::Signature;
|
use alloy_signer::Signature;
|
||||||
use reth_codecs::alloy::transaction::FromTxCompact;
|
use reth_codecs::alloy::transaction::{Envelope, FromTxCompact};
|
||||||
use reth_db::{
|
use reth_db::{
|
||||||
table::{Compress, Decompress},
|
|
||||||
DatabaseError,
|
DatabaseError,
|
||||||
|
table::{Compress, Decompress},
|
||||||
};
|
};
|
||||||
|
use reth_ethereum_primitives::PooledTransactionVariant;
|
||||||
use reth_evm::FromRecoveredTx;
|
use reth_evm::FromRecoveredTx;
|
||||||
use reth_primitives::Recovered;
|
use reth_primitives::Recovered;
|
||||||
use reth_primitives_traits::{
|
use reth_primitives_traits::{
|
||||||
serde_bincode_compat::SerdeBincodeCompat, InMemorySize, SignedTransaction, SignerRecoverable,
|
InMemorySize, SignedTransaction, SignerRecoverable, serde_bincode_compat::SerdeBincodeCompat,
|
||||||
};
|
};
|
||||||
use reth_rpc_eth_api::{
|
use reth_rpc_eth_api::{
|
||||||
transaction::{FromConsensusTx, TryIntoTxEnv},
|
|
||||||
EthTxEnvError, SignTxRequestError, SignableTxRequest, TryIntoSimTx,
|
EthTxEnvError, SignTxRequestError, SignableTxRequest, TryIntoSimTx,
|
||||||
|
transaction::{FromConsensusTx, TryIntoTxEnv},
|
||||||
};
|
};
|
||||||
use revm::context::{BlockEnv, CfgEnv, TxEnv};
|
use revm::context::{BlockEnv, CfgEnv, TxEnv};
|
||||||
|
|
||||||
use crate::evm::transaction::HlTxEnv;
|
|
||||||
|
|
||||||
type InnerType = alloy_consensus::EthereumTxEnvelope<TxEip4844>;
|
type InnerType = alloy_consensus::EthereumTxEnvelope<TxEip4844>;
|
||||||
|
|
||||||
#[derive(Debug, Clone, TransactionEnvelope)]
|
#[derive(Debug, Clone, TransactionEnvelope)]
|
||||||
@ -46,6 +48,12 @@ fn s_to_address(s: U256) -> Address {
|
|||||||
Address::from_slice(&buf)
|
Address::from_slice(&buf)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl TxHashRef for TransactionSigned {
|
||||||
|
fn tx_hash(&self) -> &TxHash {
|
||||||
|
self.inner().tx_hash()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl SignerRecoverable for TransactionSigned {
|
impl SignerRecoverable for TransactionSigned {
|
||||||
fn recover_signer(&self) -> Result<Address, RecoveryError> {
|
fn recover_signer(&self) -> Result<Address, RecoveryError> {
|
||||||
if self.is_system_transaction() {
|
if self.is_system_transaction() {
|
||||||
@ -69,11 +77,7 @@ impl SignerRecoverable for TransactionSigned {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl SignedTransaction for TransactionSigned {
|
impl SignedTransaction for TransactionSigned {}
|
||||||
fn tx_hash(&self) -> &TxHash {
|
|
||||||
self.inner().tx_hash()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// ------------------------------------------------------------
|
// ------------------------------------------------------------
|
||||||
// NOTE: All lines below are just wrappers for the inner type.
|
// NOTE: All lines below are just wrappers for the inner type.
|
||||||
@ -157,16 +161,8 @@ impl TransactionSigned {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn signature(&self) -> &Signature {
|
|
||||||
self.inner().signature()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub const fn tx_type(&self) -> TxType {
|
|
||||||
self.inner().tx_type()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn is_system_transaction(&self) -> bool {
|
pub fn is_system_transaction(&self) -> bool {
|
||||||
self.gas_price().is_some() && self.gas_price().unwrap() == 0
|
matches!(self.gas_price(), Some(0))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -185,26 +181,16 @@ impl SerdeBincodeCompat for TransactionSigned {
     }
 }
 
-pub type BlockBody = alloy_consensus::BlockBody<TransactionSigned>;
-
-impl TryFrom<TransactionSigned>
-    for EthereumTxEnvelope<TxEip4844WithSidecar<BlobTransactionSidecarVariant>>
-{
-    type Error = <InnerType as TryInto<
-        EthereumTxEnvelope<TxEip4844WithSidecar<BlobTransactionSidecarVariant>>,
-    >>::Error;
+impl TryFrom<TransactionSigned> for PooledTransactionVariant {
+    type Error = <InnerType as TryInto<PooledTransactionVariant>>::Error;
 
     fn try_from(value: TransactionSigned) -> Result<Self, Self::Error> {
         value.into_inner().try_into()
     }
 }
 
-impl From<EthereumTxEnvelope<TxEip4844WithSidecar<BlobTransactionSidecarVariant>>>
-    for TransactionSigned
-{
-    fn from(
-        value: EthereumTxEnvelope<TxEip4844WithSidecar<BlobTransactionSidecarVariant>>,
-    ) -> Self {
+impl From<PooledTransactionVariant> for TransactionSigned {
+    fn from(value: PooledTransactionVariant) -> Self {
         Self::Default(value.into())
     }
 }

@@ -223,22 +209,6 @@ impl Decompress for TransactionSigned {
     }
 }
 
-pub fn convert_to_eth_block_body(value: BlockBody) -> alloy_consensus::BlockBody<InnerType> {
-    alloy_consensus::BlockBody {
-        transactions: value.transactions.into_iter().map(|tx| tx.into_inner()).collect(),
-        ommers: value.ommers,
-        withdrawals: value.withdrawals,
-    }
-}
-
-pub fn convert_to_hl_block_body(value: alloy_consensus::BlockBody<InnerType>) -> BlockBody {
-    BlockBody {
-        transactions: value.transactions.into_iter().map(TransactionSigned::Default).collect(),
-        ommers: value.ommers,
-        withdrawals: value.withdrawals,
-    }
-}
-
 impl TryIntoSimTx<TransactionSigned> for TransactionRequest {
     fn try_into_sim_tx(self) -> Result<TransactionSigned, ValueError<Self>> {
         let tx = self

@@ -266,9 +236,17 @@ impl TryIntoTxEnv<HlTxEnv<TxEnv>> for TransactionRequest {
 
 impl FromConsensusTx<TransactionSigned> for Transaction {
     type TxInfo = TransactionInfo;
+    type Err = Infallible;
 
-    fn from_consensus_tx(tx: TransactionSigned, signer: Address, tx_info: Self::TxInfo) -> Self {
-        Self::from_transaction(Recovered::new_unchecked(tx.into_inner().into(), signer), tx_info)
+    fn from_consensus_tx(
+        tx: TransactionSigned,
+        signer: Address,
+        tx_info: Self::TxInfo,
+    ) -> Result<Self, Self::Err> {
+        Ok(Self::from_transaction(
+            Recovered::new_unchecked(tx.into_inner().into(), signer),
+            tx_info,
+        ))
     }
 }
 

@@ -277,26 +255,7 @@ impl SignableTxRequest<TransactionSigned> for TransactionRequest {
         self,
         signer: impl TxSigner<Signature> + Send,
     ) -> Result<TransactionSigned, SignTxRequestError> {
-        let mut tx =
-            self.build_typed_tx().map_err(|_| SignTxRequestError::InvalidTransactionRequest)?;
-        let signature = signer.sign_transaction(&mut tx).await?;
-        let signed = match tx {
-            EthereumTypedTransaction::Legacy(tx) => {
-                EthereumTxEnvelope::Legacy(tx.into_signed(signature))
-            }
-            EthereumTypedTransaction::Eip2930(tx) => {
-                EthereumTxEnvelope::Eip2930(tx.into_signed(signature))
-            }
-            EthereumTypedTransaction::Eip1559(tx) => {
-                EthereumTxEnvelope::Eip1559(tx.into_signed(signature))
-            }
-            EthereumTypedTransaction::Eip4844(tx) => {
-                EthereumTxEnvelope::Eip4844(TxEip4844::from(tx).into_signed(signature))
-            }
-            EthereumTypedTransaction::Eip7702(tx) => {
-                EthereumTxEnvelope::Eip7702(tx.into_signed(signature))
-            }
-        };
+        let signed = SignableTxRequest::<InnerType>::try_build_and_sign(self, signer).await?;
         Ok(TransactionSigned::Default(signed))
     }
 }
@@ -1,12 +1,12 @@
 use crate::node::rpc::{HlEthApi, HlRpcNodeCore};
 use reth::rpc::server_types::eth::{
-    builder::config::PendingBlockKind, error::FromEvmError, EthApiError, PendingBlock,
+    EthApiError, PendingBlock, builder::config::PendingBlockKind, error::FromEvmError,
 };
 use reth_rpc_eth_api::{
-    helpers::{
-        pending_block::PendingEnvBuilder, EthBlocks, LoadBlock, LoadPendingBlock, LoadReceipt,
-    },
     RpcConvert,
+    helpers::{
+        EthBlocks, LoadBlock, LoadPendingBlock, LoadReceipt, pending_block::PendingEnvBuilder,
+    },
 };
 
 impl<N, Rpc> EthBlocks for HlEthApi<N, Rpc>
@@ -29,7 +29,7 @@ impl<N, Rpc> LoadPendingBlock for HlEthApi<N, Rpc>
 where
     N: HlRpcNodeCore,
     EthApiError: FromEvmError<N::Evm>,
-    Rpc: RpcConvert<Primitives = N::Primitives, Error = EthApiError>,
+    Rpc: RpcConvert<Primitives = N::Primitives>,
 {
     #[inline]
     fn pending_block(&self) -> &tokio::sync::Mutex<Option<PendingBlock<N::Primitives>>> {
@@ -50,7 +50,6 @@ where
 impl<N, Rpc> LoadReceipt for HlEthApi<N, Rpc>
 where
     N: HlRpcNodeCore,
-    EthApiError: FromEvmError<N::Evm>,
     Rpc: RpcConvert<Primitives = N::Primitives, Error = EthApiError>,
 {
 }
@@ -1,17 +1,19 @@
+use core::fmt;
+
 use super::{HlEthApi, HlRpcNodeCore};
-use crate::{node::evm::apply_precompiles, HlBlock};
+use crate::{HlBlock, node::evm::apply_precompiles};
+use alloy_consensus::transaction::TxHashRef;
 use alloy_evm::Evm;
 use alloy_primitives::B256;
 use reth::rpc::server_types::eth::EthApiError;
-use reth_evm::{ConfigureEvm, Database, EvmEnvFor, TxEnvFor};
+use reth_evm::{ConfigureEvm, Database, EvmEnvFor, HaltReasonFor, InspectorFor, SpecFor, TxEnvFor};
 use reth_primitives::{NodePrimitives, Recovered};
-use reth_primitives_traits::SignedTransaction;
 use reth_provider::{ProviderError, ProviderTx};
 use reth_rpc_eth_api::{
-    helpers::{estimate::EstimateCall, Call, EthCall},
     FromEvmError, RpcConvert, RpcNodeCore,
+    helpers::{Call, EthCall},
 };
-use revm::DatabaseCommit;
+use revm::{DatabaseCommit, context::result::ResultAndState};
 
 impl<N> HlRpcNodeCore for N where N: RpcNodeCore<Primitives: NodePrimitives<Block = HlBlock>> {}
 

@@ -19,15 +21,12 @@ impl<N, Rpc> EthCall for HlEthApi<N, Rpc>
 where
     N: HlRpcNodeCore,
     EthApiError: FromEvmError<N::Evm>,
-    Rpc: RpcConvert<Primitives = N::Primitives, Error = EthApiError, TxEnv = TxEnvFor<N::Evm>>,
-{
-}
-
-impl<N, Rpc> EstimateCall for HlEthApi<N, Rpc>
-where
-    N: HlRpcNodeCore,
-    EthApiError: FromEvmError<N::Evm>,
-    Rpc: RpcConvert<Primitives = N::Primitives, Error = EthApiError, TxEnv = TxEnvFor<N::Evm>>,
+    Rpc: RpcConvert<
+        Primitives = N::Primitives,
+        Error = EthApiError,
+        TxEnv = TxEnvFor<N::Evm>,
+        Spec = SpecFor<N::Evm>,
+    >,
 {
 }
 

@@ -35,7 +34,12 @@ impl<N, Rpc> Call for HlEthApi<N, Rpc>
 where
     N: HlRpcNodeCore,
     EthApiError: FromEvmError<N::Evm>,
-    Rpc: RpcConvert<Primitives = N::Primitives, Error = EthApiError, TxEnv = TxEnvFor<N::Evm>>,
+    Rpc: RpcConvert<
+        Primitives = N::Primitives,
+        Error = EthApiError,
+        TxEnv = TxEnvFor<N::Evm>,
+        Spec = SpecFor<N::Evm>,
+    >,
 {
     #[inline]
     fn call_gas_limit(&self) -> u64 {

@@ -47,6 +51,46 @@ where
         self.inner.eth_api.max_simulate_blocks()
     }
 
+    fn transact<DB>(
+        &self,
+        db: DB,
+        evm_env: EvmEnvFor<Self::Evm>,
+        tx_env: TxEnvFor<Self::Evm>,
+    ) -> Result<ResultAndState<HaltReasonFor<Self::Evm>>, Self::Error>
+    where
+        DB: Database<Error = ProviderError> + fmt::Debug,
+    {
+        let block_number = evm_env.block_env().number;
+        let hl_extras = self.get_hl_extras(block_number.to::<u64>().into())?;
+
+        let mut evm = self.evm_config().evm_with_env(db, evm_env);
+        apply_precompiles(&mut evm, &hl_extras);
+        let res = evm.transact(tx_env).map_err(Self::Error::from_evm_err)?;
+
+        Ok(res)
+    }
+
+    fn transact_with_inspector<DB, I>(
+        &self,
+        db: DB,
+        evm_env: EvmEnvFor<Self::Evm>,
+        tx_env: TxEnvFor<Self::Evm>,
+        inspector: I,
+    ) -> Result<ResultAndState<HaltReasonFor<Self::Evm>>, Self::Error>
+    where
+        DB: Database<Error = ProviderError> + fmt::Debug,
+        I: InspectorFor<Self::Evm, DB>,
+    {
+        let block_number = evm_env.block_env().number;
+        let hl_extras = self.get_hl_extras(block_number.to::<u64>().into())?;
+
+        let mut evm = self.evm_config().evm_with_env_and_inspector(db, evm_env, inspector);
+        apply_precompiles(&mut evm, &hl_extras);
+        let res = evm.transact(tx_env).map_err(Self::Error::from_evm_err)?;
+
+        Ok(res)
+    }
+
     fn replay_transactions_until<'a, DB, I>(
         &self,
         db: &mut DB,

@@ -59,7 +103,7 @@ where
         I: IntoIterator<Item = Recovered<&'a ProviderTx<Self::Provider>>>,
     {
         let block_number = evm_env.block_env().number;
-        let hl_extras = self.get_hl_extras(block_number.try_into().unwrap())?;
+        let hl_extras = self.get_hl_extras(block_number.to::<u64>().into())?;
 
         let mut evm = self.evm_config().evm_with_env(db, evm_env);
         apply_precompiles(&mut evm, &hl_extras);
@@ -9,7 +9,7 @@ use alloy_primitives::B256;
 use alloy_rpc_types_engine::PayloadError;
 use reth::{
     api::{FullNodeComponents, NodeTypes},
-    builder::{rpc::PayloadValidatorBuilder, AddOnsContext},
+    builder::{AddOnsContext, rpc::PayloadValidatorBuilder},
 };
 use reth_engine_primitives::{ExecutionPayload, PayloadValidator};
 use reth_payload_primitives::NewPayloadError;
214 src/node/rpc/estimate.rs (new file)
@@ -0,0 +1,214 @@
use super::{HlEthApi, HlRpcNodeCore, apply_precompiles};
use alloy_evm::overrides::{StateOverrideError, apply_state_overrides};
use alloy_network::TransactionBuilder;
use alloy_primitives::{TxKind, U256};
use alloy_rpc_types_eth::state::StateOverride;
use reth_chainspec::MIN_TRANSACTION_GAS;
use reth_errors::ProviderError;
use reth_evm::{ConfigureEvm, Evm, EvmEnvFor, SpecFor, TransactionEnv, TxEnvFor};
use reth_revm::{database::StateProviderDatabase, db::CacheDB};
use reth_rpc_convert::{RpcConvert, RpcTxReq};
use reth_rpc_eth_api::{
    AsEthApiError, IntoEthApiError, RpcNodeCore,
    helpers::{
        Call,
        estimate::{EstimateCall, update_estimated_gas_range},
    },
};
use reth_rpc_eth_types::{
    EthApiError, RevertError, RpcInvalidTransactionError,
    error::{FromEvmError, api::FromEvmHalt},
};
use reth_rpc_server_types::constants::gas_oracle::{CALL_STIPEND_GAS, ESTIMATE_GAS_ERROR_RATIO};
use reth_storage_api::StateProvider;
use revm::context_interface::{Transaction, result::ExecutionResult};
use tracing::trace;

impl<N, Rpc> EstimateCall for HlEthApi<N, Rpc>
where
    Self: Call,
    N: HlRpcNodeCore,
    EthApiError: FromEvmError<N::Evm> + From<StateOverrideError<ProviderError>>,
    Rpc: RpcConvert<
        Primitives = N::Primitives,
        Error = EthApiError,
        TxEnv = TxEnvFor<N::Evm>,
        Spec = SpecFor<N::Evm>,
    >,
{
    // Modified version that adds `apply_precompiles`; comments are stripped out.
    fn estimate_gas_with<S>(
        &self,
        mut evm_env: EvmEnvFor<Self::Evm>,
        mut request: RpcTxReq<<Self::RpcConvert as RpcConvert>::Network>,
        state: S,
        state_override: Option<StateOverride>,
    ) -> Result<U256, Self::Error>
    where
        S: StateProvider,
    {
        evm_env.cfg_env.disable_eip3607 = true;
        evm_env.cfg_env.disable_base_fee = true;

        request.as_mut().take_nonce();

        let tx_request_gas_limit = request.as_ref().gas_limit();
        let tx_request_gas_price = request.as_ref().gas_price();
        let max_gas_limit = evm_env
            .cfg_env
            .tx_gas_limit_cap
            .map_or(evm_env.block_env.gas_limit, |cap| cap.min(evm_env.block_env.gas_limit));

        let mut highest_gas_limit = tx_request_gas_limit
            .map(|mut tx_gas_limit| {
                if max_gas_limit < tx_gas_limit {
                    tx_gas_limit = max_gas_limit;
                }
                tx_gas_limit
            })
            .unwrap_or(max_gas_limit);

        let mut db = CacheDB::new(StateProviderDatabase::new(state));

        if let Some(state_override) = state_override {
            apply_state_overrides(state_override, &mut db).map_err(
                |err: StateOverrideError<ProviderError>| {
                    let eth_api_error: EthApiError = EthApiError::from(err);
                    Self::Error::from(eth_api_error)
                },
            )?;
        }

        let mut tx_env = self.create_txn_env(&evm_env, request, &mut db)?;

        let mut is_basic_transfer = false;
        if tx_env.input().is_empty() &&
            let TxKind::Call(to) = tx_env.kind() &&
            let Ok(code) = db.db.account_code(&to)
        {
            is_basic_transfer = code.map(|code| code.is_empty()).unwrap_or(true);
        }

        if tx_env.gas_price() > 0 {
            highest_gas_limit =
                highest_gas_limit.min(self.caller_gas_allowance(&mut db, &evm_env, &tx_env)?);
        }

        tx_env.set_gas_limit(tx_env.gas_limit().min(highest_gas_limit));

        let block_number = evm_env.block_env().number;
        let hl_extras = self.get_hl_extras(block_number.to::<u64>().into())?;

        let mut evm = self.evm_config().evm_with_env(&mut db, evm_env);
        apply_precompiles(&mut evm, &hl_extras);

        if is_basic_transfer {
            let mut min_tx_env = tx_env.clone();
            min_tx_env.set_gas_limit(MIN_TRANSACTION_GAS);

            if let Ok(res) = evm.transact(min_tx_env).map_err(Self::Error::from_evm_err) &&
                res.result.is_success()
            {
                return Ok(U256::from(MIN_TRANSACTION_GAS));
            }
        }

        trace!(target: "rpc::eth::estimate", ?tx_env, gas_limit = tx_env.gas_limit(), is_basic_transfer, "Starting gas estimation");

        let mut res = match evm.transact(tx_env.clone()).map_err(Self::Error::from_evm_err) {
            Err(err)
                if err.is_gas_too_high() &&
                    (tx_request_gas_limit.is_some() || tx_request_gas_price.is_some()) =>
            {
                return Self::map_out_of_gas_err(&mut evm, tx_env, max_gas_limit);
            }
            Err(err) if err.is_gas_too_low() => {
                return Err(RpcInvalidTransactionError::GasRequiredExceedsAllowance {
                    gas_limit: tx_env.gas_limit(),
                }
                .into_eth_err());
            }

            ethres => ethres?,
        };

        let gas_refund = match res.result {
            ExecutionResult::Success { gas_refunded, .. } => gas_refunded,
            ExecutionResult::Halt { reason, .. } => {
                return Err(Self::Error::from_evm_halt(reason, tx_env.gas_limit()));
            }
            ExecutionResult::Revert { output, .. } => {
                return if tx_request_gas_limit.is_some() || tx_request_gas_price.is_some() {
                    Self::map_out_of_gas_err(&mut evm, tx_env, max_gas_limit)
                } else {
                    Err(RpcInvalidTransactionError::Revert(RevertError::new(output)).into_eth_err())
                };
            }
        };

        highest_gas_limit = tx_env.gas_limit();

        let mut gas_used = res.result.gas_used();

        let mut lowest_gas_limit = gas_used.saturating_sub(1);

        let optimistic_gas_limit = (gas_used + gas_refund + CALL_STIPEND_GAS) * 64 / 63;
        if optimistic_gas_limit < highest_gas_limit {
            let mut optimistic_tx_env = tx_env.clone();
            optimistic_tx_env.set_gas_limit(optimistic_gas_limit);

            res = evm.transact(optimistic_tx_env).map_err(Self::Error::from_evm_err)?;

            gas_used = res.result.gas_used();

            update_estimated_gas_range(
                res.result,
                optimistic_gas_limit,
                &mut highest_gas_limit,
                &mut lowest_gas_limit,
            )?;
        };

        let mut mid_gas_limit = std::cmp::min(
            gas_used * 3,
            ((highest_gas_limit as u128 + lowest_gas_limit as u128) / 2) as u64,
        );

        trace!(target: "rpc::eth::estimate", ?highest_gas_limit, ?lowest_gas_limit, ?mid_gas_limit, "Starting binary search for gas");

        while lowest_gas_limit + 1 < highest_gas_limit {
            if (highest_gas_limit - lowest_gas_limit) as f64 / (highest_gas_limit as f64) <
                ESTIMATE_GAS_ERROR_RATIO
            {
                break;
            };

            let mut mid_tx_env = tx_env.clone();
            mid_tx_env.set_gas_limit(mid_gas_limit);

            match evm.transact(mid_tx_env).map_err(Self::Error::from_evm_err) {
                Err(err) if err.is_gas_too_high() => {
                    highest_gas_limit = mid_gas_limit;
                }
                Err(err) if err.is_gas_too_low() => {
                    lowest_gas_limit = mid_gas_limit;
                }

                ethres => {
                    res = ethres?;

                    update_estimated_gas_range(
                        res.result,
                        mid_gas_limit,
                        &mut highest_gas_limit,
                        &mut lowest_gas_limit,
                    )?;
                }
            }

            mid_gas_limit = ((highest_gas_limit as u128 + lowest_gas_limit as u128) / 2) as u64;
        }

        Ok(U256::from(highest_gas_limit))
    }
}
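The estimator above narrows the gas limit by binary search and stops once the remaining window falls under `ESTIMATE_GAS_ERROR_RATIO`. Below is a standalone sketch of just that loop, with a plain predicate standing in for EVM execution; the function name, bounds, and the 0.015 error ratio are illustrative assumptions rather than values taken from this diff:

// Minimal sketch of the gas binary search, assuming a pure `succeeds`
// predicate in place of the EVM call and an illustrative error ratio.
const ERROR_RATIO: f64 = 0.015;

fn binary_search_gas(succeeds: impl Fn(u64) -> bool, mut lowest: u64, mut highest: u64) -> u64 {
    // Invariant: `highest` is known to succeed, `lowest` is known (or assumed) to fail.
    while lowest + 1 < highest {
        // Stop early once the window is within the accepted relative error.
        if (highest - lowest) as f64 / (highest as f64) < ERROR_RATIO {
            break;
        }
        let mid = ((highest as u128 + lowest as u128) / 2) as u64;
        if succeeds(mid) {
            highest = mid; // mid gas was enough, so the answer is at or below mid
        } else {
            lowest = mid; // mid gas was too low
        }
    }
    highest
}

fn main() {
    // A hypothetical call that needs at least 54_321 gas.
    let estimate = binary_search_gas(|gas| gas >= 54_321, 21_000, 30_000_000);
    assert!(estimate >= 54_321);
    println!("estimated gas: {estimate}");
}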
@@ -1,40 +1,43 @@
 use crate::{
+    HlBlock, HlPrimitives,
     chainspec::HlChainSpec,
     node::{evm::apply_precompiles, types::HlExtras},
-    HlBlock, HlPrimitives,
 };
+use alloy_eips::BlockId;
 use alloy_evm::Evm;
 use alloy_network::Ethereum;
 use alloy_primitives::U256;
 use reth::{
     api::{FullNodeTypes, HeaderTy, NodeTypes, PrimitivesTy},
     builder::{
-        rpc::{EthApiBuilder, EthApiCtx},
         FullNodeComponents,
+        rpc::{EthApiBuilder, EthApiCtx},
     },
     rpc::{
-        eth::{core::EthApiInner, DevSigner, FullEthApiServer},
+        eth::{DevSigner, FullEthApiServer, core::EthApiInner},
         server_types::eth::{
-            receipt::EthReceiptConverter, EthApiError, EthStateCache, FeeHistoryCache,
-            GasPriceOracle,
+            EthApiError, EthStateCache, FeeHistoryCache, GasPriceOracle,
+            receipt::EthReceiptConverter,
         },
     },
     tasks::{
-        pool::{BlockingTaskGuard, BlockingTaskPool},
         TaskSpawner,
+        pool::{BlockingTaskGuard, BlockingTaskPool},
     },
 };
 use reth_evm::{ConfigureEvm, Database, EvmEnvFor, HaltReasonFor, InspectorFor, TxEnvFor};
 use reth_primitives::NodePrimitives;
-use reth_provider::{BlockReader, ChainSpecProvider, ProviderError, ProviderHeader, ProviderTx};
+use reth_provider::{
+    BlockReaderIdExt, ChainSpecProvider, ProviderError, ProviderHeader, ProviderTx,
+};
 use reth_rpc::RpcTypes;
 use reth_rpc_eth_api::{
-    helpers::{
-        pending_block::BuildPendingEnv, spec::SignersForApi, AddDevSigners, EthApiSpec, EthFees,
-        EthState, LoadFee, LoadState, SpawnBlocking, Trace,
-    },
     EthApiTypes, FromEvmError, RpcConvert, RpcConverter, RpcNodeCore, RpcNodeCoreExt,
     SignableTxRequest,
+    helpers::{
+        AddDevSigners, EthApiSpec, EthFees, EthState, LoadFee, LoadPendingBlock, LoadState,
+        SpawnBlocking, Trace, pending_block::BuildPendingEnv, spec::SignersForApi,
+    },
 };
 use revm::context::result::ResultAndState;
 use std::{fmt, marker::PhantomData, sync::Arc};

@@ -42,6 +45,8 @@ use std::{fmt, marker::PhantomData, sync::Arc};
 mod block;
 mod call;
 pub mod engine_api;
+mod estimate;
+pub mod precompile;
 mod transaction;
 
 pub trait HlRpcNodeCore: RpcNodeCore<Primitives: NodePrimitives<Block = HlBlock>> {}

@@ -55,12 +60,17 @@ pub(crate) struct HlEthApiInner<N: HlRpcNodeCore, Rpc: RpcConvert> {
 type HlRpcConvert<N, NetworkT> =
     RpcConverter<NetworkT, <N as FullNodeComponents>::Evm, EthReceiptConverter<HlChainSpec>>;
 
-#[derive(Clone)]
 pub struct HlEthApi<N: HlRpcNodeCore, Rpc: RpcConvert> {
     /// Gateway to node's core components.
     pub(crate) inner: Arc<HlEthApiInner<N, Rpc>>,
 }
 
+impl<N: HlRpcNodeCore, Rpc: RpcConvert> Clone for HlEthApi<N, Rpc> {
+    fn clone(&self) -> Self {
+        Self { inner: self.inner.clone() }
+    }
+}
+
 impl<N, Rpc> fmt::Debug for HlEthApi<N, Rpc>
 where
     N: HlRpcNodeCore,

@@ -74,7 +84,7 @@ where
 impl<N, Rpc> EthApiTypes for HlEthApi<N, Rpc>
 where
     N: HlRpcNodeCore,
-    Rpc: RpcConvert<Primitives = N::Primitives, Error = EthApiError>,
+    Rpc: RpcConvert<Primitives = N::Primitives>,
 {
     type Error = EthApiError;
     type NetworkTypes = Rpc::Network;

@@ -150,7 +160,7 @@ where
 impl<N, Rpc> SpawnBlocking for HlEthApi<N, Rpc>
 where
     N: HlRpcNodeCore,
-    Rpc: RpcConvert<Primitives = N::Primitives, Error = EthApiError>,
+    Rpc: RpcConvert<Primitives = N::Primitives>,
 {
     #[inline]
     fn io_task_spawner(&self) -> impl TaskSpawner {

@@ -189,6 +199,7 @@ impl<N, Rpc> LoadState for HlEthApi<N, Rpc>
 where
     N: HlRpcNodeCore,
     Rpc: RpcConvert<Primitives = N::Primitives, Error = EthApiError>,
+    Self: LoadPendingBlock,
 {
 }
 

@@ -196,6 +207,7 @@ impl<N, Rpc> EthState for HlEthApi<N, Rpc>
 where
     N: HlRpcNodeCore,
     Rpc: RpcConvert<Primitives = N::Primitives, Error = EthApiError>,
+    Self: LoadPendingBlock,
 {
     #[inline]
     fn max_proof_window(&self) -> u64 {

@@ -229,7 +241,7 @@ where
         I: InspectorFor<Self::Evm, DB>,
     {
         let block_number = evm_env.block_env().number;
-        let hl_extras = self.get_hl_extras(block_number.try_into().unwrap())?;
+        let hl_extras = self.get_hl_extras(block_number.to::<u64>().into())?;
 
         let mut evm = self.evm_config().evm_with_env_and_inspector(db, evm_env, inspector);
         apply_precompiles(&mut evm, &hl_extras);

@@ -242,10 +254,10 @@ where
     N: HlRpcNodeCore,
     Rpc: RpcConvert<Primitives = N::Primitives, Error = EthApiError>,
 {
-    fn get_hl_extras(&self, block_number: u64) -> Result<HlExtras, ProviderError> {
+    fn get_hl_extras(&self, block: BlockId) -> Result<HlExtras, ProviderError> {
         Ok(self
             .provider()
-            .block_by_number(block_number)?
+            .block_by_id(block)?
             .map(|block| HlExtras {
                 read_precompile_calls: block.body.read_precompile_calls.clone(),
                 highest_precompile_address: block.body.highest_precompile_address,
44 src/node/rpc/precompile.rs (new file)
@@ -0,0 +1,44 @@
use alloy_eips::BlockId;
use jsonrpsee::proc_macros::rpc;
use jsonrpsee_core::{RpcResult, async_trait};
use reth_rpc_convert::RpcConvert;
use reth_rpc_eth_types::EthApiError;
use tracing::trace;

use crate::node::{
    rpc::{HlEthApi, HlRpcNodeCore},
    types::HlExtras,
};

/// A custom RPC trait for fetching block precompile data.
#[rpc(server, namespace = "eth")]
#[async_trait]
pub trait HlBlockPrecompileApi {
    /// Fetches precompile data for a given block.
    #[method(name = "blockPrecompileData")]
    async fn block_precompile_data(&self, block: BlockId) -> RpcResult<HlExtras>;
}

pub struct HlBlockPrecompileExt<N: HlRpcNodeCore, Rpc: RpcConvert> {
    eth_api: HlEthApi<N, Rpc>,
}

impl<N: HlRpcNodeCore, Rpc: RpcConvert> HlBlockPrecompileExt<N, Rpc> {
    /// Creates a new instance of the [`HlBlockPrecompileExt`].
    pub fn new(eth_api: HlEthApi<N, Rpc>) -> Self {
        Self { eth_api }
    }
}

#[async_trait]
impl<N, Rpc> HlBlockPrecompileApiServer for HlBlockPrecompileExt<N, Rpc>
where
    N: HlRpcNodeCore,
    Rpc: RpcConvert<Primitives = N::Primitives, Error = EthApiError>,
{
    async fn block_precompile_data(&self, block: BlockId) -> RpcResult<HlExtras> {
        trace!(target: "rpc::eth", ?block, "Serving eth_blockPrecompileData");
        let hl_extras = self.eth_api.get_hl_extras(block).map_err(EthApiError::from)?;
        Ok(hl_extras)
    }
}
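The extension above registers `blockPrecompileData` under the `eth` namespace, so clients call it as `eth_blockPrecompileData` with a block id. For illustration, a hedged client-side sketch using jsonrpsee's HTTP client; the URL, port, `latest` tag, and treating the response as raw JSON are assumptions, not taken from this diff:

// Usage sketch (not part of the diff): querying the new endpoint over HTTP.
use jsonrpsee::core::client::ClientT;
use jsonrpsee::http_client::HttpClientBuilder;
use jsonrpsee::rpc_params;

#[tokio::main]
async fn main() -> eyre::Result<()> {
    // Assumed local RPC endpoint.
    let client = HttpClientBuilder::default().build("http://127.0.0.1:8545")?;
    // Full method name = namespace `eth` + `blockPrecompileData`.
    let extras: serde_json::Value =
        client.request("eth_blockPrecompileData", rpc_params!["latest"]).await?;
    println!("{extras}");
    Ok(())
}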
@@ -1,9 +1,11 @@
+use std::time::Duration;
+
 use crate::node::rpc::{HlEthApi, HlRpcNodeCore};
-use alloy_primitives::{Bytes, B256};
+use alloy_primitives::{B256, Bytes};
 use reth::rpc::server_types::eth::EthApiError;
 use reth_rpc_eth_api::{
-    helpers::{spec::SignersForRpc, EthTransactions, LoadTransaction},
     RpcConvert,
+    helpers::{EthTransactions, LoadTransaction, spec::SignersForRpc},
 };
 
 impl<N, Rpc> LoadTransaction for HlEthApi<N, Rpc>

@@ -25,4 +27,8 @@ where
     async fn send_raw_transaction(&self, _tx: Bytes) -> Result<B256, Self::Error> {
         unreachable!()
     }
+
+    fn send_raw_transaction_sync_timeout(&self) -> Duration {
+        self.inner.eth_api.send_raw_transaction_sync_timeout()
+    }
 }
@@ -5,6 +5,8 @@ use std::collections::BTreeMap;
 
 use crate::chainspec::{MAINNET_CHAIN_ID, TESTNET_CHAIN_ID};
 
+mod patch;
+
 #[derive(Debug, Clone, Serialize, Deserialize)]
 struct EvmContract {
     address: Address,

@@ -58,5 +60,10 @@ pub(crate) fn erc20_contract_to_spot_token(chain_id: u64) -> Result<BTreeMap<Add
             map.insert(evm_contract.address, SpotId { index: token.index });
         }
     }
+
+    if chain_id == TESTNET_CHAIN_ID {
+        patch::patch_testnet_spot_meta(&mut map);
+    }
+
     Ok(map)
 }
8 src/node/spot_meta/patch.rs (new file)
@@ -0,0 +1,8 @@
use crate::node::spot_meta::SpotId;
use alloy_primitives::{Address, address};
use std::collections::BTreeMap;

/// Testnet-specific fix for #67
pub(super) fn patch_testnet_spot_meta(map: &mut BTreeMap<Address, SpotId>) {
    map.insert(address!("0xd9cbec81df392a88aeff575e962d149d57f4d6bc"), SpotId { index: 0 });
}
@@ -1,29 +1,27 @@
 use crate::{
-    node::{
-        primitives::tx_wrapper::{convert_to_eth_block_body, convert_to_hl_block_body},
-        types::HlExtras,
-    },
-    HlBlock, HlBlockBody, HlPrimitives,
+    HlBlock, HlBlockBody, HlHeader, HlPrimitives,
+    node::{primitives::TransactionSigned, types::HlExtras},
 };
 use alloy_consensus::BlockHeader;
 use alloy_primitives::Bytes;
 use reth_chainspec::EthereumHardforks;
 use reth_db::{
+    DbTxUnwindExt,
     cursor::{DbCursorRO, DbCursorRW},
     transaction::{DbTx, DbTxMut},
-    DbTxUnwindExt,
 };
+use reth_primitives_traits::Block;
 use reth_provider::{
-    providers::{ChainStorage, NodeTypesForProvider},
     BlockBodyReader, BlockBodyWriter, ChainSpecProvider, ChainStorageReader, ChainStorageWriter,
     DBProvider, DatabaseProvider, EthStorage, ProviderResult, ReadBodyInput, StorageLocation,
+    providers::{ChainStorage, NodeTypesForProvider},
 };
 
 pub mod tables;
 
 #[derive(Debug, Clone, Default)]
 #[non_exhaustive]
-pub struct HlStorage(EthStorage);
+pub struct HlStorage(EthStorage<TransactionSigned, HlHeader>);
 
 impl HlStorage {
     fn write_precompile_calls<Provider>(

@@ -89,30 +87,17 @@ where
         let mut read_precompile_calls = Vec::with_capacity(bodies.len());
 
         for (block_number, body) in bodies {
-            match body {
+            let (inner_opt, extras) = match body {
                 Some(HlBlockBody {
                     inner,
                     sidecars: _,
-                    read_precompile_calls: rpc,
+                    read_precompile_calls,
                     highest_precompile_address,
-                }) => {
-                    eth_bodies.push((block_number, Some(convert_to_eth_block_body(inner))));
-                    read_precompile_calls.push((
-                        block_number,
-                        HlExtras { read_precompile_calls: rpc, highest_precompile_address },
-                    ));
-                }
-                None => {
-                    eth_bodies.push((block_number, None));
-                    read_precompile_calls.push((
-                        block_number,
-                        HlExtras {
-                            read_precompile_calls: Default::default(),
-                            highest_precompile_address: None,
-                        },
-                    ));
-                }
-            }
+                }) => (Some(inner), HlExtras { read_precompile_calls, highest_precompile_address }),
+                None => Default::default(),
+            };
+            eth_bodies.push((block_number, inner_opt));
+            read_precompile_calls.push((block_number, extras));
         }
 
         self.0.write_block_bodies(provider, eth_bodies, write_to)?;

@@ -146,22 +131,16 @@ where
         inputs: Vec<ReadBodyInput<'_, Self::Block>>,
     ) -> ProviderResult<Vec<HlBlockBody>> {
         let read_precompile_calls = self.read_precompile_calls(provider, &inputs)?;
-        let eth_bodies = self.0.read_block_bodies(
-            provider,
-            inputs
-                .into_iter()
-                .map(|(header, transactions)| {
-                    (header, transactions.into_iter().map(|tx| tx.into_inner()).collect())
-                })
-                .collect(),
-        )?;
+        let inputs: Vec<(&<Self::Block as Block>::Header, _)> = inputs;
+        let eth_bodies = self.0.read_block_bodies(provider, inputs)?;
+        let eth_bodies: Vec<alloy_consensus::BlockBody<_, HlHeader>> = eth_bodies;
 
         // NOTE: sidecars are not used in HyperEVM yet.
         Ok(eth_bodies
             .into_iter()
             .zip(read_precompile_calls)
             .map(|(inner, extra)| HlBlockBody {
-                inner: convert_to_hl_block_body(inner),
+                inner,
                 sidecars: None,
                 read_precompile_calls: extra.read_precompile_calls,
                 highest_precompile_address: extra.highest_precompile_address,
@@ -1,5 +1,5 @@
 use alloy_primitives::{BlockNumber, Bytes};
-use reth_db::{table::TableInfo, tables, TableSet, TableType, TableViewer};
+use reth_db::{TableSet, TableType, TableViewer, table::TableInfo, tables};
 use std::fmt;
 
 tables! {
@@ -2,16 +2,19 @@
 //!
 //! Changes:
 //! - ReadPrecompileCalls supports RLP encoding / decoding
-use alloy_primitives::{Address, Bytes, Log, B256};
+use alloy_consensus::TxType;
+use alloy_primitives::{Address, B256, Bytes, Log};
 use alloy_rlp::{Decodable, Encodable, RlpDecodable, RlpEncodable};
 use bytes::BufMut;
+use reth_ethereum_primitives::EthereumReceipt;
+use reth_primitives_traits::InMemorySize;
 use serde::{Deserialize, Serialize};
 
 use crate::HlBlock;
 
 pub type ReadPrecompileCall = (Address, Vec<(ReadPrecompileInput, ReadPrecompileResult)>);
 
-#[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq, Default)]
+#[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq, Default, Hash)]
 pub struct ReadPrecompileCalls(pub Vec<ReadPrecompileCall>);
 
 pub(crate) mod reth_compat;

@@ -22,6 +25,13 @@ pub struct HlExtras {
     pub highest_precompile_address: Option<Address>,
 }
 
+impl InMemorySize for HlExtras {
+    fn size(&self) -> usize {
+        self.read_precompile_calls.as_ref().map_or(0, |s| s.0.len()) +
+            self.highest_precompile_address.as_ref().map_or(0, |_| 20)
+    }
+}
+
 impl Encodable for ReadPrecompileCalls {
     fn encode(&self, out: &mut dyn BufMut) {
         let buf: Bytes = rmp_serde::to_vec(&self.0).unwrap().into();

@@ -56,6 +66,7 @@ impl BlockAndReceipts {
             self.read_precompile_calls.clone(),
             self.highest_precompile_address,
             self.system_txs.clone(),
+            self.receipts.clone(),
             chain_id,
         )
     }

@@ -84,6 +95,23 @@ pub struct LegacyReceipt {
     logs: Vec<Log>,
 }
 
+impl From<LegacyReceipt> for EthereumReceipt {
+    fn from(r: LegacyReceipt) -> Self {
+        EthereumReceipt {
+            tx_type: match r.tx_type {
+                LegacyTxType::Legacy => TxType::Legacy,
+                LegacyTxType::Eip2930 => TxType::Eip2930,
+                LegacyTxType::Eip1559 => TxType::Eip1559,
+                LegacyTxType::Eip4844 => TxType::Eip4844,
+                LegacyTxType::Eip7702 => TxType::Eip7702,
+            },
+            success: r.success,
+            cumulative_gas_used: r.cumulative_gas_used,
+            logs: r.logs,
+        }
+    }
+}
+
 #[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq)]
 enum LegacyTxType {
     Legacy = 0,

@@ -117,7 +145,7 @@ pub struct ReadPrecompileInput {
     pub gas_limit: u64,
 }
 
-#[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq)]
+#[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq, Hash)]
 pub enum ReadPrecompileResult {
     Ok { gas_used: u64, bytes: Bytes },
     OutOfGas,
@@ -10,12 +10,12 @@ use std::{
 use tracing::info;
 
 use crate::{
+    HlBlock, HlBlockBody, HlHeader,
     node::{
         primitives::TransactionSigned as TxSigned,
-        spot_meta::{erc20_contract_to_spot_token, SpotId},
-        types::{ReadPrecompileCalls, SystemTx},
+        spot_meta::{SpotId, erc20_contract_to_spot_token},
+        types::{LegacyReceipt, ReadPrecompileCalls, SystemTx},
     },
-    HlBlock, HlBlockBody,
 };
 
 /// A raw transaction.

@@ -114,22 +114,36 @@ impl SealedBlock {
         read_precompile_calls: ReadPrecompileCalls,
         highest_precompile_address: Option<Address>,
         system_txs: Vec<super::SystemTx>,
+        receipts: Vec<LegacyReceipt>,
         chain_id: u64,
     ) -> HlBlock {
         let mut merged_txs = vec![];
         merged_txs.extend(system_txs.iter().map(|tx| system_tx_to_reth_transaction(tx, chain_id)));
         merged_txs.extend(self.body.transactions.iter().map(|tx| tx.to_reth_transaction()));
 
+        let mut merged_receipts = vec![];
+        merged_receipts.extend(system_txs.iter().map(|tx| tx.receipt.clone().unwrap().into()));
+        merged_receipts.extend(receipts.into_iter().map(From::from));
+
         let block_body = HlBlockBody {
             inner: reth_primitives::BlockBody {
                 transactions: merged_txs,
                 withdrawals: self.body.withdrawals.clone(),
-                ommers: self.body.ommers.clone(),
+                ommers: vec![],
             },
             sidecars: None,
             read_precompile_calls: Some(read_precompile_calls),
             highest_precompile_address,
         };
 
-        HlBlock { header: self.header.header.clone(), body: block_body }
+        let system_tx_count = system_txs.len() as u64;
+        HlBlock {
+            header: HlHeader::from_ethereum_header(
+                self.header.header.clone(),
+                &merged_receipts,
+                system_tx_count,
+            ),
+            body: block_body,
+        }
     }
 }
@@ -46,7 +46,7 @@ impl BlockSourceConfig {
                 .expect("home dir not found")
                 .join("hl")
                 .join("data")
-                .join("evm_blocks_and_receipts"),
+                .join("evm_block_and_receipts"),
             },
             block_source_from_node: None,
         }
@@ -37,6 +37,7 @@ pub async fn start_pseudo_peer(
     chain_spec: Arc<HlChainSpec>,
     destination_peer: String,
     block_source: BlockSourceBoxed,
+    debug_cutoff_height: Option<u64>,
 ) -> eyre::Result<()> {
     let blockhash_cache = new_blockhash_cache();
 

@@ -46,6 +47,7 @@ pub async fn start_pseudo_peer(
         destination_peer,
         block_source.clone(),
         blockhash_cache.clone(),
+        debug_cutoff_height,
     )
     .await?;
 
@@ -1,8 +1,8 @@
 use super::service::{BlockHashCache, BlockPoller};
-use crate::{chainspec::HlChainSpec, node::network::HlNetworkPrimitives, HlPrimitives};
+use crate::{HlPrimitives, chainspec::HlChainSpec, node::network::HlNetworkPrimitives};
 use reth_network::{
-    config::{rng_secret_key, SecretKey},
     NetworkConfig, NetworkManager, PeersConfig,
+    config::{SecretKey, rng_secret_key},
 };
 use reth_network_peers::TrustedPeer;
 use reth_provider::test_utils::NoopProvider;

@@ -20,6 +20,7 @@ pub struct NetworkBuilder {
     discovery_port: u16,
     listener_port: u16,
     chain_spec: HlChainSpec,
+    debug_cutoff_height: Option<u64>,
 }
 
 impl Default for NetworkBuilder {

@@ -31,6 +32,7 @@ impl Default for NetworkBuilder {
             discovery_port: 0,
             listener_port: 0,
             chain_spec: HlChainSpec::default(),
+            debug_cutoff_height: None,
         }
     }
 }

@@ -46,6 +48,11 @@ impl NetworkBuilder {
         self
     }
 
+    pub fn with_debug_cutoff_height(mut self, debug_cutoff_height: Option<u64>) -> Self {
+        self.debug_cutoff_height = debug_cutoff_height;
+        self
+    }
+
     pub async fn build<BS>(
         self,
         block_source: Arc<Box<dyn super::sources::BlockSource>>,

@@ -58,8 +65,12 @@ impl NetworkBuilder {
             .listener_addr(SocketAddr::new(Ipv4Addr::LOCALHOST.into(), self.listener_port));
         let chain_id = self.chain_spec.inner.chain().id();
 
-        let (block_poller, start_tx) =
-            BlockPoller::new_suspended(chain_id, block_source, blockhash_cache);
+        let (block_poller, start_tx) = BlockPoller::new_suspended(
+            chain_id,
+            block_source,
+            blockhash_cache,
+            self.debug_cutoff_height,
+        );
         let config = builder.block_import(Box::new(block_poller)).build(Arc::new(NoopProvider::<
             HlChainSpec,
             HlPrimitives,
@@ -77,10 +88,12 @@ pub async fn create_network_manager<BS>(
     destination_peer: String,
     block_source: Arc<Box<dyn super::sources::BlockSource>>,
     blockhash_cache: BlockHashCache,
+    debug_cutoff_height: Option<u64>,
 ) -> eyre::Result<(NetworkManager<HlNetworkPrimitives>, mpsc::Sender<()>)> {
     NetworkBuilder::default()
         .with_boot_nodes(vec![TrustedPeer::from_str(&destination_peer).unwrap()])
         .with_chain_spec(chain_spec)
+        .with_debug_cutoff_height(debug_cutoff_height)
         .build::<BS>(block_source, blockhash_cache)
         .await
 }
@@ -52,12 +52,12 @@ impl BlockPoller {
         chain_id: u64,
         block_source: BS,
         blockhash_cache: BlockHashCache,
+        debug_cutoff_height: Option<u64>,
     ) -> (Self, mpsc::Sender<()>) {
         let block_source = Arc::new(block_source);
         let (start_tx, start_rx) = mpsc::channel(1);
         let (block_tx, block_rx) = mpsc::channel(100);
-        let block_tx_clone = block_tx.clone();
-        let task = tokio::spawn(Self::task(start_rx, block_source, block_tx_clone));
+        let task = tokio::spawn(Self::task(start_rx, block_source, block_tx, debug_cutoff_height));
         (Self { chain_id, block_rx, task, blockhash_cache: blockhash_cache.clone() }, start_tx)
     }
 

@@ -69,7 +69,8 @@ impl BlockPoller {
     async fn task<BS: BlockSource>(
         mut start_rx: mpsc::Receiver<()>,
         block_source: Arc<BS>,
-        block_tx_clone: mpsc::Sender<(u64, BlockAndReceipts)>,
+        block_tx: mpsc::Sender<(u64, BlockAndReceipts)>,
+        debug_cutoff_height: Option<u64>,
     ) -> eyre::Result<()> {
         start_rx.recv().await.ok_or(eyre::eyre!("Failed to receive start signal"))?;
         info!("Starting block poller");

@@ -81,9 +82,15 @@ impl BlockPoller {
             .ok_or(eyre::eyre!("Failed to find latest block number"))?;
 
         loop {
+            if let Some(debug_cutoff_height) = debug_cutoff_height
+                && next_block_number > debug_cutoff_height
+            {
+                next_block_number = debug_cutoff_height;
+            }
+
             match block_source.collect_block(next_block_number).await {
                 Ok(block) => {
-                    block_tx_clone.send((next_block_number, block)).await?;
+                    block_tx.send((next_block_number, block)).await?;
                     next_block_number += 1;
                 }
                 Err(_) => tokio::time::sleep(polling_interval).await,
@@ -1,6 +1,6 @@
 use super::{BlockSource, BlockSourceBoxed};
 use crate::node::types::BlockAndReceipts;
-use futures::{future::BoxFuture, FutureExt};
+use futures::{FutureExt, future::BoxFuture};
 use reth_network::cache::LruMap;
 use std::sync::{Arc, RwLock};
 

@@ -27,7 +27,7 @@ impl LocalBlocksCache {
     }
 
     pub fn get_block(&mut self, height: u64) -> Option<BlockAndReceipts> {
-        self.cache.remove(&height)
+        self.cache.get(&height).cloned()
     }
 
     pub fn get_path_for_height(&self, height: u64) -> Option<PathBuf> {
@@ -1,4 +1,4 @@
-use super::{scan::Scanner, time_utils::TimeUtils, HOURLY_SUBDIR};
+use super::{HOURLY_SUBDIR, scan::Scanner, time_utils::TimeUtils};
 use crate::node::types::BlockAndReceipts;
 use std::{
     fs::File,

@@ -8,12 +8,13 @@ mod time_utils;
 use self::{
     cache::LocalBlocksCache,
     file_ops::FileOperations,
-    scan::{ScanOptions, Scanner},
+    scan::{LineStream, ScanOptions, Scanner},
     time_utils::TimeUtils,
 };
 use super::{BlockSource, BlockSourceBoxed};
 use crate::node::types::BlockAndReceipts;
 use futures::future::BoxFuture;
+use reth_metrics::{Metrics, metrics, metrics::Counter};
 use std::{
     path::{Path, PathBuf},
     sync::Arc,
@@ -41,6 +42,18 @@ pub struct HlNodeBlockSource {
     pub local_blocks_cache: Arc<Mutex<LocalBlocksCache>>,
     pub last_local_fetch: Arc<Mutex<Option<(u64, OffsetDateTime)>>>,
     pub args: HlNodeBlockSourceArgs,
+    pub metrics: HlNodeBlockSourceMetrics,
+}
+
+#[derive(Metrics, Clone)]
+#[metrics(scope = "block_source.hl_node")]
+pub struct HlNodeBlockSourceMetrics {
+    /// How many times the HL node block source is polling for a block
+    pub fetched_from_hl_node: Counter,
+    /// How many times the HL node block source is fetched from the fallback
+    pub fetched_from_fallback: Counter,
+    /// How many times `try_collect_local_block` was faster than ingest loop
+    pub file_read_triggered: Counter,
 }
 
 impl BlockSource for HlNodeBlockSource {
@@ -49,11 +62,15 @@ impl BlockSource for HlNodeBlockSource {
         let args = self.args.clone();
         let local_blocks_cache = self.local_blocks_cache.clone();
         let last_local_fetch = self.last_local_fetch.clone();
+        let metrics = self.metrics.clone();
         Box::pin(async move {
             let now = OffsetDateTime::now_utc();
 
-            if let Some(block) = Self::try_collect_local_block(local_blocks_cache, height).await {
+            if let Some(block) =
+                Self::try_collect_local_block(&metrics, local_blocks_cache, height).await
+            {
                 Self::update_last_fetch(last_local_fetch, height, now).await;
+                metrics.fetched_from_hl_node.increment(1);
                 return Ok(block);
             }
 
@ -62,12 +79,13 @@ impl BlockSource for HlNodeBlockSource {
|
|||||||
let too_soon = now - last_poll_time < args.fallback_threshold;
|
let too_soon = now - last_poll_time < args.fallback_threshold;
|
||||||
if more_recent && too_soon {
|
if more_recent && too_soon {
|
||||||
return Err(eyre::eyre!(
|
return Err(eyre::eyre!(
|
||||||
"Not found locally; limiting polling rate before fallback so that hl-node has chance to catch up"
|
"Not found locally; limiting polling rate before fallback so that hl-node has chance to catch up"
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let block = fallback.collect_block(height).await?;
|
let block = fallback.collect_block(height).await?;
|
||||||
|
metrics.fetched_from_fallback.increment(1);
|
||||||
Self::update_last_fetch(last_local_fetch, height, now).await;
|
Self::update_last_fetch(last_local_fetch, height, now).await;
|
||||||
Ok(block)
|
Ok(block)
|
||||||
})
|
})
|
||||||
@ -106,6 +124,28 @@ impl BlockSource for HlNodeBlockSource {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
struct CurrentFile {
|
||||||
|
path: PathBuf,
|
||||||
|
line_stream: Option<LineStream>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CurrentFile {
|
||||||
|
pub fn from_datetime(dt: OffsetDateTime, root: &Path) -> Self {
|
||||||
|
let (hour, day_str) = (dt.hour(), TimeUtils::date_from_datetime(dt));
|
||||||
|
let path = root.join(HOURLY_SUBDIR).join(&day_str).join(format!("{}", hour));
|
||||||
|
Self { path, line_stream: None }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn open(&mut self) -> eyre::Result<()> {
|
||||||
|
if self.line_stream.is_some() {
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
self.line_stream = Some(LineStream::from_path(&self.path)?);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl HlNodeBlockSource {
|
impl HlNodeBlockSource {
|
||||||
async fn update_last_fetch(
|
async fn update_last_fetch(
|
||||||
last_local_fetch: Arc<Mutex<Option<(u64, OffsetDateTime)>>>,
|
last_local_fetch: Arc<Mutex<Option<(u64, OffsetDateTime)>>>,
|
||||||
@ -119,6 +159,7 @@ impl HlNodeBlockSource {
|
|||||||
}
|
}
|
||||||
|
|
||||||
async fn try_collect_local_block(
|
async fn try_collect_local_block(
|
||||||
|
metrics: &HlNodeBlockSourceMetrics,
|
||||||
local_blocks_cache: Arc<Mutex<LocalBlocksCache>>,
|
local_blocks_cache: Arc<Mutex<LocalBlocksCache>>,
|
||||||
height: u64,
|
height: u64,
|
||||||
) -> Option<BlockAndReceipts> {
|
) -> Option<BlockAndReceipts> {
|
||||||
@ -128,9 +169,10 @@ impl HlNodeBlockSource {
|
|||||||
}
|
}
|
||||||
let path = u_cache.get_path_for_height(height)?;
|
let path = u_cache.get_path_for_height(height)?;
|
||||||
info!("Loading block data from {:?}", path);
|
info!("Loading block data from {:?}", path);
|
||||||
|
metrics.file_read_triggered.increment(1);
|
||||||
|
let mut line_stream = LineStream::from_path(&path).ok()?;
|
||||||
let scan_result = Scanner::scan_hour_file(
|
let scan_result = Scanner::scan_hour_file(
|
||||||
&path,
|
&mut line_stream,
|
||||||
&mut 0,
|
|
||||||
ScanOptions { start_height: 0, only_load_ranges: false },
|
ScanOptions { start_height: 0, only_load_ranges: false },
|
||||||
);
|
);
|
||||||
u_cache.load_scan_result(scan_result);
|
u_cache.load_scan_result(scan_result);
|
||||||
@ -151,9 +193,10 @@ impl HlNodeBlockSource {
|
|||||||
} else {
|
} else {
|
||||||
warn!("Failed to parse last line of file: {:?}", subfile);
|
warn!("Failed to parse last line of file: {:?}", subfile);
|
||||||
}
|
}
|
||||||
|
let mut line_stream =
|
||||||
|
LineStream::from_path(&subfile).expect("Failed to open line stream");
|
||||||
let mut scan_result = Scanner::scan_hour_file(
|
let mut scan_result = Scanner::scan_hour_file(
|
||||||
&subfile,
|
&mut line_stream,
|
||||||
&mut 0,
|
|
||||||
ScanOptions { start_height: cutoff_height, only_load_ranges: true },
|
ScanOptions { start_height: cutoff_height, only_load_ranges: true },
|
||||||
);
|
);
|
||||||
scan_result.new_blocks.clear(); // Only store ranges, load data lazily
|
scan_result.new_blocks.clear(); // Only store ranges, load data lazily
|
||||||
@ -174,15 +217,13 @@ impl HlNodeBlockSource {
|
|||||||
}
|
}
|
||||||
tokio::time::sleep(TAIL_INTERVAL).await;
|
tokio::time::sleep(TAIL_INTERVAL).await;
|
||||||
};
|
};
|
||||||
let (mut hour, mut day_str, mut last_line) =
|
let mut current_file = CurrentFile::from_datetime(dt, &root);
|
||||||
(dt.hour(), TimeUtils::date_from_datetime(dt), 0);
|
|
||||||
info!("Starting local ingest loop from height: {}", current_head);
|
info!("Starting local ingest loop from height: {}", current_head);
|
||||||
loop {
|
loop {
|
||||||
let hour_file = root.join(HOURLY_SUBDIR).join(&day_str).join(format!("{hour}"));
|
let _ = current_file.open();
|
||||||
if hour_file.exists() {
|
if let Some(line_stream) = &mut current_file.line_stream {
|
||||||
let scan_result = Scanner::scan_hour_file(
|
let scan_result = Scanner::scan_hour_file(
|
||||||
&hour_file,
|
line_stream,
|
||||||
&mut last_line,
|
|
||||||
ScanOptions { start_height: next_height, only_load_ranges: false },
|
ScanOptions { start_height: next_height, only_load_ranges: false },
|
||||||
);
|
);
|
||||||
next_height = scan_result.next_expected_height;
|
next_height = scan_result.next_expected_height;
|
||||||
@ -191,11 +232,8 @@ impl HlNodeBlockSource {
|
|||||||
let now = OffsetDateTime::now_utc();
|
let now = OffsetDateTime::now_utc();
|
||||||
if dt + ONE_HOUR < now {
|
if dt + ONE_HOUR < now {
|
||||||
dt += ONE_HOUR;
|
dt += ONE_HOUR;
|
||||||
(hour, day_str, last_line) = (dt.hour(), TimeUtils::date_from_datetime(dt), 0);
|
current_file = CurrentFile::from_datetime(dt, &root);
|
||||||
info!(
|
info!("Moving to new file: {:?}", current_file.path);
|
||||||
"Moving to new file: {:?}",
|
|
||||||
root.join(HOURLY_SUBDIR).join(&day_str).join(format!("{hour}"))
|
|
||||||
);
|
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
tokio::time::sleep(TAIL_INTERVAL).await;
|
tokio::time::sleep(TAIL_INTERVAL).await;
|
||||||
@ -224,6 +262,7 @@ impl HlNodeBlockSource {
|
|||||||
args,
|
args,
|
||||||
local_blocks_cache: Arc::new(Mutex::new(LocalBlocksCache::new(CACHE_SIZE))),
|
local_blocks_cache: Arc::new(Mutex::new(LocalBlocksCache::new(CACHE_SIZE))),
|
||||||
last_local_fetch: Arc::new(Mutex::new(None)),
|
last_local_fetch: Arc::new(Mutex::new(None)),
|
||||||
|
metrics: HlNodeBlockSourceMetrics::default(),
|
||||||
};
|
};
|
||||||
block_source.run(next_block_number).await.unwrap();
|
block_source.run(next_block_number).await.unwrap();
|
||||||
block_source
|
block_source
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ use crate::node::types::{BlockAndReceipts, EvmBlock};
 use serde::{Deserialize, Serialize};
 use std::{
 fs::File,
-io::{BufRead, BufReader},
+io::{BufRead, BufReader, Seek, SeekFrom},
 ops::RangeInclusive,
 path::{Path, PathBuf},
 };
@@ -25,6 +25,57 @@ pub struct ScanOptions {
 
 pub struct Scanner;
 
+/// Stream for sequentially reading lines from a file.
+///
+/// This struct allows sequential iteration over lines over [Self::next] method.
+/// It is resilient to cases where the line producer process is interrupted while writing:
+/// - If a line is incomplete but still ends with a line ending, it is skipped: later, the fallback
+/// block source will be used to retrieve the missing block.
+/// - If a line does not end with a newline (i.e., the write was incomplete), the method returns
+/// `None` to break out of the loop and avoid reading partial data.
+/// - If a temporary I/O error occurs, the stream exits the loop without rewinding the cursor, which
+/// will result in skipping ahead to the next unread bytes.
+pub struct LineStream {
+path: PathBuf,
+reader: BufReader<File>,
+}
+
+impl LineStream {
+pub fn from_path(path: &Path) -> std::io::Result<Self> {
+let reader = BufReader::with_capacity(1024 * 1024, File::open(path)?);
+Ok(Self { path: path.to_path_buf(), reader })
+}
+
+pub fn next(&mut self) -> Option<String> {
+let mut line_buffer = vec![];
+let Ok(size) = self.reader.read_until(b'\n', &mut line_buffer) else {
+// Temporary I/O error; restart the loop
+return None;
+};
+
+// Now cursor is right after the end of the line
+// On UTF-8 error, skip the line
+let Ok(mut line) = String::from_utf8(line_buffer) else {
+return Some(String::new());
+};
+
+// If line is not completed yet, return None so that we can break the loop
+if line.ends_with('\n') {
+if line.ends_with('\r') {
+line.pop();
+}
+line.pop();
+return Some(line);
+}
+
+// info!("Line is not completed yet: {}", line);
+if size != 0 {
+self.reader.seek(SeekFrom::Current(-(size as i64))).unwrap();
+}
+None
+}
+}
+
 impl Scanner {
 pub fn line_to_evm_block(line: &str) -> serde_json::Result<(BlockAndReceipts, u64)> {
 let LocalBlockAndReceipts(_, parsed_block): LocalBlockAndReceipts =
@@ -35,31 +86,20 @@ impl Scanner {
 Ok((parsed_block, height))
 }
 
-pub fn scan_hour_file(path: &Path, last_line: &mut usize, options: ScanOptions) -> ScanResult {
+pub fn scan_hour_file(line_stream: &mut LineStream, options: ScanOptions) -> ScanResult {
-let lines: Vec<String> =
-BufReader::new(File::open(path).expect("Failed to open hour file"))
-.lines()
-.collect::<Result<_, _>>()
-.unwrap();
-let skip = if *last_line == 0 { 0 } else { *last_line - 1 };
 let mut new_blocks = Vec::new();
 let mut last_height = options.start_height;
 let mut block_ranges = Vec::new();
 let mut current_range: Option<(u64, u64)> = None;
 
-for (line_idx, line) in lines.iter().enumerate().skip(skip) {
+while let Some(line) = line_stream.next() {
-if line_idx < *last_line || line.trim().is_empty() {
+match Self::line_to_evm_block(&line) {
-continue;
-}
-
-match Self::line_to_evm_block(line) {
 Ok((parsed_block, height)) => {
 if height >= options.start_height {
 last_height = last_height.max(height);
 if !options.only_load_ranges {
 new_blocks.push(parsed_block);
 }
-*last_line = line_idx;
 }
 
 match current_range {
@@ -74,16 +114,17 @@ impl Scanner {
 }
 }
 }
-Err(_) => warn!("Failed to parse line: {}...", line.get(0..50).unwrap_or(line)),
+Err(_) => warn!("Failed to parse line: {}...", line.get(0..50).unwrap_or(&line)),
 }
 }
+
 if let Some((start, end)) = current_range {
 block_ranges.push(start..=end);
 }
 
 ScanResult {
-path: path.to_path_buf(),
+path: line_stream.path.clone(),
-next_expected_height: last_height + 1,
+next_expected_height: last_height + current_range.is_some() as u64,
 new_blocks,
 new_block_ranges: block_ranges,
 }
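As context for the scan.rs changes above: a minimal usage sketch of the new LineStream-based scanning, assuming LineStream, Scanner, ScanOptions and ScanResult are imported from this module. The tail_hour_file helper and its polling interval are illustrative only and are not part of the diff.

use std::{path::Path, thread, time::Duration};

// Illustrative sketch: reuse one LineStream so its cursor survives partially
// written lines between polls, mirroring the ingest loop in the hunks above.
fn tail_hour_file(path: &Path, mut next_height: u64) -> std::io::Result<()> {
    let mut stream = LineStream::from_path(path)?;
    loop {
        let scan = Scanner::scan_hour_file(
            &mut stream,
            ScanOptions { start_height: next_height, only_load_ranges: false },
        );
        next_height = scan.next_expected_height;
        for _block in scan.new_blocks {
            // forward each BlockAndReceipts to the ingest channel here
        }
        thread::sleep(Duration::from_millis(250));
    }
}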
@@ -1,10 +1,10 @@
 use super::*;
 use crate::{
-node::types::{reth_compat, ReadPrecompileCalls},
+node::types::{ReadPrecompileCalls, reth_compat},
-pseudo_peer::sources::{hl_node::scan::LocalBlockAndReceipts, LocalBlockSource},
+pseudo_peer::sources::{LocalBlockSource, hl_node::scan::LocalBlockAndReceipts},
 };
 use alloy_consensus::{BlockBody, Header};
-use alloy_primitives::{Address, Bloom, Bytes, B256, B64, U256};
+use alloy_primitives::{Address, B64, B256, Bloom, Bytes, U256};
 use std::{io::Write, time::Duration};
 
 const DEFAULT_FALLBACK_THRESHOLD_FOR_TEST: Duration = Duration::from_millis(5000);
@@ -1,5 +1,5 @@
 use std::path::Path;
-use time::{macros::format_description, Date, OffsetDateTime, Time};
+use time::{Date, OffsetDateTime, Time, macros::format_description};
 
 pub struct TimeUtils;
 
@@ -1,7 +1,8 @@
-use super::{utils, BlockSource};
+use super::{BlockSource, utils};
 use crate::node::types::BlockAndReceipts;
 use eyre::Context;
-use futures::{future::BoxFuture, FutureExt};
+use futures::{FutureExt, future::BoxFuture};
+use reth_metrics::{Metrics, metrics, metrics::Counter};
 use std::path::PathBuf;
 use tracing::info;
 
@@ -9,11 +10,21 @@ use tracing::info;
 #[derive(Debug, Clone)]
 pub struct LocalBlockSource {
 dir: PathBuf,
+metrics: LocalBlockSourceMetrics,
+}
+
+#[derive(Metrics, Clone)]
+#[metrics(scope = "block_source.local")]
+pub struct LocalBlockSourceMetrics {
+/// How many times the local block source is polling for a block
+pub polling_attempt: Counter,
+/// How many times the local block source is fetched from the local filesystem
+pub fetched: Counter,
 }
 
 impl LocalBlockSource {
 pub fn new(dir: impl Into<PathBuf>) -> Self {
-Self { dir: dir.into() }
+Self { dir: dir.into(), metrics: LocalBlockSourceMetrics::default() }
 }
 
 async fn pick_path_with_highest_number(dir: PathBuf, is_dir: bool) -> Option<(u64, String)> {
@@ -31,13 +42,17 @@ impl LocalBlockSource {
 impl BlockSource for LocalBlockSource {
 fn collect_block(&self, height: u64) -> BoxFuture<'static, eyre::Result<BlockAndReceipts>> {
 let dir = self.dir.clone();
+let metrics = self.metrics.clone();
 async move {
 let path = dir.join(utils::rmp_path(height));
+metrics.polling_attempt.increment(1);
+
 let file = tokio::fs::read(&path)
 .await
 .wrap_err_with(|| format!("Failed to read block from {path:?}"))?;
 let mut decoder = lz4_flex::frame::FrameDecoder::new(&file[..]);
 let blocks: Vec<BlockAndReceipts> = rmp_serde::from_read(&mut decoder)?;
+metrics.fetched.increment(1);
 Ok(blocks[0].clone())
 }
 .boxed()
@@ -1,7 +1,8 @@
-use super::{utils, BlockSource};
+use super::{BlockSource, utils};
 use crate::node::types::BlockAndReceipts;
 use aws_sdk_s3::types::RequestPayer;
-use futures::{future::BoxFuture, FutureExt};
+use futures::{FutureExt, future::BoxFuture};
+use reth_metrics::{Metrics, metrics, metrics::Counter};
 use std::{sync::Arc, time::Duration};
 use tracing::info;
 
@@ -11,11 +12,26 @@ pub struct S3BlockSource {
 client: Arc<aws_sdk_s3::Client>,
 bucket: String,
 polling_interval: Duration,
+metrics: S3BlockSourceMetrics,
+}
+
+#[derive(Metrics, Clone)]
+#[metrics(scope = "block_source.s3")]
+pub struct S3BlockSourceMetrics {
+/// How many times the S3 block source is polling for a block
+pub polling_attempt: Counter,
+/// How many times the S3 block source has polled a block
+pub fetched: Counter,
 }
 
 impl S3BlockSource {
 pub fn new(client: aws_sdk_s3::Client, bucket: String, polling_interval: Duration) -> Self {
-Self { client: client.into(), bucket, polling_interval }
+Self {
+client: client.into(),
+bucket,
+polling_interval,
+metrics: S3BlockSourceMetrics::default(),
+}
 }
 
 async fn pick_path_with_highest_number(
@@ -52,14 +68,18 @@ impl BlockSource for S3BlockSource {
 fn collect_block(&self, height: u64) -> BoxFuture<'static, eyre::Result<BlockAndReceipts>> {
 let client = self.client.clone();
 let bucket = self.bucket.clone();
+let metrics = self.metrics.clone();
 async move {
 let path = utils::rmp_path(height);
+metrics.polling_attempt.increment(1);
+
 let request = client
 .get_object()
 .request_payer(RequestPayer::Requester)
 .bucket(&bucket)
 .key(path);
 let response = request.send().await?;
+metrics.fetched.increment(1);
 let bytes = response.body.collect().await?.into_bytes();
 let mut decoder = lz4_flex::frame::FrameDecoder::new(&bytes[..]);
 let blocks: Vec<BlockAndReceipts> = rmp_serde::from_read(&mut decoder)?;
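The three metrics structs added above (for the hl_node, local, and S3 block sources) all follow the same reth_metrics pattern: a #[derive(Metrics)] struct registered under the scope given in #[metrics(...)], whose Counter fields are incremented on each code path. A minimal sketch of that shape, with illustrative names that are not part of this diff:

use reth_metrics::{Metrics, metrics, metrics::Counter};

// Illustrative example of the pattern used by S3BlockSourceMetrics and friends.
#[derive(Metrics, Clone)]
#[metrics(scope = "block_source.example")]
pub struct ExampleBlockSourceMetrics {
    /// Incremented on every poll attempt
    pub polling_attempt: Counter,
    /// Incremented when a block is actually returned
    pub fetched: Counter,
}

fn record_poll(metrics: &ExampleBlockSourceMetrics, found: bool) {
    metrics.polling_attempt.increment(1);
    if found {
        metrics.fetched.increment(1);
    }
}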
35
src/version.rs
Normal file
@@ -0,0 +1,35 @@
+use std::borrow::Cow;
+
+use reth_node_core::version::{RethCliVersionConsts, try_init_version_metadata};
+
+pub fn init_reth_hl_version() {
+let cargo_pkg_version = env!("CARGO_PKG_VERSION").to_string();
+
+let short = env!("RETH_HL_SHORT_VERSION").to_string();
+let long = format!(
+"{}\n{}\n{}\n{}\n{}",
+env!("RETH_HL_LONG_VERSION_0"),
+env!("RETH_HL_LONG_VERSION_1"),
+env!("RETH_HL_LONG_VERSION_2"),
+env!("RETH_HL_LONG_VERSION_3"),
+env!("RETH_HL_LONG_VERSION_4"),
+);
+let p2p = env!("RETH_HL_P2P_CLIENT_VERSION").to_string();
+
+let meta = RethCliVersionConsts {
+name_client: Cow::Borrowed("reth_hl"),
+cargo_pkg_version: Cow::Owned(cargo_pkg_version.clone()),
+vergen_git_sha_long: Cow::Owned(env!("VERGEN_GIT_SHA").to_string()),
+vergen_git_sha: Cow::Owned(env!("VERGEN_GIT_SHA_SHORT").to_string()),
+vergen_build_timestamp: Cow::Owned(env!("VERGEN_BUILD_TIMESTAMP").to_string()),
+vergen_cargo_target_triple: Cow::Owned(env!("VERGEN_CARGO_TARGET_TRIPLE").to_string()),
+vergen_cargo_features: Cow::Owned(env!("VERGEN_CARGO_FEATURES").to_string()),
+short_version: Cow::Owned(short),
+long_version: Cow::Owned(long),
+build_profile_name: Cow::Owned(env!("RETH_HL_BUILD_PROFILE").to_string()),
+p2p_client_version: Cow::Owned(p2p),
+extra_data: Cow::Owned(format!("reth_hl/v{}/{}", cargo_pkg_version, std::env::consts::OS)),
+};
+
+let _ = try_init_version_metadata(meta);
+}
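The env!(...) values read by src/version.rs (RETH_HL_SHORT_VERSION, RETH_HL_LONG_VERSION_*, RETH_HL_P2P_CLIENT_VERSION, RETH_HL_BUILD_PROFILE, and the VERGEN_* values) have to be emitted at compile time by a build script, which is not part of this compare view. A hedged sketch of the kind of line such a build.rs would contain, with an illustrative value only:

// Hypothetical build.rs fragment; the real build script is not shown here.
fn main() {
    // Each variable read via env!() in src/version.rs must be set like this
    // before the crate compiles, e.g. the short version string:
    println!("cargo:rustc-env=RETH_HL_SHORT_VERSION={}", env!("CARGO_PKG_VERSION"));
}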
49
tests/run_tests.sh
Normal file
@@ -0,0 +1,49 @@
+#!/bin/bash
+
+set -e
+
+export ETH_RPC_URL="${ETH_RPC_URL:-wss://hl-archive-node.xyz}"
+
+success() {
+echo "Success: $1"
+}
+
+fail() {
+echo "Failed: $1"
+exit 1
+}
+
+ensure_cmd() {
+command -v "$1" > /dev/null 2>&1 || fail "$1 is required"
+}
+
+ensure_cmd jq
+ensure_cmd cast
+ensure_cmd wscat
+
+if [[ ! "$ETH_RPC_URL" =~ ^wss?:// ]]; then
+fail "ETH_RPC_URL must be a websocket url"
+fi
+
+TITLE="Issue #78 - eth_getLogs should return system transactions"
+cast logs \
+--rpc-url "$ETH_RPC_URL" \
+--from-block 15312567 \
+--to-block 15312570 \
+--address 0x9fdbda0a5e284c32744d2f17ee5c74b284993463 \
+0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef \
+| grep -q "0x00000000000000000000000020000000000000000000000000000000000000c5" \
+&& success "$TITLE" || fail "$TITLE"
+
+TITLE="Issue #78 - eth_getBlockByNumber should return the same logsBloom as official RPC"
+OFFICIAL_RPC="https://rpc.hyperliquid.xyz/evm"
+A=$(cast block 1394092 --rpc-url "$ETH_RPC_URL" -f logsBloom | md5sum)
+B=$(cast block 1394092 --rpc-url "$OFFICIAL_RPC" -f logsBloom | md5sum)
+echo node "$A"
+echo rpc\ "$B"
+[[ "$A" == "$B" ]] && success "$TITLE" || fail "$TITLE"
+
+TITLE="eth_subscribe newHeads via wscat"
+CMD='{"jsonrpc":"2.0","id":1,"method":"eth_subscribe","params":["newHeads"]}'
+wscat -w 2 -c "$ETH_RPC_URL" -x "$CMD" | tail -1 | jq -r .params.result.nonce | grep 0x \
+&& success "$TITLE" || fail "$TITLE"