Compare commits


No commits in common. "main" and "pyth-price-pusher-v6.4.0" have entirely different histories.

395 changed files with 12554 additions and 44962 deletions
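The two refs share no merge base, so the comparison diffs the two trees directly. A rough local equivalent, assuming the remote is named `origin` and the tag has been fetched, would be:

    # Compare the two refs tree-to-tree; the three-dot (merge-base) form would
    # fail here because the branches share no common ancestor.
    git fetch origin main --tags
    git diff --stat origin/main pyth-price-pusher-v6.4.0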

View File

@@ -15,4 +15,5 @@
 .git
-!apps/hermes/src/state/cache.rs
+hermes/wormhole
+!hermes/src/state/cache.rs

View File

@@ -21,10 +21,10 @@ jobs:
      - uses: actions/checkout@v3
      - name: Download CLI
-       run: wget https://github.com/aptos-labs/aptos-core/releases/download/aptos-cli-v3.1.0/aptos-cli-3.1.0-Ubuntu-22.04-x86_64.zip
+       run: wget https://github.com/aptos-labs/aptos-core/releases/download/aptos-cli-v1.0.4/aptos-cli-1.0.4-Ubuntu-22.04-x86_64.zip
      - name: Unzip CLI
-       run: unzip aptos-cli-3.1.0-Ubuntu-22.04-x86_64.zip
+       run: unzip aptos-cli-1.0.4-Ubuntu-22.04-x86_64.zip
      - name: Run tests
        run: ./aptos move test

View File

@@ -2,10 +2,10 @@ name: Check Fortuna
 on:
   pull_request:
-    paths: [apps/fortuna/**]
+    paths: [fortuna/**]
   push:
     branches: [main]
-    paths: [apps/fortuna/**]
+    paths: [fortuna/**]
 jobs:
   test:
     runs-on: ubuntu-latest
@@ -17,4 +17,4 @@ jobs:
          toolchain: nightly-2023-07-23
          override: true
      - name: Run executor tests
-       run: cargo test --manifest-path ./apps/fortuna/Cargo.toml
+       run: cargo test --manifest-path ./fortuna/Cargo.toml

View File

@@ -1,35 +0,0 @@
name: Test Fuel Contract
on:
  pull_request:
    paths:
      - target_chains/fuel/**
  push:
    branches:
      - main
    paths:
      - target_chains/fuel/**
env:
  CARGO_TERM_COLOR: always
jobs:
  build:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: target_chains/fuel/contracts/
    steps:
      - uses: actions/checkout@v2
      - name: Install Fuel toolchain
        run: |
          curl https://install.fuel.network | sh
          echo "$HOME/.fuelup/bin" >> $GITHUB_PATH
      - name: Build with Forc
        run: forc build --verbose
      - name: Run tests with Forc
        run: forc test --verbose
      - name: Build
        run: cargo build --verbose
      - name: Run tests
        run: cargo test --verbose

View File

@ -2,10 +2,10 @@ name: Check Hermes
on: on:
pull_request: pull_request:
paths: [apps/hermes/**] paths: [hermes/**]
push: push:
branches: [main] branches: [main]
paths: [apps/hermes/**] paths: [hermes/**]
jobs: jobs:
test: test:
runs-on: ubuntu-latest runs-on: ubuntu-latest
@ -20,4 +20,4 @@ jobs:
- name: Install protoc - name: Install protoc
uses: arduino/setup-protoc@v3 uses: arduino/setup-protoc@v3
- name: Run executor tests - name: Run executor tests
run: cargo test --manifest-path ./apps/hermes/Cargo.toml run: cargo test --manifest-path ./hermes/Cargo.toml

View File

@@ -1,37 +0,0 @@
name: Starknet contract
on:
  pull_request:
    paths:
      - target_chains/starknet/contracts/**
  push:
    branches:
      - main
    paths:
      - target_chains/starknet/contracts/**
jobs:
  check:
    name: Starknet Foundry tests
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: target_chains/starknet/contracts/
    steps:
      - uses: actions/checkout@v3
      - name: Install Scarb
        uses: software-mansion/setup-scarb@v1
        with:
          tool-versions: target_chains/starknet/contracts/.tool-versions
      - name: Install Starknet Foundry
        uses: foundry-rs/setup-snfoundry@v3
        with:
          tool-versions: target_chains/starknet/contracts/.tool-versions
      - name: Install Starkli
        run: curl https://get.starkli.sh | sh && . ~/.config/.starkli/env && starkliup -v $(awk '/starkli/{print $2}' .tool-versions)
      - name: Install Katana
        run: curl -L https://install.dojoengine.org | bash && PATH="$PATH:$HOME/.config/.dojo/bin" dojoup -v $(awk '/dojo/{print $2}' .tool-versions)
      - name: Check formatting
        run: scarb fmt --check
      - name: Run tests
        run: snforge test
      - name: Test local deployment script
        run: bash -c 'PATH="$PATH:$HOME/.config/.dojo/bin" katana & . ~/.config/.starkli/env && deploy/local_deploy'

View File

@@ -12,7 +12,7 @@ jobs:
      - uses: actions/checkout@v2
      - uses: actions/setup-node@v2
        with:
-         node-version: "18"
+         node-version: "16"
          registry-url: "https://registry.npmjs.org"
      - run: npm ci
      - run: npx lerna run build --no-private

View File

@@ -11,14 +11,8 @@ jobs:
    steps:
      - name: Checkout sources
        uses: actions/checkout@v2
-     - name: Install Rust
-       uses: actions-rs/toolchain@v1
-       with:
-         toolchain: stable
-         default: true
-         profile: minimal
-     - run: cargo +stable-x86_64-unknown-linux-gnu publish --token ${CARGO_REGISTRY_TOKEN}
+     - run: cargo publish --token ${CARGO_REGISTRY_TOKEN}
        env:
          CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }}
        working-directory: "target_chains/solana/pyth_solana_receiver_sdk"

View File

@@ -46,7 +46,7 @@ jobs:
        uses: docker/build-push-action@f2a1d5e99d037542a71f64918e516c093c6f3fc4
        with:
          context: .
-         file: "./apps/fortuna/Dockerfile"
+         file: "./fortuna/Dockerfile"
          push: true
          tags: ${{ steps.metadata_fortuna.outputs.tags }}
          labels: ${{ steps.metadata_fortuna.outputs.labels }}

View File

@@ -37,7 +37,7 @@ jobs:
        env:
          AWS_REGION: us-east-1
      - run: |
-         DOCKER_BUILDKIT=1 docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG -f apps/hermes/Dockerfile .
+         DOCKER_BUILDKIT=1 docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG -f hermes/Dockerfile .
          docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG
        env:
          ECR_REGISTRY: public.ecr.aws

View File

@@ -40,7 +40,7 @@ jobs:
        id: ecr_login
      - run: |
          DOCKER_BUILDKIT=1 docker build -t lerna -f Dockerfile.lerna .
-         DOCKER_BUILDKIT=1 docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG -f apps/price_pusher/Dockerfile .
+         DOCKER_BUILDKIT=1 docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG -f price_pusher/Dockerfile .
          docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG
        env:
          ECR_REGISTRY: public.ecr.aws

View File

@@ -5,17 +5,12 @@ on:
    tags:
      - "python-v*"
-env:
-  PYTHON_VERSION: "3.11"
 jobs:
   deploy:
     runs-on: ubuntu-20.04
     steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
-       with:
-         python-version: ${{ env.PYTHON_VERSION }}
      - name: Install dependencies
        run: |
          python3 -m pip install --upgrade poetry

.npmrc
View File

@@ -1 +0,0 @@
engine-strict=true

View File

@@ -47,22 +47,22 @@ repos:
      - id: cargo-fmt-hermes
        name: Cargo format for Hermes
        language: "rust"
-       entry: cargo +nightly-2024-03-26 fmt --manifest-path ./apps/hermes/Cargo.toml --all -- --config-path rustfmt.toml
+       entry: cargo +nightly-2024-03-26 fmt --manifest-path ./hermes/Cargo.toml --all -- --config-path rustfmt.toml
        pass_filenames: false
-       files: apps/hermes
+       files: hermes
      - id: cargo-clippy-hermes
        name: Cargo clippy for Hermes
        language: "rust"
-       entry: cargo +nightly-2024-03-26 clippy --manifest-path ./apps/hermes/Cargo.toml --tests --fix --allow-dirty --allow-staged -- -D warnings
+       entry: cargo +nightly-2024-03-26 clippy --manifest-path ./hermes/Cargo.toml --tests --fix --allow-dirty --allow-staged -- -D warnings
        pass_filenames: false
-       files: apps/hermes
+       files: hermes
      # Hooks for Fortuna
      - id: cargo-fmt-fortuna
        name: Cargo format for Fortuna
        language: "rust"
-       entry: cargo +nightly-2023-07-23 fmt --manifest-path ./apps/fortuna/Cargo.toml --all -- --config-path rustfmt.toml
+       entry: cargo +nightly-2023-07-23 fmt --manifest-path ./fortuna/Cargo.toml --all -- --config-path rustfmt.toml
        pass_filenames: false
-       files: apps/fortuna
+       files: fortuna
      # Hooks for message buffer contract
      - id: cargo-fmt-message-buffer
        name: Cargo format for message buffer contract
@@ -80,13 +80,13 @@ repos:
      - id: cargo-fmt-pythnet-sdk
        name: Cargo format for pythnet SDK
        language: "rust"
-       entry: cargo +nightly-2024-03-26 fmt --manifest-path ./pythnet/pythnet_sdk/Cargo.toml --all -- --config-path rustfmt.toml
+       entry: cargo +nightly-2023-07-23 fmt --manifest-path ./pythnet/pythnet_sdk/Cargo.toml --all -- --config-path rustfmt.toml
        pass_filenames: false
        files: pythnet/pythnet_sdk
      - id: cargo-clippy-pythnet-sdk
        name: Cargo clippy for pythnet SDK
        language: "rust"
-       entry: cargo +nightly-2024-03-26 clippy --manifest-path ./pythnet/pythnet_sdk/Cargo.toml --tests --fix --allow-dirty --allow-staged -- -D warnings
+       entry: cargo +nightly-2023-07-23 clippy --manifest-path ./pythnet/pythnet_sdk/Cargo.toml --tests --fix --allow-dirty --allow-staged -- -D warnings
        pass_filenames: false
        files: pythnet/pythnet_sdk
      # Hooks for solana receiver contract

View File

@@ -16,7 +16,7 @@ contracts, SDKs, and examples.
 ## Hermes
-> [hermes](./apps/hermes/)
+> [hermes](./hermes/)
 Hermes is an off-chain service which constantly observes Pythnet and the
 Wormhole network watching for price updates emitted from the Pyth contract. It
@@ -79,11 +79,10 @@ Lerna has some common failure modes that you may encounter:
 1. `npm ci` fails with a typescript compilation error about a missing package.
    This error likely means that the failing package has a `prepare` entry compiling the typescript in its `package.json`.
    Fix this error by moving that logic to the `prepublishOnly` entry.
-2. The software builds locally but fails in CI, or vice-versa.
+1. The software builds locally but fails in CI, or vice-versa.
    This error likely means that some local build caches need to be cleaned.
    The build error may not indicate that this is a caching issue, e.g., it may appear that the packages are being built in the wrong order.
    Delete `node_modules/`, `lib/` and `tsconfig.tsbuildinfo` from each package's subdirectory. then try again.
-3. `npm ci` fails due to wrong node version. Make sure to be using `v18`. Node version `v21` is not supported and known to cause issues.
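As a rough illustration only: the cache clean-up described in item 2 above amounts to something like the following, assuming a POSIX shell at the repository root (paths may need adjusting per package):

    # Remove per-package build caches, then reinstall from the lockfile.
    find . -name node_modules -type d -prune -exec rm -rf {} +
    find . -name lib -type d -prune -exec rm -rf {} +
    find . -name tsconfig.tsbuildinfo -type f -delete
    npm ci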
 ## Audit / Feature Status

View File

@@ -1,7 +0,0 @@
chains:
  lightlink-pegasus:
    commitments:
      # prettier-ignore
      - seed: [219,125,217,197,234,88,208,120,21,181,172,143,239,102,41,233,167,212,237,106,37,255,184,165,238,121,230,155,116,158,173,48]
        chain_length: 10000
        original_commitment_sequence_number: 104

View File

@@ -1 +0,0 @@
nightly-2023-07-23

View File

@@ -1,487 +0,0 @@
use {
crate::{
api::{
self,
BlockchainState,
},
chain::{
ethereum::SignablePythContract,
reader::{
BlockNumber,
RequestedWithCallbackEvent,
},
},
config::EthereumConfig,
},
anyhow::{
anyhow,
Result,
},
ethers::{
contract::ContractError,
providers::{
Middleware,
Provider,
Ws,
},
types::U256,
},
futures::StreamExt,
std::sync::Arc,
tokio::{
spawn,
sync::mpsc,
time::{
self,
Duration,
},
},
tracing::{
self,
Instrument,
},
};
#[derive(Debug)]
pub struct BlockRange {
pub from: BlockNumber,
pub to: BlockNumber,
}
/// How much to wait before retrying in case of an RPC error
const RETRY_INTERVAL: Duration = Duration::from_secs(5);
/// How many blocks to look back for events that might be missed when starting the keeper
const BACKLOG_RANGE: u64 = 1000;
/// How many blocks to fetch events for in a single rpc call
const BLOCK_BATCH_SIZE: u64 = 100;
/// How much to wait before polling the next latest block
const POLL_INTERVAL: Duration = Duration::from_secs(5);
/// Get the latest safe block number for the chain. Retry internally if there is an error.
async fn get_latest_safe_block(chain_state: &BlockchainState) -> BlockNumber {
loop {
match chain_state
.contract
.get_block_number(chain_state.confirmed_block_status)
.await
{
Ok(latest_confirmed_block) => {
tracing::info!(
"Fetched latest safe block {}",
latest_confirmed_block - chain_state.reveal_delay_blocks
);
return latest_confirmed_block - chain_state.reveal_delay_blocks;
}
Err(e) => {
tracing::error!("Error while getting block number. error: {:?}", e);
time::sleep(RETRY_INTERVAL).await;
}
}
}
}
/// Run threads to handle events for the last `BACKLOG_RANGE` blocks, watch for new blocks and
/// handle any events for the new blocks.
#[tracing::instrument(name="keeper", skip_all, fields(chain_id=chain_state.id))]
pub async fn run_keeper_threads(
private_key: String,
chain_eth_config: EthereumConfig,
chain_state: BlockchainState,
) {
tracing::info!("starting keeper");
let latest_safe_block = get_latest_safe_block(&chain_state).in_current_span().await;
tracing::info!("latest safe block: {}", &latest_safe_block);
let contract = Arc::new(
SignablePythContract::from_config(&chain_eth_config, &private_key)
.await
.expect("Chain config should be valid"),
);
// Spawn a thread to handle the events from last BACKLOG_RANGE blocks.
spawn(
process_backlog(
BlockRange {
from: latest_safe_block.saturating_sub(BACKLOG_RANGE),
to: latest_safe_block,
},
contract.clone(),
chain_eth_config.gas_limit,
chain_state.clone(),
)
.in_current_span(),
);
let (tx, rx) = mpsc::channel::<BlockRange>(1000);
// Spawn a thread to watch for new blocks and send the range of blocks for which events has not been handled to the `tx` channel.
spawn(
watch_blocks_wrapper(
chain_state.clone(),
latest_safe_block,
tx,
chain_eth_config.geth_rpc_wss.clone(),
)
.in_current_span(),
);
// Spawn a thread that listens for block ranges on the `rx` channel and processes the events for those blocks.
spawn(
process_new_blocks(
chain_state.clone(),
rx,
Arc::clone(&contract),
chain_eth_config.gas_limit,
)
.in_current_span(),
);
}
/// Process an event for a chain. It estimates the gas for the reveal with callback and
/// submits the transaction if the gas estimate is below the gas limit.
/// It will return an Error if the gas estimation failed with a provider error or if the
/// reveal with callback failed with a provider error.
pub async fn process_event(
event: RequestedWithCallbackEvent,
chain_config: &BlockchainState,
contract: &Arc<SignablePythContract>,
gas_limit: U256,
) -> Result<()> {
if chain_config.provider_address != event.provider_address {
return Ok(());
}
let provider_revelation = match chain_config.state.reveal(event.sequence_number) {
Ok(result) => result,
Err(e) => {
tracing::error!(
sequence_number = &event.sequence_number,
"Error while revealing with error: {:?}",
e
);
return Ok(());
}
};
let gas_estimate_res = chain_config
.contract
.estimate_reveal_with_callback_gas(
event.provider_address,
event.sequence_number,
event.user_random_number,
provider_revelation,
)
.in_current_span()
.await;
match gas_estimate_res {
Ok(gas_estimate_option) => match gas_estimate_option {
Some(gas_estimate) => {
// Pad the gas estimate by 33%
let (gas_estimate, _) = gas_estimate
.saturating_mul(U256::from(4))
.div_mod(U256::from(3));
if gas_estimate > gas_limit {
tracing::error!(
sequence_number = &event.sequence_number,
"Gas estimate for reveal with callback is higher than the gas limit"
);
return Ok(());
}
let contract_call = contract
.reveal_with_callback(
event.provider_address,
event.sequence_number,
event.user_random_number,
provider_revelation,
)
.gas(gas_estimate);
let res = contract_call.send().await;
let pending_tx = match res {
Ok(pending_tx) => pending_tx,
Err(e) => match e {
// If there is a provider error, we weren't able to send the transaction.
// We will return an error. So, that the caller can decide what to do (retry).
ContractError::ProviderError { e } => return Err(e.into()),
// For all the other errors, it is likely the case we won't be able to reveal for
// ever. We will return an Ok(()) to signal that we have processed this reveal
// and concluded that its Ok to not reveal.
_ => {
tracing::error!(
sequence_number = &event.sequence_number,
"Error while revealing with error: {:?}",
e
);
return Ok(());
}
},
};
match pending_tx.await {
Ok(res) => {
tracing::info!(
sequence_number = &event.sequence_number,
"Revealed with res: {:?}",
res
);
Ok(())
}
Err(e) => {
tracing::error!(
sequence_number = &event.sequence_number,
"Error while revealing with error: {:?}",
e
);
Err(e.into())
}
}
}
None => {
tracing::info!(
sequence_number = &event.sequence_number,
"Not processing event"
);
Ok(())
}
},
Err(e) => {
tracing::error!(
sequence_number = &event.sequence_number,
"Error while simulating reveal with error: {:?}",
e
);
Err(e)
}
}
}
/// Process a range of blocks in batches. It calls the `process_single_block_batch` method for each batch.
#[tracing::instrument(skip_all, fields(range_from_block=block_range.from, range_to_block=block_range.to))]
pub async fn process_block_range(
block_range: BlockRange,
contract: Arc<SignablePythContract>,
gas_limit: U256,
chain_state: api::BlockchainState,
) {
let BlockRange {
from: first_block,
to: last_block,
} = block_range;
let mut current_block = first_block;
while current_block <= last_block {
let mut to_block = current_block + BLOCK_BATCH_SIZE;
if to_block > last_block {
to_block = last_block;
}
process_single_block_batch(
BlockRange {
from: current_block,
to: to_block,
},
contract.clone(),
gas_limit,
chain_state.clone(),
)
.in_current_span()
.await;
current_block = to_block + 1;
}
}
/// Process a batch of blocks for a chain. It will fetch events for all the blocks in a single call for the provided batch
/// and then try to process them one by one. If the process fails, it will retry indefinitely.
#[tracing::instrument(name="batch", skip_all, fields(batch_from_block=block_range.from, batch_to_block=block_range.to))]
pub async fn process_single_block_batch(
block_range: BlockRange,
contract: Arc<SignablePythContract>,
gas_limit: U256,
chain_state: api::BlockchainState,
) {
loop {
let events_res = chain_state
.contract
.get_request_with_callback_events(block_range.from, block_range.to)
.await;
match events_res {
Ok(events) => {
tracing::info!(num_of_events = &events.len(), "Processing",);
for event in &events {
tracing::info!(sequence_number = &event.sequence_number, "Processing event",);
while let Err(e) =
process_event(event.clone(), &chain_state, &contract, gas_limit)
.in_current_span()
.await
{
tracing::error!(
sequence_number = &event.sequence_number,
"Error while processing event. Waiting for {} seconds before retry. error: {:?}",
RETRY_INTERVAL.as_secs(),
e
);
time::sleep(RETRY_INTERVAL).await;
}
tracing::info!(sequence_number = &event.sequence_number, "Processed event",);
}
tracing::info!(num_of_events = &events.len(), "Processed",);
break;
}
Err(e) => {
tracing::error!(
"Error while getting events. Waiting for {} seconds before retry. error: {:?}",
RETRY_INTERVAL.as_secs(),
e
);
time::sleep(RETRY_INTERVAL).await;
}
}
}
}
/// Wrapper for the `watch_blocks` method. If there was an error while watching, it will retry after a delay.
/// It retries indefinitely.
#[tracing::instrument(name="watch_blocks", skip_all, fields(initial_safe_block=latest_safe_block))]
pub async fn watch_blocks_wrapper(
chain_state: BlockchainState,
latest_safe_block: BlockNumber,
tx: mpsc::Sender<BlockRange>,
geth_rpc_wss: Option<String>,
) {
let mut last_safe_block_processed = latest_safe_block;
loop {
if let Err(e) = watch_blocks(
chain_state.clone(),
&mut last_safe_block_processed,
tx.clone(),
geth_rpc_wss.clone(),
)
.in_current_span()
.await
{
tracing::error!("watching blocks. error: {:?}", e);
time::sleep(RETRY_INTERVAL).await;
}
}
}
/// Watch for new blocks and send the range of blocks for which events have not been handled to the `tx` channel.
/// We are subscribing to new blocks instead of events. If we miss some blocks, it will be fine as we are sending
/// block ranges to the `tx` channel. If we have subscribed to events, we could have missed those and won't even
/// know about it.
pub async fn watch_blocks(
chain_state: BlockchainState,
last_safe_block_processed: &mut BlockNumber,
tx: mpsc::Sender<BlockRange>,
geth_rpc_wss: Option<String>,
) -> Result<()> {
tracing::info!("Watching blocks to handle new events");
let provider_option = match geth_rpc_wss {
Some(wss) => Some(match Provider::<Ws>::connect(wss.clone()).await {
Ok(provider) => provider,
Err(e) => {
tracing::error!("Error while connecting to wss: {}. error: {:?}", wss, e);
return Err(e.into());
}
}),
None => {
tracing::info!("No wss provided");
None
}
};
let mut stream_option = match provider_option {
Some(ref provider) => Some(match provider.subscribe_blocks().await {
Ok(client) => client,
Err(e) => {
tracing::error!("Error while subscribing to blocks. error {:?}", e);
return Err(e.into());
}
}),
None => None,
};
loop {
match stream_option {
Some(ref mut stream) => {
if let None = stream.next().await {
tracing::error!("Error blocks subscription stream ended");
return Err(anyhow!("Error blocks subscription stream ended"));
}
}
None => {
time::sleep(POLL_INTERVAL).await;
}
}
let latest_safe_block = get_latest_safe_block(&chain_state).in_current_span().await;
if latest_safe_block > *last_safe_block_processed {
match tx
.send(BlockRange {
from: *last_safe_block_processed + 1,
to: latest_safe_block,
})
.await
{
Ok(_) => {
tracing::info!(
from_block = *last_safe_block_processed + 1,
to_block = &latest_safe_block,
"Block range sent to handle events",
);
*last_safe_block_processed = latest_safe_block;
}
Err(e) => {
tracing::error!(
"Error while sending block range to handle events. These will be handled in next call. error: {:?}",
e
);
}
};
}
}
}
/// It waits on rx channel to receive block ranges and then calls process_block_range to process them.
#[tracing::instrument(skip_all)]
pub async fn process_new_blocks(
chain_state: BlockchainState,
mut rx: mpsc::Receiver<BlockRange>,
contract: Arc<SignablePythContract>,
gas_limit: U256,
) {
tracing::info!("Waiting for new block ranges to process");
loop {
if let Some(block_range) = rx.recv().await {
process_block_range(
block_range,
Arc::clone(&contract),
gas_limit,
chain_state.clone(),
)
.in_current_span()
.await;
}
}
}
/// Processes the backlog_range for a chain.
#[tracing::instrument(skip_all)]
pub async fn process_backlog(
backlog_range: BlockRange,
contract: Arc<SignablePythContract>,
gas_limit: U256,
chain_state: BlockchainState,
) {
tracing::info!("Processing backlog");
process_block_range(backlog_range, contract, gas_limit, chain_state)
.in_current_span()
.await;
tracing::info!("Backlog processed");
}

View File

@@ -1,25 +0,0 @@
use {
crate::{
api::ApiState,
state::aggregate::Aggregates,
},
axum::{
extract::State,
http::StatusCode,
response::{
IntoResponse,
Response,
},
},
};
pub async fn ready<S>(State(state): State<ApiState<S>>) -> Response
where
S: Aggregates,
{
let state = &*state.state;
match Aggregates::is_ready(state).await {
true => (StatusCode::OK, "OK").into_response(),
false => (StatusCode::SERVICE_UNAVAILABLE, "Service Unavailable").into_response(),
}
}

View File

@@ -1,235 +0,0 @@
use {
crate::{
api::{
rest::{
verify_price_ids_exist,
RestError,
},
types::{
BinaryPriceUpdate,
EncodingType,
ParsedPriceUpdate,
PriceIdInput,
PriceUpdate,
RpcPriceIdentifier,
},
ApiState,
},
state::aggregate::{
Aggregates,
AggregationEvent,
RequestTime,
},
},
anyhow::Result,
axum::{
extract::State,
response::sse::{
Event,
KeepAlive,
Sse,
},
},
futures::Stream,
pyth_sdk::PriceIdentifier,
serde::Deserialize,
serde_qs::axum::QsQuery,
std::convert::Infallible,
tokio::sync::broadcast,
tokio_stream::{
wrappers::BroadcastStream,
StreamExt as _,
},
utoipa::IntoParams,
};
#[derive(Debug, Deserialize, IntoParams)]
#[into_params(parameter_in = Query)]
pub struct StreamPriceUpdatesQueryParams {
/// Get the most recent price update for this set of price feed ids.
///
/// This parameter can be provided multiple times to retrieve multiple price updates,
/// for example see the following query string:
///
/// ```
/// ?ids[]=a12...&ids[]=b4c...
/// ```
#[param(rename = "ids[]")]
#[param(example = "e62df6c8b4a85fe1a67db44dc12de5db330f7ac66b72dc658afedf0f4a415b43")]
ids: Vec<PriceIdInput>,
/// If true, include the parsed price update in the `parsed` field of each returned feed. Default is `hex`.
#[serde(default)]
encoding: EncodingType,
/// If true, include the parsed price update in the `parsed` field of each returned feed. Default is `true`.
#[serde(default = "default_true")]
parsed: bool,
/// If true, allows unordered price updates to be included in the stream.
#[serde(default)]
allow_unordered: bool,
/// If true, only include benchmark prices that are the initial price updates at a given timestamp (i.e., prevPubTime != pubTime).
#[serde(default)]
benchmarks_only: bool,
}
fn default_true() -> bool {
true
}
#[utoipa::path(
get,
path = "/v2/updates/price/stream",
responses(
(status = 200, description = "Price updates retrieved successfully", body = PriceUpdate),
(status = 404, description = "Price ids not found", body = String)
),
params(StreamPriceUpdatesQueryParams)
)]
/// SSE route handler for streaming price updates.
pub async fn price_stream_sse_handler<S>(
State(state): State<ApiState<S>>,
QsQuery(params): QsQuery<StreamPriceUpdatesQueryParams>,
) -> Result<Sse<impl Stream<Item = Result<Event, Infallible>>>, RestError>
where
S: Aggregates,
S: Sync,
S: Send,
S: 'static,
{
let price_ids: Vec<PriceIdentifier> = params.ids.into_iter().map(Into::into).collect();
verify_price_ids_exist(&state, &price_ids).await?;
// Clone the update_tx receiver to listen for new price updates
let update_rx: broadcast::Receiver<AggregationEvent> = Aggregates::subscribe(&*state.state);
// Convert the broadcast receiver into a Stream
let stream = BroadcastStream::new(update_rx);
let sse_stream = stream.then(move |message| {
let state_clone = state.clone(); // Clone again to use inside the async block
let price_ids_clone = price_ids.clone(); // Clone again for use inside the async block
async move {
match message {
Ok(event) => {
match handle_aggregation_event(
event,
state_clone,
price_ids_clone,
params.encoding,
params.parsed,
params.benchmarks_only,
params.allow_unordered,
)
.await
{
Ok(Some(update)) => Ok(Event::default()
.json_data(update)
.unwrap_or_else(|e| error_event(e))),
Ok(None) => Ok(Event::default().comment("No update available")),
Err(e) => Ok(error_event(e)),
}
}
Err(e) => Ok(error_event(e)),
}
}
});
Ok(Sse::new(sse_stream).keep_alive(KeepAlive::default()))
}
async fn handle_aggregation_event<S>(
event: AggregationEvent,
state: ApiState<S>,
mut price_ids: Vec<PriceIdentifier>,
encoding: EncodingType,
parsed: bool,
benchmarks_only: bool,
allow_unordered: bool,
) -> Result<Option<PriceUpdate>>
where
S: Aggregates,
{
// Handle out-of-order events
if let AggregationEvent::OutOfOrder { .. } = event {
if !allow_unordered {
return Ok(None);
}
}
// We check for available price feed ids to ensure that the price feed ids provided exists since price feeds can be removed.
let available_price_feed_ids = Aggregates::get_price_feed_ids(&*state.state).await;
price_ids.retain(|price_feed_id| available_price_feed_ids.contains(price_feed_id));
let mut price_feeds_with_update_data = Aggregates::get_price_feeds_with_update_data(
&*state.state,
&price_ids,
RequestTime::AtSlot(event.slot()),
)
.await?;
let mut parsed_price_updates: Vec<ParsedPriceUpdate> = price_feeds_with_update_data
.price_feeds
.into_iter()
.map(|price_feed| price_feed.into())
.collect();
if benchmarks_only {
// Remove those with metadata.prev_publish_time != price.publish_time from parsed_price_updates
parsed_price_updates.retain(|price_feed| {
price_feed
.metadata
.prev_publish_time
.map_or(false, |prev_time| {
prev_time != price_feed.price.publish_time
})
});
// Retain price id in price_ids that are in parsed_price_updates
price_ids.retain(|price_id| {
parsed_price_updates
.iter()
.any(|price_feed| price_feed.id == RpcPriceIdentifier::from(*price_id))
});
price_feeds_with_update_data = Aggregates::get_price_feeds_with_update_data(
&*state.state,
&price_ids,
RequestTime::AtSlot(event.slot()),
)
.await?;
}
// Check if price_ids is empty after filtering and return None if it is
if price_ids.is_empty() {
return Ok(None);
}
let price_update_data = price_feeds_with_update_data.update_data;
let encoded_data: Vec<String> = price_update_data
.into_iter()
.map(|data| encoding.encode_str(&data))
.collect();
let binary_price_update = BinaryPriceUpdate {
encoding,
data: encoded_data,
};
Ok(Some(PriceUpdate {
binary: binary_price_update,
parsed: if parsed {
Some(parsed_price_updates)
} else {
None
},
}))
}
fn error_event<E: std::fmt::Debug>(e: E) -> Event {
Event::default()
.event("error")
.data(format!("Error receiving update: {:?}", e))
}

View File

@@ -1,96 +0,0 @@
use {
crate::{
api::types::{
AssetType,
PriceFeedMetadata,
},
state::State,
},
anyhow::Result,
tokio::sync::RwLock,
};
pub const DEFAULT_PRICE_FEEDS_CACHE_UPDATE_INTERVAL: u64 = 600;
pub struct PriceFeedMetaState {
pub data: RwLock<Vec<PriceFeedMetadata>>,
}
impl PriceFeedMetaState {
pub fn new() -> Self {
Self {
data: RwLock::new(Vec::new()),
}
}
}
/// Allow downcasting State into CacheState for functions that depend on the `Cache` service.
impl<'a> From<&'a State> for &'a PriceFeedMetaState {
fn from(state: &'a State) -> &'a PriceFeedMetaState {
&state.price_feed_meta
}
}
#[async_trait::async_trait]
pub trait PriceFeedMeta {
async fn retrieve_price_feeds_metadata(&self) -> Result<Vec<PriceFeedMetadata>>;
async fn store_price_feeds_metadata(
&self,
price_feeds_metadata: &[PriceFeedMetadata],
) -> Result<()>;
async fn get_price_feeds_metadata(
&self,
query: Option<String>,
asset_type: Option<AssetType>,
) -> Result<Vec<PriceFeedMetadata>>;
}
#[async_trait::async_trait]
impl<T> PriceFeedMeta for T
where
for<'a> &'a T: Into<&'a PriceFeedMetaState>,
T: Sync,
{
async fn retrieve_price_feeds_metadata(&self) -> Result<Vec<PriceFeedMetadata>> {
let price_feeds_metadata = self.into().data.read().await;
Ok(price_feeds_metadata.clone())
}
async fn store_price_feeds_metadata(
&self,
price_feeds_metadata: &[PriceFeedMetadata],
) -> Result<()> {
let mut price_feeds_metadata_write_guard = self.into().data.write().await;
*price_feeds_metadata_write_guard = price_feeds_metadata.to_vec();
Ok(())
}
async fn get_price_feeds_metadata(
&self,
query: Option<String>,
asset_type: Option<AssetType>,
) -> Result<Vec<PriceFeedMetadata>> {
let mut price_feeds_metadata = self.retrieve_price_feeds_metadata().await?;
// Filter by query if provided
if let Some(query_str) = &query {
price_feeds_metadata.retain(|feed| {
feed.attributes.get("symbol").map_or(false, |symbol| {
symbol.to_lowercase().contains(&query_str.to_lowercase())
})
});
}
// Filter by asset_type if provided
if let Some(asset_type) = &asset_type {
price_feeds_metadata.retain(|feed| {
feed.attributes.get("asset_type").map_or(false, |type_str| {
type_str.to_lowercase() == asset_type.to_string().to_lowercase()
})
});
}
Ok(price_feeds_metadata)
}
}

View File

@@ -1,13 +0,0 @@
{
"endpoint": "https://api.mainnet-beta.solana.com",
"keypair-file": "./id.json",
"shard-id": 1,
"jito-endpoint": "mainnet.block-engine.jito.wtf",
"jito-keypair-file": "./jito.json",
"jito-tip-lamports": "100000",
"jito-bundle-size": "5",
"price-config-file": "./price-config.yaml",
"price-service-endpoint": "https://hermes.pyth.network/",
"pyth-contract-address": "pythWSnswVUd12oZpeFP8e9CVaEqJg25g1Vtc2biRsT",
"pushing-frequency": "30"
}

View File

@@ -1,9 +0,0 @@
{
"endpoint": "https://api.devnet.solana.com",
"keypair-file": "./id.json",
"shard-id": 1,
"price-config-file": "./price-config.yaml",
"price-service-endpoint": "https://hermes.pyth.network/",
"pyth-contract-address": "pythWSnswVUd12oZpeFP8e9CVaEqJg25g1Vtc2biRsT",
"pushing-frequency": "30"
}

View File

@@ -1,180 +0,0 @@
import { PythSolanaReceiver } from "@pythnetwork/pyth-solana-receiver";
import {
ChainPriceListener,
IPricePusher,
PriceInfo,
PriceItem,
} from "../interface";
import { DurationInSeconds } from "../utils";
import { PriceServiceConnection } from "@pythnetwork/price-service-client";
import {
sendTransactions,
sendTransactionsJito,
} from "@pythnetwork/solana-utils";
import { SearcherClient } from "jito-ts/dist/sdk/block-engine/searcher";
export class SolanaPriceListener extends ChainPriceListener {
constructor(
private pythSolanaReceiver: PythSolanaReceiver,
private shardId: number,
priceItems: PriceItem[],
config: {
pollingFrequency: DurationInSeconds;
}
) {
super("solana", config.pollingFrequency, priceItems);
}
async getOnChainPriceInfo(priceId: string): Promise<PriceInfo | undefined> {
try {
const priceFeedAccount =
await this.pythSolanaReceiver.fetchPriceFeedAccount(
this.shardId,
Buffer.from(priceId, "hex")
);
console.log(
`Polled a Solana on chain price for feed ${this.priceIdToAlias.get(
priceId
)} (${priceId}).`
);
if (priceFeedAccount) {
return {
conf: priceFeedAccount.priceMessage.conf.toString(),
price: priceFeedAccount.priceMessage.price.toString(),
publishTime: priceFeedAccount.priceMessage.publishTime.toNumber(),
};
} else {
return undefined;
}
} catch (e) {
console.error(`Polling on-chain price for ${priceId} failed. Error:`);
console.error(e);
return undefined;
}
}
}
export class SolanaPricePusher implements IPricePusher {
constructor(
private pythSolanaReceiver: PythSolanaReceiver,
private priceServiceConnection: PriceServiceConnection,
private shardId: number,
private computeUnitPriceMicroLamports: number
) {}
async updatePriceFeed(
priceIds: string[],
pubTimesToPush: number[]
): Promise<void> {
if (priceIds.length === 0) {
return;
}
let priceFeedUpdateData;
try {
priceFeedUpdateData = await this.priceServiceConnection.getLatestVaas(
priceIds
);
} catch (e: any) {
console.error(new Date(), "getPriceFeedsUpdateData failed:", e);
return;
}
const transactionBuilder = this.pythSolanaReceiver.newTransactionBuilder({
closeUpdateAccounts: true,
});
await transactionBuilder.addUpdatePriceFeed(
priceFeedUpdateData,
this.shardId
);
const transactions = await transactionBuilder.buildVersionedTransactions({
computeUnitPriceMicroLamports: this.computeUnitPriceMicroLamports,
tightComputeBudget: true,
});
try {
await sendTransactions(
transactions,
this.pythSolanaReceiver.connection,
this.pythSolanaReceiver.wallet
);
console.log(new Date(), "updatePriceFeed successful");
} catch (e: any) {
console.error(new Date(), "updatePriceFeed failed", e);
return;
}
}
}
export class SolanaPricePusherJito implements IPricePusher {
constructor(
private pythSolanaReceiver: PythSolanaReceiver,
private priceServiceConnection: PriceServiceConnection,
private shardId: number,
private jitoTipLamports: number,
private searcherClient: SearcherClient,
private jitoBundleSize: number
) {}
async updatePriceFeed(
priceIds: string[],
pubTimesToPush: number[]
): Promise<void> {
let priceFeedUpdateData;
try {
priceFeedUpdateData = await this.priceServiceConnection.getLatestVaas(
priceIds
);
} catch (e: any) {
console.error(new Date(), "getPriceFeedsUpdateData failed:", e);
return;
}
const transactionBuilder = this.pythSolanaReceiver.newTransactionBuilder({
closeUpdateAccounts: false,
});
await transactionBuilder.addUpdatePriceFeed(
priceFeedUpdateData,
this.shardId
);
await transactionBuilder.addClosePreviousEncodedVaasInstructions();
const transactions = await transactionBuilder.buildVersionedTransactions({
jitoTipLamports: this.jitoTipLamports,
tightComputeBudget: true,
jitoBundleSize: this.jitoBundleSize,
});
const firstSignature = await sendTransactionsJito(
transactions.slice(0, this.jitoBundleSize),
this.searcherClient,
this.pythSolanaReceiver.wallet
);
const blockhashResult =
await this.pythSolanaReceiver.connection.getLatestBlockhashAndContext({
commitment: "confirmed",
});
await this.pythSolanaReceiver.connection.confirmTransaction(
{
signature: firstSignature,
blockhash: blockhashResult.value.blockhash,
lastValidBlockHeight: blockhashResult.value.lastValidBlockHeight,
},
"confirmed"
);
for (
let i = this.jitoBundleSize;
i < transactions.length;
i += this.jitoBundleSize
) {
await sendTransactionsJito(
transactions.slice(i, i + this.jitoBundleSize),
this.searcherClient,
this.pythSolanaReceiver.wallet
);
}
}
}

View File

@@ -1,5 +1,5 @@
 {
-  "name": "@pythnetwork/contract-manager",
+  "name": "contract_manager",
   "version": "1.0.0",
   "description": "Set of tools to manage pyth contracts",
   "private": true,
@@ -23,7 +23,7 @@
   "dependencies": {
     "@certusone/wormhole-sdk": "^0.9.8",
     "@coral-xyz/anchor": "^0.29.0",
-    "@injectivelabs/networks": "^1.14.6",
+    "@injectivelabs/networks": "1.0.68",
     "@mysten/sui.js": "^0.49.1",
     "@pythnetwork/cosmwasm-deploy-tools": "*",
     "@pythnetwork/entropy-sdk-solidity": "*",

View File

@@ -5,7 +5,6 @@ import { createHash } from "crypto";
 import { DefaultStore } from "../src/store";
 import {
   CosmosUpgradeContract,
-  EvmExecute,
   EvmSetWormholeAddress,
   EvmUpgradeContract,
   getProposalInstructions,
@@ -20,10 +19,8 @@ import {
 import NodeWallet from "@coral-xyz/anchor/dist/cjs/nodewallet";
 import { AccountMeta, Keypair, PublicKey } from "@solana/web3.js";
 import {
-  EvmEntropyContract,
   EvmPriceFeedContract,
-  getCodeDigestWithoutAddress,
-  EvmWormholeContract,
+  WormholeEvmContract,
 } from "../src/contracts/evm";
 import Web3 from "web3";
@@ -73,7 +70,7 @@ async function main() {
         instruction.governanceAction.targetChainId
       ) {
         const address = instruction.governanceAction.address;
-        const contract = new EvmWormholeContract(chain, address);
+        const contract = new WormholeEvmContract(chain, address);
         const currentIndex = await contract.getCurrentGuardianSetIndex();
         const guardianSet = await contract.getGuardianSet();
@@ -137,70 +134,6 @@ async function main() {
       }
     }
   }
if (instruction.governanceAction instanceof EvmExecute) {
// Note: it only checks for upgrade entropy contracts right now
console.log(
`Verifying EVMExecute Contract on ${instruction.governanceAction.targetChainId}`
);
for (const chain of Object.values(DefaultStore.chains)) {
if (
chain instanceof EvmChain &&
chain.wormholeChainName ===
instruction.governanceAction.targetChainId
) {
const executorAddress =
instruction.governanceAction.executorAddress;
const callAddress = instruction.governanceAction.callAddress;
const calldata = instruction.governanceAction.calldata;
// currently executor is only being used by the entropy contract
const contract = new EvmEntropyContract(chain, callAddress);
const owner = await contract.getOwner();
if (
executorAddress.toUpperCase() !==
owner.replace("0x", "").toUpperCase()
) {
console.log(
`Executor Address: ${executorAddress.toUpperCase()} is not equal to Owner Address: ${owner
.replace("0x", "")
.toUpperCase()}`
);
continue;
}
const calldataHex = calldata.toString("hex");
const web3 = new Web3();
const methodSignature = web3.eth.abi
.encodeFunctionSignature("upgradeTo(address)")
.replace("0x", "");
let newImplementationAddress: string | undefined = undefined;
if (calldataHex.startsWith(methodSignature)) {
newImplementationAddress = web3.eth.abi.decodeParameter(
"address",
calldataHex.replace(methodSignature, "")
) as unknown as string;
}
if (newImplementationAddress === undefined) {
console.log(
`We couldn't parse the instruction for ${chain.getId()}`
);
continue;
}
const newImplementationCode = await getCodeDigestWithoutAddress(
chain.getRpcUrl(),
newImplementationAddress
);
// this should be the same keccak256 of the deployedCode property generated by truffle
console.log(
`${chain.getId()} new implementation address:${newImplementationAddress} digest:${newImplementationCode}`
);
}
}
}
     }
   }
 }

View File

@@ -1,18 +1,11 @@
-import {
-  DefaultStore,
-  EvmChain,
-  EvmEntropyContract,
-  EvmWormholeContract,
-  getDefaultDeploymentConfig,
-  PrivateKey,
-} from "../src";
+import { DefaultStore, EvmChain, PrivateKey } from "../src";
 import { existsSync, readFileSync, writeFileSync } from "fs";
 import { join } from "path";
 import Web3 from "web3";
 import { Contract } from "web3-eth-contract";
 import { InferredOptionType } from "yargs";
-export interface BaseDeployConfig {
+interface DeployConfig {
   gasMultiplier: number;
   gasPriceMultiplier: number;
   jsonOutputDir: string;
@@ -26,7 +19,7 @@ export interface BaseDeployConfig {
 export async function deployIfNotCached(
   cacheFile: string,
   chain: EvmChain,
-  config: BaseDeployConfig,
+  config: DeployConfig,
   artifactName: string,
   deployArgs: any[], // eslint-disable-line @typescript-eslint/no-explicit-any
   cacheKey?: string
@@ -79,7 +72,7 @@ export const COMMON_DEPLOY_OPTIONS = {
   chain: {
     type: "array",
     demandOption: true,
-    desc: "Chain to upload the contract on. Can be one of the chains available in the store",
+    desc: "Chain to upload the contract on. Can be one of the evm chains available in the store",
   },
   "deployment-type": {
     type: "string",
@@ -188,149 +181,3 @@ export function getSelectedChains(argv: {
   }
   return selectedChains;
 }
/**
* Finds the entropy contract for a given EVM chain.
* @param {EvmChain} chain The EVM chain to find the entropy contract for.
* @returns The entropy contract for the given EVM chain.
* @throws {Error} an error if the entropy contract is not found for the given EVM chain.
*/
export function findEntropyContract(chain: EvmChain): EvmEntropyContract {
for (const contract of Object.values(DefaultStore.entropy_contracts)) {
if (contract.getChain().getId() === chain.getId()) {
return contract;
}
}
throw new Error(`Entropy contract not found for chain ${chain.getId()}`);
}
/**
* Finds an EVM chain by its name.
* @param {string} chainName The name of the chain to find.
* @returns The EVM chain instance.
* @throws {Error} an error if the chain is not found or is not an EVM chain.
*/
export function findEvmChain(chainName: string): EvmChain {
const chain = DefaultStore.chains[chainName];
if (!chain) {
throw new Error(`Chain ${chainName} not found`);
} else if (!(chain instanceof EvmChain)) {
throw new Error(`Chain ${chainName} is not an EVM chain`);
}
return chain;
}
/**
* Finds the wormhole contract for a given EVM chain.
* @param {EvmChain} chain The EVM chain to find the wormhole contract for.
* @returns If found, the wormhole contract for the given EVM chain. Else, undefined
*/
export function findWormholeContract(
chain: EvmChain
): EvmWormholeContract | undefined {
for (const contract of Object.values(DefaultStore.wormhole_contracts)) {
if (
contract instanceof EvmWormholeContract &&
contract.getChain().getId() === chain.getId()
) {
return contract;
}
}
}
export interface DeployWormholeReceiverContractsConfig
extends BaseDeployConfig {
saveContract: boolean;
type: "stable" | "beta";
}
/**
* Deploys the wormhole receiver contract for a given EVM chain.
* @param {EvmChain} chain The EVM chain to find the wormhole receiver contract for.
* @param {DeployWormholeReceiverContractsConfig} config The deployment configuration.
* @param {string} cacheFile The path to the cache file.
* @returns {EvmWormholeContract} The wormhole contract for the given EVM chain.
*/
export async function deployWormholeContract(
chain: EvmChain,
config: DeployWormholeReceiverContractsConfig,
cacheFile: string
): Promise<EvmWormholeContract> {
const receiverSetupAddr = await deployIfNotCached(
cacheFile,
chain,
config,
"ReceiverSetup",
[]
);
const receiverImplAddr = await deployIfNotCached(
cacheFile,
chain,
config,
"ReceiverImplementation",
[]
);
// Craft the init data for the proxy contract
const setupContract = getWeb3Contract(
config.jsonOutputDir,
"ReceiverSetup",
receiverSetupAddr
);
const { wormholeConfig } = getDefaultDeploymentConfig(config.type);
const initData = setupContract.methods
.setup(
receiverImplAddr,
wormholeConfig.initialGuardianSet.map((addr: string) => "0x" + addr),
chain.getWormholeChainId(),
wormholeConfig.governanceChainId,
"0x" + wormholeConfig.governanceContract
)
.encodeABI();
const wormholeReceiverAddr = await deployIfNotCached(
cacheFile,
chain,
config,
"WormholeReceiver",
[receiverSetupAddr, initData]
);
const wormholeContract = new EvmWormholeContract(chain, wormholeReceiverAddr);
if (config.type === "stable") {
console.log(`Syncing mainnet guardian sets for ${chain.getId()}...`);
// TODO: Add a way to pass gas configs to this
await wormholeContract.syncMainnetGuardianSets(config.privateKey);
console.log(`✅ Synced mainnet guardian sets for ${chain.getId()}`);
}
if (config.saveContract) {
DefaultStore.wormhole_contracts[wormholeContract.getId()] =
wormholeContract;
DefaultStore.saveAllContracts();
}
return wormholeContract;
}
/**
* Returns the wormhole contract for a given EVM chain.
* If there was no wormhole contract deployed for the given chain, it will deploy the wormhole contract and save it to the default store.
* @param {EvmChain} chain The EVM chain to find the wormhole contract for.
* @param {DeployWormholeReceiverContractsConfig} config The deployment configuration.
* @param {string} cacheFile The path to the cache file.
* @returns {EvmWormholeContract} The wormhole contract for the given EVM chain.
*/
export async function getOrDeployWormholeContract(
chain: EvmChain,
config: DeployWormholeReceiverContractsConfig,
cacheFile: string
): Promise<EvmWormholeContract> {
return (
findWormholeContract(chain) ??
(await deployWormholeContract(chain, config, cacheFile))
);
}

View File

@@ -5,23 +5,29 @@ import { DefaultStore } from "../src/store";
 import {
   DeploymentType,
   EvmEntropyContract,
+  EvmPriceFeedContract,
   getDefaultDeploymentConfig,
+  PrivateKey,
   toDeploymentType,
   toPrivateKey,
+  WormholeEvmContract,
 } from "../src";
 import {
   COMMON_DEPLOY_OPTIONS,
   deployIfNotCached,
   getWeb3Contract,
-  getOrDeployWormholeContract,
-  BaseDeployConfig,
 } from "./common";
 import Web3 from "web3";
-interface DeploymentConfig extends BaseDeployConfig {
+type DeploymentConfig = {
   type: DeploymentType;
+  gasMultiplier: number;
+  gasPriceMultiplier: number;
+  privateKey: PrivateKey;
+  jsonOutputDir: string;
+  wormholeAddr: string;
   saveContract: boolean;
-}
+};
 const CACHE_FILE = ".cache-deploy-evm-entropy-contracts";
 const ENTROPY_DEFAULT_PROVIDER = {
@@ -45,8 +51,7 @@ const parser = yargs(hideBin(process.argv))
 async function deployExecutorContracts(
   chain: EvmChain,
-  config: DeploymentConfig,
-  wormholeAddr: string
+  config: DeploymentConfig
 ): Promise<string> {
   const executorImplAddr = await deployIfNotCached(
     CACHE_FILE,
@@ -67,7 +72,7 @@ async function deployExecutorContracts(
   const executorInitData = executorImplContract.methods
     .initialize(
-      wormholeAddr,
+      config.wormholeAddr,
       0, // lastExecutedSequence,
       chain.getWormholeChainId(),
       governanceDataSource.emitterChain,
@@ -156,6 +161,19 @@ async function topupProviderIfNecessary(
   }
 }
+async function findWormholeAddress(
+  chain: EvmChain
+): Promise<string | undefined> {
+  for (const contract of Object.values(DefaultStore.contracts)) {
+    if (
+      contract instanceof EvmPriceFeedContract &&
+      contract.getChain().getId() === chain.getId()
+    ) {
+      return (await contract.getWormholeContract()).address;
+    }
+  }
+}
 async function main() {
   const argv = await parser.argv;
@@ -167,6 +185,12 @@ async function main() {
     throw new Error(`Chain ${chainName} is not an EVM chain`);
   }
+  const wormholeAddr = await findWormholeAddress(chain);
+  if (!wormholeAddr) {
+    // TODO: deploy wormhole if necessary and maintain a wormhole store
+    throw new Error(`Wormhole contract not found for chain ${chain.getId()}`);
+  }
   const deploymentConfig: DeploymentConfig = {
     type: toDeploymentType(argv.deploymentType),
     gasMultiplier: argv.gasMultiplier,
@@ -174,14 +198,18 @@ async function main() {
     privateKey: toPrivateKey(argv.privateKey),
     jsonOutputDir: argv.stdOutputDir,
     saveContract: argv.saveContract,
+    wormholeAddr,
   };
-  const wormholeContract = await getOrDeployWormholeContract(
+  const wormholeContract = new WormholeEvmContract(
     chain,
-    deploymentConfig,
-    CACHE_FILE
+    deploymentConfig.wormholeAddr
   );
+  const wormholeChainId = await wormholeContract.getChainId();
+  if (chain.getWormholeChainId() != wormholeChainId) {
+    throw new Error(
+      `Wormhole chain id mismatch. Expected ${chain.getWormholeChainId()} but got ${wormholeChainId}`
+    );
+  }
   await topupProviderIfNecessary(chain, deploymentConfig);
   console.log(
@@ -190,11 +218,7 @@ async function main() {
   console.log(`Deploying entropy contracts on ${chain.getId()}...`);
-  const executorAddr = await deployExecutorContracts(
-    chain,
-    deploymentConfig,
-    wormholeContract.address
-  );
+  const executorAddr = await deployExecutorContracts(chain, deploymentConfig);
   const entropyAddr = await deployEntropyContracts(
     chain,
     deploymentConfig,

View File

@@ -6,23 +6,27 @@ import {
   DeploymentType,
   EvmPriceFeedContract,
   getDefaultDeploymentConfig,
+  PrivateKey,
   toDeploymentType,
   toPrivateKey,
+  WormholeEvmContract,
 } from "../src";
 import {
   COMMON_DEPLOY_OPTIONS,
   deployIfNotCached,
   getWeb3Contract,
-  getOrDeployWormholeContract,
-  BaseDeployConfig,
 } from "./common";
-interface DeploymentConfig extends BaseDeployConfig {
+type DeploymentConfig = {
   type: DeploymentType;
   validTimePeriodSeconds: number;
   singleUpdateFeeInWei: number;
+  gasMultiplier: number;
+  gasPriceMultiplier: number;
+  privateKey: PrivateKey;
+  jsonOutputDir: string;
   saveContract: boolean;
-}
+};
 const CACHE_FILE = ".cache-deploy-evm";
@@ -47,6 +51,68 @@ const parser = yargs(hideBin(process.argv))
   },
 });
async function deployWormholeReceiverContracts(
chain: EvmChain,
config: DeploymentConfig
): Promise<string> {
const receiverSetupAddr = await deployIfNotCached(
CACHE_FILE,
chain,
config,
"ReceiverSetup",
[]
);
const receiverImplAddr = await deployIfNotCached(
CACHE_FILE,
chain,
config,
"ReceiverImplementation",
[]
);
// Craft the init data for the proxy contract
const setupContract = getWeb3Contract(
config.jsonOutputDir,
"ReceiverSetup",
receiverSetupAddr
);
const { wormholeConfig } = getDefaultDeploymentConfig(config.type);
const initData = setupContract.methods
.setup(
receiverImplAddr,
wormholeConfig.initialGuardianSet.map((addr: string) => "0x" + addr),
chain.getWormholeChainId(),
wormholeConfig.governanceChainId,
"0x" + wormholeConfig.governanceContract
)
.encodeABI();
const wormholeReceiverAddr = await deployIfNotCached(
CACHE_FILE,
chain,
config,
"WormholeReceiver",
[receiverSetupAddr, initData]
);
const wormholeEvmContract = new WormholeEvmContract(
chain,
wormholeReceiverAddr
);
if (config.type === "stable") {
console.log(`Syncing mainnet guardian sets for ${chain.getId()}...`);
// TODO: Add a way to pass gas configs to this
await wormholeEvmContract.syncMainnetGuardianSets(config.privateKey);
console.log(`✅ Synced mainnet guardian sets for ${chain.getId()}`);
}
return wormholeReceiverAddr;
}
 async function deployPriceFeedContracts(
   chain: EvmChain,
   config: DeploymentConfig,
@@ -120,16 +186,14 @@ async function main() {
   console.log(`Deploying price feed contracts on ${chain.getId()}...`);
-  const wormholeContract = await getOrDeployWormholeContract(
+  const wormholeAddr = await deployWormholeReceiverContracts(
     chain,
-    deploymentConfig,
-    CACHE_FILE
+    deploymentConfig
   );
   const priceFeedAddr = await deployPriceFeedContracts(
     chain,
     deploymentConfig,
-    wormholeContract.address
+    wormholeAddr
   );
   if (deploymentConfig.saveContract) {

View File

@@ -1,64 +0,0 @@
import yargs from "yargs";
import { hideBin } from "yargs/helpers";
import { DefaultStore } from "../src";
function deserializeCommitmentMetadata(data: Buffer) {
const seed = Uint8Array.from(data.subarray(0, 32));
const chainLength = data.readBigInt64LE(32);
return {
seed,
chainLength,
};
}
const parser = yargs(hideBin(process.argv))
.usage("Usage: $0")
.options({
testnet: {
type: "boolean",
default: false,
desc: "Fetch the provider registration data for the testnet contracts.",
},
});
async function main() {
const argv = await parser.argv;
for (const contract of Object.values(DefaultStore.entropy_contracts)) {
if (contract.getChain().isMainnet() === argv.testnet) continue;
let provider;
let providerInfo;
try {
provider = await contract.getDefaultProvider();
providerInfo = await contract.getProviderInfo(provider);
} catch (e) {
console.error(`Error fetching info for ${contract.getId()}`, e);
continue;
}
const commitmentMetadata = providerInfo.commitmentMetadata.replace(
"0x",
""
);
// const binaryData = hexToBytes(commitmentMetadata);
const metadata = deserializeCommitmentMetadata(
Buffer.from(commitmentMetadata, "hex")
);
console.log("=".repeat(100));
console.log(`Fetched info for ${contract.getId()}`);
console.log(`chain : ${contract.getChain().getId()}`);
console.log(`contract : ${contract.address}`);
console.log(`provider : ${provider}`);
console.log(`commitment data : ${commitmentMetadata}`);
console.log(`chainLength : ${metadata.chainLength}`);
console.log(`seed : [${metadata.seed}]`);
console.log(
`original seq no : ${providerInfo.originalCommitmentSequenceNumber}`
);
}
}
main();

View File

@ -1,32 +1,33 @@
import yargs from "yargs"; import yargs from "yargs";
import { hideBin } from "yargs/helpers"; import { hideBin } from "yargs/helpers";
import { toPrivateKey } from "../src"; import { DefaultStore, toPrivateKey } from "../src";
import { import { COMMON_DEPLOY_OPTIONS } from "./common";
COMMON_DEPLOY_OPTIONS,
findEntropyContract,
findEvmChain,
} from "./common";
const parser = yargs(hideBin(process.argv)) const parser = yargs(hideBin(process.argv))
.usage( .usage(
"Requests and reveals a random number from an entropy contract while measuing the\n" + "Requests and reveals a random number from an entropy contract while measuing the\n" +
"latency between request submission and availablity of the provider revelation from fortuna.\n" + "latency between request submission and availablity of the provider revelation from fortuna.\n" +
"Usage: $0 --chain <chain-id> --private-key <private-key>" "Usage: $0 --contract <entropy_contract_id> --private-key <private-key>"
) )
.options({ .options({
chain: { contract: {
type: "string", type: "string",
demandOption: true, demandOption: true,
desc: "test latency for the contract on this chain", desc: "Contract to test latency for",
}, },
"private-key": COMMON_DEPLOY_OPTIONS["private-key"], "private-key": COMMON_DEPLOY_OPTIONS["private-key"],
}); });
async function main() { async function main() {
const argv = await parser.argv; const argv = await parser.argv;
const chain = findEvmChain(argv.chain); const contract = DefaultStore.entropy_contracts[argv.contract];
const contract = findEntropyContract(chain); if (!contract) {
throw new Error(
`Contract ${argv.contract} not found. Contracts found: ${Object.keys(
DefaultStore.entropy_contracts
)}`
);
}
const provider = await contract.getDefaultProvider(); const provider = await contract.getDefaultProvider();
const providerInfo = await contract.getProviderInfo(provider); const providerInfo = await contract.getProviderInfo(provider);
const userRandomNumber = contract.generateUserRandomNumber(); const userRandomNumber = contract.generateUserRandomNumber();

View File

@ -1,118 +0,0 @@
import yargs from "yargs";
import { hideBin } from "yargs/helpers";
import {
DefaultStore,
EvmEntropyContract,
PrivateKey,
toPrivateKey,
} from "../src";
import {
COMMON_DEPLOY_OPTIONS,
findEntropyContract,
findEvmChain,
} from "./common";
import Web3 from "web3";
const parser = yargs(hideBin(process.argv))
.usage(
"Requests a random number from an entropy contract and measures the\n" +
"latency between request submission and fulfillment by the Fortuna keeper service.\n" +
"Usage: $0 --private-key <private-key> --chain <chain-id> | --all-chains <testnet|mainnet>"
)
.options({
chain: {
type: "string",
desc: "test latency for the contract on this chain",
conflicts: "all-chains",
},
"all-chains": {
type: "string",
conflicts: "chain",
choices: ["testnet", "mainnet"],
desc: "test latency for all entropy contracts deployed either on mainnet or testnet",
},
"private-key": COMMON_DEPLOY_OPTIONS["private-key"],
});
async function testLatency(
contract: EvmEntropyContract,
privateKey: PrivateKey
) {
const provider = await contract.getDefaultProvider();
const userRandomNumber = contract.generateUserRandomNumber();
const requestResponse = await contract.requestRandomness(
userRandomNumber,
provider,
privateKey,
true // with callback
);
console.log(`Request tx hash : ${requestResponse.transactionHash}`);
// Read the sequence number for the request from the transaction events.
const sequenceNumber =
requestResponse.events.RequestedWithCallback.returnValues.sequenceNumber;
console.log(`sequence : ${sequenceNumber}`);
const startTime = Date.now();
let fromBlock = requestResponse.blockNumber;
const web3 = new Web3(contract.chain.getRpcUrl());
const entropyContract = contract.getContract();
// eslint-disable-next-line no-constant-condition
while (true) {
const currentBlock = await web3.eth.getBlockNumber();
if (fromBlock > currentBlock) {
// Wait briefly instead of hammering the RPC while no new block is available.
await new Promise((resolve) => setTimeout(resolve, 300));
continue;
}
const events = await entropyContract.getPastEvents("RevealedWithCallback", {
fromBlock: fromBlock,
toBlock: currentBlock,
});
fromBlock = currentBlock + 1;
const event = events.find(
(event) => event.returnValues.request[1] == sequenceNumber
);
if (event !== undefined) {
console.log(`Random number : ${event.returnValues.randomNumber}`);
const endTime = Date.now();
console.log(`Fortuna Latency : ${endTime - startTime}ms`);
console.log(
`Revealed after : ${
currentBlock - requestResponse.blockNumber
} blocks`
);
break;
}
await new Promise((resolve) => setTimeout(resolve, 300));
}
}
async function main() {
const argv = await parser.argv;
if (!argv.chain && !argv["all-chains"]) {
throw new Error("Must specify either --chain or --all-chains");
}
const privateKey = toPrivateKey(argv.privateKey);
if (argv["all-chains"]) {
for (const contract of Object.values(DefaultStore.entropy_contracts)) {
if (
contract.getChain().isMainnet() ===
(argv["all-chains"] === "mainnet")
) {
console.log(`Testing latency for ${contract.getId()}...`);
await testLatency(contract, privateKey);
}
}
} else if (argv.chain) {
const chain = findEvmChain(argv.chain);
const contract = findEntropyContract(chain);
await testLatency(contract, privateKey);
}
}
main();

View File

@ -13,22 +13,15 @@ const parser = yargs(hideBin(process.argv))
}, },
}); });
const KEEPER_ADDRESS = {
mainnet: "0xBcAb779fCa45290288C35F5E231c37F9fA87b130",
testnet: "0xa5A68ed167431Afe739846A22597786ba2da85df",
};
async function main() { async function main() {
const argv = await parser.argv; const argv = await parser.argv;
const entries = []; const entries = [];
const keeperAddress = KEEPER_ADDRESS[argv.testnet ? "testnet" : "mainnet"];
for (const contract of Object.values(DefaultStore.entropy_contracts)) { for (const contract of Object.values(DefaultStore.entropy_contracts)) {
if (contract.getChain().isMainnet() === argv.testnet) continue; if (contract.getChain().isMainnet() === argv.testnet) continue;
try { try {
const provider = await contract.getDefaultProvider(); const provider = await contract.getDefaultProvider();
const w3 = new Web3(contract.getChain().getRpcUrl()); const w3 = new Web3(contract.getChain().getRpcUrl());
const balance = await w3.eth.getBalance(provider); const balance = await w3.eth.getBalance(provider);
const keeperBalance = await w3.eth.getBalance(keeperAddress);
let version = "unknown"; let version = "unknown";
try { try {
version = await contract.getVersion(); version = await contract.getVersion();
@ -41,7 +34,6 @@ async function main() {
contract: contract.address, contract: contract.address,
provider: providerInfo.uri, provider: providerInfo.uri,
balance, balance,
keeperBalance,
seq: providerInfo.sequenceNumber, seq: providerInfo.sequenceNumber,
version, version,
}); });

View File

@ -1,69 +0,0 @@
import yargs from "yargs";
import { hideBin } from "yargs/helpers";
import {
CosmWasmPriceFeedContract,
DefaultStore,
EvmPriceFeedContract,
toPrivateKey,
} from "../src";
const parser = yargs(hideBin(process.argv))
.usage("Update the guardian set in stable networks. Usage: $0")
.options({
"private-key": {
type: "string",
demandOption: true,
desc: "Private key to sign the transactions with",
},
chain: {
type: "array",
desc: "Can be one of the chains available in the store",
},
});
async function main() {
const argv = await parser.argv;
const privateKey = toPrivateKey(argv.privateKey);
const chains = argv.chain;
for (const contract of Object.values(DefaultStore.contracts)) {
// We are currently only managing wormhole receiver contracts on EVM,
// CosmWasm, and Solana-based networks. The rest of the networks are
// managed by the guardians themselves and they should be the ones updating
// the guardian set.
// TODO: Solana-based receivers have their script in their rust cli. Add
// support for Solana-based networks here once they are added to the
// contract manager.
if (
contract instanceof CosmWasmPriceFeedContract ||
contract instanceof EvmPriceFeedContract
) {
if (chains && !chains.includes(contract.getChain().getId())) {
continue;
}
try {
console.log("------------------------------------");
const wormhole = await contract.getWormholeContract();
// TODO: This is a temporary workaround to skip contracts that are in beta channel
// We should have a better way to handle this
if ((await wormhole.getCurrentGuardianSetIndex()) === 0) {
continue;
}
console.log(
`Current Guardianset for ${contract.getId()}: ${await wormhole.getCurrentGuardianSetIndex()}`
);
await wormhole.syncMainnetGuardianSets(privateKey);
console.log(`Updated Guardianset for ${contract.getId()}`);
} catch (e) {
console.error(`Error updating Guardianset for ${contract.getId()}`, e);
}
}
}
}
main();

View File

@ -9,33 +9,19 @@ import {
makeCacheFunction, makeCacheFunction,
} from "./common"; } from "./common";
const EXECUTOR_CACHE_FILE = ".cache-upgrade-evm-executor-contract"; const CACHE_FILE = ".cache-upgrade-evm-executor-contract";
const ENTROPY_CACHE_FILE = ".cache-upgrade-evm-entropy-contract"; const runIfNotCached = makeCacheFunction(CACHE_FILE);
const parser = yargs(hideBin(process.argv)) const parser = yargs(hideBin(process.argv))
.usage( .usage(
"Deploys a new Upgradeable contract for Executor or Entropy to a set of chains where Entropy is deployed and creates a governance proposal for it.\n" + "Deploys a new ExecutorUpgradeable contract to a set of chains where Entropy is deployed and creates a governance proposal for it.\n" +
`Uses a cache file to avoid deploying contracts twice\n` + `Uses a cache file (${CACHE_FILE}) to avoid deploying contracts twice\n` +
"Usage: $0 --chain <chain_1> --chain <chain_2> --private-key <private_key> --ops-key-path <ops_key_path> --std-output <std_output>" "Usage: $0 --chain <chain_1> --chain <chain_2> --private-key <private_key> --ops-key-path <ops_key_path> --std-output <std_output>"
) )
.options({ .options(COMMON_UPGRADE_OPTIONS);
...COMMON_UPGRADE_OPTIONS,
"contract-type": {
type: "string",
choices: ["executor", "entropy"],
demandOption: true,
},
});
async function main() { async function main() {
const argv = await parser.argv; const argv = await parser.argv;
const cacheFile =
argv["contract-type"] === "executor"
? EXECUTOR_CACHE_FILE
: ENTROPY_CACHE_FILE;
const runIfNotCached = makeCacheFunction(cacheFile);
const selectedChains = getSelectedChains(argv); const selectedChains = getSelectedChains(argv);
const vault = const vault =
@ -43,7 +29,7 @@ async function main() {
"mainnet-beta_FVQyHcooAtThJ83XFrNnv74BcinbRH3bRmfFamAHBfuj" "mainnet-beta_FVQyHcooAtThJ83XFrNnv74BcinbRH3bRmfFamAHBfuj"
]; ];
console.log("Using cache file", cacheFile); console.log("Using cache file", CACHE_FILE);
const payloads: Buffer[] = []; const payloads: Buffer[] = [];
for (const contract of Object.values(DefaultStore.entropy_contracts)) { for (const contract of Object.values(DefaultStore.entropy_contracts)) {
@ -65,11 +51,9 @@ async function main() {
console.log( console.log(
`Deployed contract at ${address} on ${contract.chain.getId()}` `Deployed contract at ${address} on ${contract.chain.getId()}`
); );
const payload = const payload = await contract.generateUpgradeExecutorContractsPayload(
argv["contract-type"] === "executor" address
? await contract.generateUpgradeExecutorContractsPayload(address) );
: await contract.generateUpgradeEntropyContractPayload(address);
console.log(payload.toString("hex")); console.log(payload.toString("hex"));
payloads.push(payload); payloads.push(payload);
} }

View File

@ -408,10 +408,11 @@ export class EvmChain extends Chain {
const GAS_ESTIMATE_MULTIPLIER = 2; const GAS_ESTIMATE_MULTIPLIER = 2;
const gasEstimate = await transactionObject.estimateGas(txParams); const gasEstimate = await transactionObject.estimateGas(txParams);
// Some networks like Filecoin do not support the normal transaction type and need a type 2 transaction. // Some networks like Filecoin do not support the normal transaction type and need a type 2 transaction.
// To send a type 2 transaction, remove the ``gasPrice`` field. // To send a type 2 transaction, remove the ``gasPrice`` field and add the `type` field with the value
// `0x2` to the transaction configuration parameters.
return transactionObject.send({ return transactionObject.send({
gas: gasEstimate * GAS_ESTIMATE_MULTIPLIER, gas: gasEstimate * GAS_ESTIMATE_MULTIPLIER,
gasPrice: Number(await this.getGasPrice()), gasPrice: await this.getGasPrice(),
...txParams, ...txParams,
}); });
} }
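Regarding the type-2 note above, a hedged sketch of what the send call could look like on such a network: drop gasPrice and supply EIP-1559 fee fields instead. The field names follow web3.js conventions for type-2 transactions; the fee values are placeholders.

// Sketch only — a type-2 variant of the send() call in the method above; fee values are placeholders.
const gasEstimate = await transactionObject.estimateGas(txParams);
return transactionObject.send({
  gas: gasEstimate * GAS_ESTIMATE_MULTIPLIER,
  // No gasPrice on EIP-1559-only chains; use the type-2 fee fields instead.
  type: "0x2",
  maxFeePerGas: Web3.utils.toWei("100", "gwei"),
  maxPriorityFeePerGas: Web3.utils.toWei("2", "gwei"),
  ...txParams,
});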

View File

@ -17,39 +17,7 @@ type GuardianSet = {
index: { number: string }; index: { number: string };
}; };
export class AptosWormholeContract extends WormholeContract { export class WormholeAptosContract extends WormholeContract {
static type = "AptosWormholeContract";
getId(): string {
return `${this.chain.getId()}_${this.address}`;
}
getType(): string {
return AptosWormholeContract.type;
}
toJson() {
return {
chain: this.chain.getId(),
address: this.address,
type: AptosWormholeContract.type,
};
}
static fromJson(
chain: Chain,
parsed: {
type: string;
address: string;
}
): AptosWormholeContract {
if (parsed.type !== AptosWormholeContract.type)
throw new Error("Invalid type");
if (!(chain instanceof AptosChain))
throw new Error(`Wrong chain type ${chain}`);
return new AptosWormholeContract(chain, parsed.address);
}
constructor(public chain: AptosChain, public address: string) { constructor(public chain: AptosChain, public address: string) {
super(); super();
} }
@ -156,8 +124,8 @@ export class AptosPriceFeedContract extends PriceFeedContract {
return this.chain.sendTransaction(senderPrivateKey, txPayload); return this.chain.sendTransaction(senderPrivateKey, txPayload);
} }
public getWormholeContract(): AptosWormholeContract { public getWormholeContract(): WormholeAptosContract {
return new AptosWormholeContract(this.chain, this.wormholeStateId); return new WormholeAptosContract(this.chain, this.wormholeStateId);
} }
async executeUpdatePriceFeed( async executeUpdatePriceFeed(

View File

@ -38,36 +38,7 @@ export interface DeploymentConfig {
fee: { amount: string; denom: string }; fee: { amount: string; denom: string };
} }
export class CosmWasmWormholeContract extends WormholeContract { export class WormholeCosmWasmContract extends WormholeContract {
static type = "CosmWasmWormholeContract";
getId(): string {
return `${this.chain.getId()}_${this.address}`;
}
getType(): string {
return CosmWasmWormholeContract.type;
}
toJson() {
return {
chain: this.chain.getId(),
address: this.address,
type: CosmWasmWormholeContract.type,
};
}
static fromJson(
chain: Chain,
parsed: { type: string; address: string }
): CosmWasmWormholeContract {
if (parsed.type !== CosmWasmWormholeContract.type)
throw new Error("Invalid type");
if (!(chain instanceof CosmWasmChain))
throw new Error(`Wrong chain type ${chain}`);
return new CosmWasmWormholeContract(chain, parsed.address);
}
constructor(public chain: CosmWasmChain, public address: string) { constructor(public chain: CosmWasmChain, public address: string) {
super(); super();
} }
@ -240,9 +211,7 @@ export class CosmWasmPriceFeedContract extends PriceFeedContract {
})) as Record<string, string>; })) as Record<string, string>;
const config = { const config = {
config_v1: JSON.parse(allStates["\x00\tconfig_v1"]), config_v1: JSON.parse(allStates["\x00\tconfig_v1"]),
contract_version: allStates["\x00\x10contract_version"] contract_version: JSON.parse(allStates["\x00\x10contract_version"]),
? JSON.parse(allStates["\x00\x10contract_version"])
: undefined,
}; };
return config; return config;
} }
@ -339,10 +308,10 @@ export class CosmWasmPriceFeedContract extends PriceFeedContract {
return { id: result.txHash, info: result }; return { id: result.txHash, info: result };
} }
async getWormholeContract(): Promise<CosmWasmWormholeContract> { async getWormholeContract(): Promise<WormholeCosmWasmContract> {
const config = await this.getConfig(); const config = await this.getConfig();
const wormholeAddress = config.config_v1.wormhole_contract; const wormholeAddress = config.config_v1.wormhole_contract;
return new CosmWasmWormholeContract(this.chain, wormholeAddress); return new WormholeCosmWasmContract(this.chain, wormholeAddress);
} }
async getUpdateFee(msgs: string[]): Promise<Coin> { async getUpdateFee(msgs: string[]): Promise<Coin> {

View File

@ -62,19 +62,6 @@ const EXTENDED_ENTROPY_ABI = [
stateMutability: "pure", stateMutability: "pure",
type: "function", type: "function",
}, },
{
inputs: [
{
internalType: "address",
name: "newImplementation",
type: "address",
},
],
name: "upgradeTo",
outputs: [],
stateMutability: "nonpayable",
type: "function",
},
...EntropyAbi, ...EntropyAbi,
] as any; // eslint-disable-line @typescript-eslint/no-explicit-any ] as any; // eslint-disable-line @typescript-eslint/no-explicit-any
const EXTENDED_PYTH_ABI = [ const EXTENDED_PYTH_ABI = [
@ -367,60 +354,7 @@ const EXECUTOR_ABI = [
type: "function", type: "function",
}, },
] as any; // eslint-disable-line @typescript-eslint/no-explicit-any ] as any; // eslint-disable-line @typescript-eslint/no-explicit-any
export class WormholeEvmContract extends WormholeContract {
/**
* Returns the keccak256 digest of the contract bytecode at the given address after replacing
* any occurrences of the contract address in the bytecode with 0. The bytecode stores the deployment
* address as an immutable variable. This behavior is inherited from OpenZeppelin's implementation
* of the UUPSUpgradeable contract. You can read more about verification with immutable variables here:
* https://docs.sourcify.dev/docs/immutables/
* This function can be used to verify that the contract code is the same on all chains and matches
* with the deployedCode property generated by truffle builds
*/
export async function getCodeDigestWithoutAddress(
rpcUrl: string,
address: string
): Promise<string> {
const web3 = new Web3(rpcUrl);
const code = await web3.eth.getCode(address);
const strippedCode = code.replaceAll(
address.toLowerCase().replace("0x", ""),
"0000000000000000000000000000000000000000"
);
return Web3.utils.keccak256(strippedCode);
}
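A minimal sketch of using this helper to check that the same implementation is deployed on two chains; the RPC URLs and contract addresses below are placeholders.

// Placeholders throughout — RPC URLs and contract addresses are illustrative.
async function compareDeployments() {
  const digestA = await getCodeDigestWithoutAddress(
    "https://rpc.example-chain-a.org",
    "0x0000000000000000000000000000000000000aaa"
  );
  const digestB = await getCodeDigestWithoutAddress(
    "https://rpc.example-chain-b.org",
    "0x0000000000000000000000000000000000000bbb"
  );
  // Comparing digests (rather than raw bytecode) works because the immutable
  // deployment address baked into the code has been zeroed out by the helper.
  console.log(digestA === digestB ? "same implementation" : "implementations differ");
}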
export class EvmWormholeContract extends WormholeContract {
static type = "EvmWormholeContract";
getId(): string {
return `${this.chain.getId()}_${this.address}`;
}
getChain(): EvmChain {
return this.chain;
}
getType(): string {
return EvmWormholeContract.type;
}
async getVersion(): Promise<string> {
const contract = this.getContract();
return contract.methods.version().call();
}
static fromJson(
chain: Chain,
parsed: { type: string; address: string }
): EvmWormholeContract {
if (parsed.type !== EvmWormholeContract.type)
throw new Error("Invalid type");
if (!(chain instanceof EvmChain))
throw new Error(`Wrong chain type ${chain}`);
return new EvmWormholeContract(chain, parsed.address);
}
constructor(public chain: EvmChain, public address: string) { constructor(public chain: EvmChain, public address: string) {
super(); super();
} }
@ -466,14 +400,6 @@ export class EvmWormholeContract extends WormholeContract {
); );
return { id: result.transactionHash, info: result }; return { id: result.transactionHash, info: result };
} }
toJson() {
return {
chain: this.chain.getId(),
address: this.address,
type: EvmWormholeContract.type,
};
}
} }
interface EntropyProviderInfo { interface EntropyProviderInfo {
@ -554,18 +480,6 @@ export class EvmEntropyContract extends Storable {
return this.generateExecutorPayload(newOwner, this.address, data); return this.generateExecutorPayload(newOwner, this.address, data);
} }
async generateUpgradeEntropyContractPayload(
newImplementation: string
): Promise<Buffer> {
const contract = this.getContract();
const data = contract.methods.upgradeTo(newImplementation).encodeABI();
return this.generateExecutorPayload(
await this.getOwner(),
this.address,
data
);
}
// Generates a payload to upgrade the executor contract, the owner of entropy contracts // Generates a payload to upgrade the executor contract, the owner of entropy contracts
async generateUpgradeExecutorContractsPayload( async generateUpgradeExecutorContractsPayload(
newImplementation: string newImplementation: string
@ -630,30 +544,19 @@ export class EvmEntropyContract extends Storable {
async requestRandomness( async requestRandomness(
userRandomNumber: string, userRandomNumber: string,
provider: string, provider: string,
senderPrivateKey: PrivateKey, senderPrivateKey: PrivateKey
withCallback?: boolean
) { ) {
const web3 = new Web3(this.chain.getRpcUrl()); const web3 = new Web3(this.chain.getRpcUrl());
const userCommitment = web3.utils.keccak256(userRandomNumber); const userCommitment = web3.utils.keccak256(userRandomNumber);
const contract = new web3.eth.Contract(EXTENDED_ENTROPY_ABI, this.address); const contract = new web3.eth.Contract(EXTENDED_ENTROPY_ABI, this.address);
const fee = await contract.methods.getFee(provider).call(); const fee = await contract.methods.getFee(provider).call();
const { address } = web3.eth.accounts.wallet.add(senderPrivateKey); const { address } = web3.eth.accounts.wallet.add(senderPrivateKey);
let transactionObject;
if (withCallback) {
transactionObject = contract.methods.requestWithCallback(
provider,
userCommitment
);
} else {
const useBlockHash = false; const useBlockHash = false;
transactionObject = contract.methods.request( const transactionObject = contract.methods.request(
provider, provider,
userCommitment, userCommitment,
useBlockHash useBlockHash
); );
}
return this.chain.estiamteAndSendTransaction(transactionObject, { return this.chain.estiamteAndSendTransaction(transactionObject, {
from: address, from: address,
value: fee, value: fee,
@ -689,13 +592,13 @@ export class EvmExecutorContract {
return `${this.chain.getId()}_${this.address}`; return `${this.chain.getId()}_${this.address}`;
} }
async getWormholeContract(): Promise<EvmWormholeContract> { async getWormholeContract(): Promise<WormholeEvmContract> {
const web3 = new Web3(this.chain.getRpcUrl()); const web3 = new Web3(this.chain.getRpcUrl());
// Unfortunately, there is no public method to get the wormhole address // Unfortunately, there is no public method to get the wormhole address
// Found 251 by using `forge build --extra-output storageLayout` and finding the slot for the wormhole variable. // Found 251 by using `forge build --extra-output storageLayout` and finding the slot for the wormhole variable.
let address = await web3.eth.getStorageAt(this.address, 251); let address = await web3.eth.getStorageAt(this.address, 251);
address = "0x" + address.slice(26); address = "0x" + address.slice(26);
return new EvmWormholeContract(this.chain, address); return new WormholeEvmContract(this.chain, address);
} }
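For context on the slice(26) above: getStorageAt returns a 32-byte word as a 0x-prefixed, 66-character hex string, and an address occupies the low 20 bytes of that word, so the last 40 hex characters are kept. A generic sketch of the same trick (the slot number and contract address are whatever the caller supplies):

// Sketch only — reads an address-typed variable out of an arbitrary storage slot.
import Web3 from "web3";

async function readAddressFromSlot(web3: Web3, contract: string, slot: number) {
  const word = await web3.eth.getStorageAt(contract, slot); // "0x" + 64 hex chars
  return Web3.utils.toChecksumAddress("0x" + word.slice(26)); // keep the low 20 bytes
}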
getContract() { getContract() {
@ -805,10 +708,21 @@ export class EvmPriceFeedContract extends PriceFeedContract {
} }
/** /**
* Returns the keccak256 digest of the contract bytecode * Returns the keccak256 digest of the contract bytecode after replacing any occurrences of the contract address in
* the bytecode with 0. The bytecode stores the deployment address as an immutable variable.
* This behavior is inherited from OpenZeppelin's implementation of the UUPSUpgradeable contract.
* You can read more about verification with immutable variables here:
* https://docs.sourcify.dev/docs/immutables/
* This function can be used to verify that the contract code is the same on all chains and matches
* with the deployedCode property generated by truffle builds
*/ */
async getCodeDigestWithoutAddress(): Promise<string> { async getCodeDigestWithoutAddress(): Promise<string> {
return getCodeDigestWithoutAddress(this.chain.getRpcUrl(), this.address); const code = await this.getCode();
const strippedCode = code.replaceAll(
this.address.toLowerCase().replace("0x", ""),
"0000000000000000000000000000000000000000"
);
return Web3.utils.keccak256(strippedCode);
} }
async getTotalFee(): Promise<TokenQty> { async getTotalFee(): Promise<TokenQty> {
@ -860,10 +774,10 @@ export class EvmPriceFeedContract extends PriceFeedContract {
/** /**
* Returns the wormhole contract which is being used for VAA verification * Returns the wormhole contract which is being used for VAA verification
*/ */
async getWormholeContract(): Promise<EvmWormholeContract> { async getWormholeContract(): Promise<WormholeEvmContract> {
const pythContract = this.getContract(); const pythContract = this.getContract();
const address = await pythContract.methods.wormhole().call(); const address = await pythContract.methods.wormhole().call();
return new EvmWormholeContract(this.chain, address); return new WormholeEvmContract(this.chain, address);
} }
async getBaseUpdateFee() { async getBaseUpdateFee() {

View File

@ -1,6 +1,6 @@
import { PrivateKey, Storable, TxResult } from "../base"; import { PrivateKey, TxResult } from "../base";
export abstract class WormholeContract extends Storable { export abstract class WormholeContract {
abstract getCurrentGuardianSetIndex(): Promise<number>; abstract getCurrentGuardianSetIndex(): Promise<number>;
/** /**
@ -33,7 +33,6 @@ export abstract class WormholeContract extends Storable {
"010000000001007ac31b282c2aeeeb37f3385ee0de5f8e421d30b9e5ae8ba3d4375c1c77a86e77159bb697d9c456d6f8c02d22a94b1279b65b0d6a9957e7d3857423845ac758e300610ac1d2000000030001000000000000000000000000000000000000000000000000000000000000000400000000000005390000000000000000000000000000000000000000000000000000000000436f7265020000000000011358cc3ae5c097b213ce3c81979e1b9f9570746aa5ff6cb952589bde862c25ef4392132fb9d4a42157114de8460193bdf3a2fcf81f86a09765f4762fd1107a0086b32d7a0977926a205131d8731d39cbeb8c82b2fd82faed2711d59af0f2499d16e726f6b211b39756c042441be6d8650b69b54ebe715e234354ce5b4d348fb74b958e8966e2ec3dbd4958a7cdeb5f7389fa26941519f0863349c223b73a6ddee774a3bf913953d695260d88bc1aa25a4eee363ef0000ac0076727b35fbea2dac28fee5ccb0fea768eaf45ced136b9d9e24903464ae889f5c8a723fc14f93124b7c738843cbb89e864c862c38cddcccf95d2cc37a4dc036a8d232b48f62cdd4731412f4890da798f6896a3331f64b48c12d1d57fd9cbe7081171aa1be1d36cafe3867910f99c09e347899c19c38192b6e7387ccd768277c17dab1b7a5027c0b3cf178e21ad2e77ae06711549cfbb1f9c7a9d8096e85e1487f35515d02a92753504a8d75471b9f49edb6fbebc898f403e4773e95feb15e80c9a99c8348d", "010000000001007ac31b282c2aeeeb37f3385ee0de5f8e421d30b9e5ae8ba3d4375c1c77a86e77159bb697d9c456d6f8c02d22a94b1279b65b0d6a9957e7d3857423845ac758e300610ac1d2000000030001000000000000000000000000000000000000000000000000000000000000000400000000000005390000000000000000000000000000000000000000000000000000000000436f7265020000000000011358cc3ae5c097b213ce3c81979e1b9f9570746aa5ff6cb952589bde862c25ef4392132fb9d4a42157114de8460193bdf3a2fcf81f86a09765f4762fd1107a0086b32d7a0977926a205131d8731d39cbeb8c82b2fd82faed2711d59af0f2499d16e726f6b211b39756c042441be6d8650b69b54ebe715e234354ce5b4d348fb74b958e8966e2ec3dbd4958a7cdeb5f7389fa26941519f0863349c223b73a6ddee774a3bf913953d695260d88bc1aa25a4eee363ef0000ac0076727b35fbea2dac28fee5ccb0fea768eaf45ced136b9d9e24903464ae889f5c8a723fc14f93124b7c738843cbb89e864c862c38cddcccf95d2cc37a4dc036a8d232b48f62cdd4731412f4890da798f6896a3331f64b48c12d1d57fd9cbe7081171aa1be1d36cafe3867910f99c09e347899c19c38192b6e7387ccd768277c17dab1b7a5027c0b3cf178e21ad2e77ae06711549cfbb1f9c7a9d8096e85e1487f35515d02a92753504a8d75471b9f49edb6fbebc898f403e4773e95feb15e80c9a99c8348d",
"01000000010d0012e6b39c6da90c5dfd3c228edbb78c7a4c97c488ff8a346d161a91db067e51d638c17216f368aa9bdf4836b8645a98018ca67d2fec87d769cabfdf2406bf790a0002ef42b288091a670ef3556596f4f47323717882881eaf38e03345078d07a156f312b785b64dae6e9a87e3d32872f59cb1931f728cecf511762981baf48303668f0103cef2616b84c4e511ff03329e0853f1bd7ee9ac5ba71d70a4d76108bddf94f69c2a8a84e4ee94065e8003c334e899184943634e12043d0dda78d93996da073d190104e76d166b9dac98f602107cc4b44ac82868faf00b63df7d24f177aa391e050902413b71046434e67c770b19aecdf7fce1d1435ea0be7262e3e4c18f50ddc8175c0105d9450e8216d741e0206a50f93b750a47e0a258b80eb8fed1314cc300b3d905092de25cd36d366097b7103ae2d184121329ba3aa2d7c6cc53273f11af14798110010687477c8deec89d36a23e7948feb074df95362fc8dcbd8ae910ac556a1dee1e755c56b9db5d710c940938ed79bc1895a3646523a58bc55f475a23435a373ecfdd0107fb06734864f79def4e192497362513171530daea81f07fbb9f698afe7e66c6d44db21323144f2657d4a5386a954bb94eef9f64148c33aef6e477eafa2c5c984c01088769e82216310d1827d9bd48645ec23e90de4ef8a8de99e2d351d1df318608566248d80cdc83bdcac382b3c30c670352be87f9069aab5037d0b747208eae9c650109e9796497ff9106d0d1c62e184d83716282870cef61a1ee13d6fc485b521adcce255c96f7d1bca8d8e7e7d454b65783a830bddc9d94092091a268d311ecd84c26010c468c9fb6d41026841ff9f8d7368fa309d4dbea3ea4bbd2feccf94a92cc8a20a226338a8e2126cd16f70eaf15b4fc9be2c3fa19def14e071956a605e9d1ac4162010e23fcb6bd445b7c25afb722250c1acbc061ed964ba9de1326609ae012acdfb96942b2a102a2de99ab96327859a34a2b49a767dbdb62e0a1fb26af60fe44fd496a00106bb0bac77ac68b347645f2fb1ad789ea9bd76fb9b2324f25ae06f97e65246f142df717f662e73948317182c62ce87d79c73def0dba12e5242dfc038382812cfe00126da03c5e56cb15aeeceadc1e17a45753ab4dc0ec7bf6a75ca03143ed4a294f6f61bc3f478a457833e43084ecd7c985bf2f55a55f168aac0e030fc49e845e497101626e9d9a5d9e343f00010000000000000000000000000000000000000000000000000000000000000004c1759167c43f501c2000000000000000000000000000000000000000000000000000000000436f7265020000000000021358cc3ae5c097b213ce3c81979e1b9f9570746aa5ff6cb952589bde862c25ef4392132fb9d4a42157114de8460193bdf3a2fcf81f86a09765f4762fd1107a0086b32d7a0977926a205131d8731d39cbeb8c82b2fd82faed2711d59af0f2499d16e726f6b211b39756c042441be6d8650b69b54ebe715e234354ce5b4d348fb74b958e8966e2ec3dbd4958a7cd66b9590e1c41e0b226937bf9217d1d67fd4e91f574a3bf913953d695260d88bc1aa25a4eee363ef0000ac0076727b35fbea2dac28fee5ccb0fea768eaf45ced136b9d9e24903464ae889f5c8a723fc14f93124b7c738843cbb89e864c862c38cddcccf95d2cc37a4dc036a8d232b48f62cdd4731412f4890da798f6896a3331f64b48c12d1d57fd9cbe7081171aa1be1d36cafe3867910f99c09e347899c19c38192b6e7387ccd768277c17dab1b7a5027c0b3cf178e21ad2e77ae06711549cfbb1f9c7a9d8096e85e1487f35515d02a92753504a8d75471b9f49edb6fbebc898f403e4773e95feb15e80c9a99c8348d", 
"01000000010d0012e6b39c6da90c5dfd3c228edbb78c7a4c97c488ff8a346d161a91db067e51d638c17216f368aa9bdf4836b8645a98018ca67d2fec87d769cabfdf2406bf790a0002ef42b288091a670ef3556596f4f47323717882881eaf38e03345078d07a156f312b785b64dae6e9a87e3d32872f59cb1931f728cecf511762981baf48303668f0103cef2616b84c4e511ff03329e0853f1bd7ee9ac5ba71d70a4d76108bddf94f69c2a8a84e4ee94065e8003c334e899184943634e12043d0dda78d93996da073d190104e76d166b9dac98f602107cc4b44ac82868faf00b63df7d24f177aa391e050902413b71046434e67c770b19aecdf7fce1d1435ea0be7262e3e4c18f50ddc8175c0105d9450e8216d741e0206a50f93b750a47e0a258b80eb8fed1314cc300b3d905092de25cd36d366097b7103ae2d184121329ba3aa2d7c6cc53273f11af14798110010687477c8deec89d36a23e7948feb074df95362fc8dcbd8ae910ac556a1dee1e755c56b9db5d710c940938ed79bc1895a3646523a58bc55f475a23435a373ecfdd0107fb06734864f79def4e192497362513171530daea81f07fbb9f698afe7e66c6d44db21323144f2657d4a5386a954bb94eef9f64148c33aef6e477eafa2c5c984c01088769e82216310d1827d9bd48645ec23e90de4ef8a8de99e2d351d1df318608566248d80cdc83bdcac382b3c30c670352be87f9069aab5037d0b747208eae9c650109e9796497ff9106d0d1c62e184d83716282870cef61a1ee13d6fc485b521adcce255c96f7d1bca8d8e7e7d454b65783a830bddc9d94092091a268d311ecd84c26010c468c9fb6d41026841ff9f8d7368fa309d4dbea3ea4bbd2feccf94a92cc8a20a226338a8e2126cd16f70eaf15b4fc9be2c3fa19def14e071956a605e9d1ac4162010e23fcb6bd445b7c25afb722250c1acbc061ed964ba9de1326609ae012acdfb96942b2a102a2de99ab96327859a34a2b49a767dbdb62e0a1fb26af60fe44fd496a00106bb0bac77ac68b347645f2fb1ad789ea9bd76fb9b2324f25ae06f97e65246f142df717f662e73948317182c62ce87d79c73def0dba12e5242dfc038382812cfe00126da03c5e56cb15aeeceadc1e17a45753ab4dc0ec7bf6a75ca03143ed4a294f6f61bc3f478a457833e43084ecd7c985bf2f55a55f168aac0e030fc49e845e497101626e9d9a5d9e343f00010000000000000000000000000000000000000000000000000000000000000004c1759167c43f501c2000000000000000000000000000000000000000000000000000000000436f7265020000000000021358cc3ae5c097b213ce3c81979e1b9f9570746aa5ff6cb952589bde862c25ef4392132fb9d4a42157114de8460193bdf3a2fcf81f86a09765f4762fd1107a0086b32d7a0977926a205131d8731d39cbeb8c82b2fd82faed2711d59af0f2499d16e726f6b211b39756c042441be6d8650b69b54ebe715e234354ce5b4d348fb74b958e8966e2ec3dbd4958a7cd66b9590e1c41e0b226937bf9217d1d67fd4e91f574a3bf913953d695260d88bc1aa25a4eee363ef0000ac0076727b35fbea2dac28fee5ccb0fea768eaf45ced136b9d9e24903464ae889f5c8a723fc14f93124b7c738843cbb89e864c862c38cddcccf95d2cc37a4dc036a8d232b48f62cdd4731412f4890da798f6896a3331f64b48c12d1d57fd9cbe7081171aa1be1d36cafe3867910f99c09e347899c19c38192b6e7387ccd768277c17dab1b7a5027c0b3cf178e21ad2e77ae06711549cfbb1f9c7a9d8096e85e1487f35515d02a92753504a8d75471b9f49edb6fbebc898f403e4773e95feb15e80c9a99c8348d",
"01000000020d00ce45474d9e1b1e7790a2d210871e195db53a70ffd6f237cfe70e2686a32859ac43c84a332267a8ef66f59719cf91cc8df0101fd7c36aa1878d5139241660edc0010375cc906156ae530786661c0cd9aef444747bc3d8d5aa84cac6a6d2933d4e1a031cffa30383d4af8131e929d9f203f460b07309a647d6cd32ab1cc7724089392c000452305156cfc90343128f97e499311b5cae174f488ff22fbc09591991a0a73d8e6af3afb8a5968441d3ab8437836407481739e9850ad5c95e6acfcc871e951bc30105a7956eefc23e7c945a1966d5ddbe9e4be376c2f54e45e3d5da88c2f8692510c7429b1ea860ae94d929bd97e84923a18187e777aa3db419813a80deb84cc8d22b00061b2a4f3d2666608e0aa96737689e3ba5793810ff3a52ff28ad57d8efb20967735dc5537a2e43ef10f583d144c12a1606542c207f5b79af08c38656d3ac40713301086b62c8e130af3411b3c0d91b5b50dcb01ed5f293963f901fc36e7b0e50114dce203373b32eb45971cef8288e5d928d0ed51cd86e2a3006b0af6a65c396c009080009e93ab4d2c8228901a5f4525934000b2c26d1dc679a05e47fdf0ff3231d98fbc207103159ff4116df2832eea69b38275283434e6cd4a4af04d25fa7a82990b707010aa643f4cf615dfff06ffd65830f7f6cf6512dabc3690d5d9e210fdc712842dc2708b8b2c22e224c99280cd25e5e8bfb40e3d1c55b8c41774e287c1e2c352aecfc010b89c1e85faa20a30601964ccc6a79c0ae53cfd26fb10863db37783428cd91390a163346558239db3cd9d420cfe423a0df84c84399790e2e308011b4b63e6b8015010ca31dcb564ac81a053a268d8090e72097f94f366711d0c5d13815af1ec7d47e662e2d1bde22678113d15963da100b668ba26c0c325970d07114b83c5698f46097010dc9fda39c0d592d9ed92cd22b5425cc6b37430e236f02d0d1f8a2ef45a00bde26223c0a6eb363c8b25fd3bf57234a1d9364976cefb8360e755a267cbbb674b39501108db01e444ab1003dd8b6c96f8eb77958b40ba7a85fefecf32ad00b7a47c0ae7524216262495977e09c0989dd50f280c21453d3756843608eacd17f4fdfe47600001261025228ef5af837cb060bcd986fcfa84ccef75b3fa100468cfd24e7fadf99163938f3b841a33496c2706d0208faab088bd155b2e20fd74c625bb1cc8c43677a0163c53c409e0c5dfa000100000000000000000000000000000000000000000000000000000000000000046c5a054d7833d1e42000000000000000000000000000000000000000000000000000000000436f7265020000000000031358cc3ae5c097b213ce3c81979e1b9f9570746aa5ff6cb952589bde862c25ef4392132fb9d4a42157114de8460193bdf3a2fcf81f86a09765f4762fd1107a0086b32d7a0977926a205131d8731d39cbeb8c82b2fd82faed2711d59af0f2499d16e726f6b211b39756c042441be6d8650b69b54ebe715e234354ce5b4d348fb74b958e8966e2ec3dbd4958a7cd15e7caf07c4e3dc8e7c469f92c8cd88fb8005a2074a3bf913953d695260d88bc1aa25a4eee363ef0000ac0076727b35fbea2dac28fee5ccb0fea768eaf45ced136b9d9e24903464ae889f5c8a723fc14f93124b7c738843cbb89e864c862c38cddcccf95d2cc37a4dc036a8d232b48f62cdd4731412f4890da798f6896a3331f64b48c12d1d57fd9cbe7081171aa1be1d36cafe3867910f99c09e347899c19c38192b6e7387ccd768277c17dab1b7a5027c0b3cf178e21ad2e77ae06711549cfbb1f9c7a9d8096e85e1487f35515d02a92753504a8d75471b9f49edb6fbebc898f403e4773e95feb15e80c9a99c8348d", 
"01000000020d00ce45474d9e1b1e7790a2d210871e195db53a70ffd6f237cfe70e2686a32859ac43c84a332267a8ef66f59719cf91cc8df0101fd7c36aa1878d5139241660edc0010375cc906156ae530786661c0cd9aef444747bc3d8d5aa84cac6a6d2933d4e1a031cffa30383d4af8131e929d9f203f460b07309a647d6cd32ab1cc7724089392c000452305156cfc90343128f97e499311b5cae174f488ff22fbc09591991a0a73d8e6af3afb8a5968441d3ab8437836407481739e9850ad5c95e6acfcc871e951bc30105a7956eefc23e7c945a1966d5ddbe9e4be376c2f54e45e3d5da88c2f8692510c7429b1ea860ae94d929bd97e84923a18187e777aa3db419813a80deb84cc8d22b00061b2a4f3d2666608e0aa96737689e3ba5793810ff3a52ff28ad57d8efb20967735dc5537a2e43ef10f583d144c12a1606542c207f5b79af08c38656d3ac40713301086b62c8e130af3411b3c0d91b5b50dcb01ed5f293963f901fc36e7b0e50114dce203373b32eb45971cef8288e5d928d0ed51cd86e2a3006b0af6a65c396c009080009e93ab4d2c8228901a5f4525934000b2c26d1dc679a05e47fdf0ff3231d98fbc207103159ff4116df2832eea69b38275283434e6cd4a4af04d25fa7a82990b707010aa643f4cf615dfff06ffd65830f7f6cf6512dabc3690d5d9e210fdc712842dc2708b8b2c22e224c99280cd25e5e8bfb40e3d1c55b8c41774e287c1e2c352aecfc010b89c1e85faa20a30601964ccc6a79c0ae53cfd26fb10863db37783428cd91390a163346558239db3cd9d420cfe423a0df84c84399790e2e308011b4b63e6b8015010ca31dcb564ac81a053a268d8090e72097f94f366711d0c5d13815af1ec7d47e662e2d1bde22678113d15963da100b668ba26c0c325970d07114b83c5698f46097010dc9fda39c0d592d9ed92cd22b5425cc6b37430e236f02d0d1f8a2ef45a00bde26223c0a6eb363c8b25fd3bf57234a1d9364976cefb8360e755a267cbbb674b39501108db01e444ab1003dd8b6c96f8eb77958b40ba7a85fefecf32ad00b7a47c0ae7524216262495977e09c0989dd50f280c21453d3756843608eacd17f4fdfe47600001261025228ef5af837cb060bcd986fcfa84ccef75b3fa100468cfd24e7fadf99163938f3b841a33496c2706d0208faab088bd155b2e20fd74c625bb1cc8c43677a0163c53c409e0c5dfa000100000000000000000000000000000000000000000000000000000000000000046c5a054d7833d1e42000000000000000000000000000000000000000000000000000000000436f7265020000000000031358cc3ae5c097b213ce3c81979e1b9f9570746aa5ff6cb952589bde862c25ef4392132fb9d4a42157114de8460193bdf3a2fcf81f86a09765f4762fd1107a0086b32d7a0977926a205131d8731d39cbeb8c82b2fd82faed2711d59af0f2499d16e726f6b211b39756c042441be6d8650b69b54ebe715e234354ce5b4d348fb74b958e8966e2ec3dbd4958a7cd15e7caf07c4e3dc8e7c469f92c8cd88fb8005a2074a3bf913953d695260d88bc1aa25a4eee363ef0000ac0076727b35fbea2dac28fee5ccb0fea768eaf45ced136b9d9e24903464ae889f5c8a723fc14f93124b7c738843cbb89e864c862c38cddcccf95d2cc37a4dc036a8d232b48f62cdd4731412f4890da798f6896a3331f64b48c12d1d57fd9cbe7081171aa1be1d36cafe3867910f99c09e347899c19c38192b6e7387ccd768277c17dab1b7a5027c0b3cf178e21ad2e77ae06711549cfbb1f9c7a9d8096e85e1487f35515d02a92753504a8d75471b9f49edb6fbebc898f403e4773e95feb15e80c9a99c8348d",
"01000000030d03d4a37a6ff4361d91714730831e9d49785f61624c8f348a9c6c1d82bc1d98cadc5e936338204445c6250bb4928f3f3e165ad47ca03a5d63111168a2de4576856301049a5df10464ea4e1961589fd30fc18d1970a7a2ffaad617e56a0f7777f25275253af7d10a0f0f2494dc6e99fc80e444ab9ebbbee252ded2d5dcb50cbf7a54bb5a01055f4603b553b9ba9e224f9c55c7bca3da00abb10abd19e0081aecd3b352be061a70f79f5f388ebe5190838ef3cd13a2f22459c9a94206883b739c90b40d5d74640006a8fade3997f650a36e46bceb1f609edff201ab32362266f166c5c7da713f6a19590c20b68ed3f0119cb24813c727560ede086b3d610c2d7a1efa66f655bad90900080f5e495a75ea52241c59d145c616bfac01e57182ad8d784cbcc9862ed3afb60c0983ccbc690553961ffcf115a0c917367daada8e60be2cbb8b8008bac6341a8c010935ab11e0eea28b87a1edc5ccce3f1fac25f75b5f640fe6b0673a7cd74513c9dc01c544216cf364cc9993b09fda612e0cd1ced9c00fb668b872a16a64ebb55d27010ab2bc39617a2396e7defa24cd7c22f42dc31f3c42ffcd9d1472b02df8468a4d0563911e8fb6a4b5b0ce0bd505daa53779b08ff660967b31f246126ed7f6f29a7e000bdb6d3fd7b33bdc9ac3992916eb4aacb97e7e21d19649e7fa28d2dd6e337937e4274516a96c13ac7a8895da9f91948ea3a09c25f44b982c62ce8842b58e20c8a9000d3d1b19c8bb000856b6610b9d28abde6c35cb7705c6ca5db711f7be96d60eed9d72cfa402a6bfe8bf0496dbc7af35796fc768da51a067b95941b3712dce8ae1e7010ec80085033157fd1a5628fc0c56267469a86f0e5a66d7dede1ad4ce74ecc3dff95b60307a39c3bfbeedc915075070da30d0395def9635130584f709b3885e1bdc0010fc480eb9ee715a2d151b23722b48b42581d7f4001fc1696c75425040bfc1ffc5394fe418adb2b64bd3dc692efda4cc408163677dbe233b16bcdabb853a20843301118ee9e115e1a0c981f19d0772b850e666591322da742a9a12cce9f52a5665bd474abdd59c580016bee8aae67fdf39b315be2528d12eec3a652910e03cc4c6fa3801129d0d1e2e429e969918ec163d16a7a5b2c6729aa44af5dccad07d25d19891556a79b574f42d9adbd9e2a9ae5a6b8750331d2fccb328dd94c3bf8791ee1bfe85aa00661e99781981faea00010000000000000000000000000000000000000000000000000000000000000004fd4c6c55ec8dfd342000000000000000000000000000000000000000000000000000000000436f726502000000000004135893b5a76c3f739645648885bdccc06cd70a3cd3ff6cb952589bde862c25ef4392132fb9d4a42157114de8460193bdf3a2fcf81f86a09765f4762fd1107a0086b32d7a0977926a205131d8731d39cbeb8c82b2fd82faed2711d59af0f2499d16e726f6b211b39756c042441be6d8650b69b54ebe715e234354ce5b4d348fb74b958e8966e2ec3dbd4958a7cd15e7caf07c4e3dc8e7c469f92c8cd88fb8005a2074a3bf913953d695260d88bc1aa25a4eee363ef0000ac0076727b35fbea2dac28fee5ccb0fea768eaf45ced136b9d9e24903464ae889f5c8a723fc14f93124b7c738843cbb89e864c862c38cddcccf95d2cc37a4dc036a8d232b48f62cdd4731412f4890da798f6896a3331f64b48c12d1d57fd9cbe7081171aa1be1d36cafe3867910f99c09e347899c19c38192b6e7387ccd768277c17dab1b7a5027c0b3cf178e21ad2e77ae06711549cfbb1f9c7a9d8096e85e1487f35515d02a92753504a8d75471b9f49edb6fbebc898f403e4773e95feb15e80c9a99c8348d",
]; ];
const currentIndex = await this.getCurrentGuardianSetIndex(); const currentIndex = await this.getCurrentGuardianSetIndex();
for (let i = currentIndex; i < MAINNET_UPGRADE_VAAS.length; i++) { for (let i = currentIndex; i < MAINNET_UPGRADE_VAAS.length; i++) {

View File

@ -8,9 +8,9 @@ repl.evalCode(
"import { loadHotWallet, Vault } from './src/governance';" + "import { loadHotWallet, Vault } from './src/governance';" +
"import { SuiChain, CosmWasmChain, AptosChain, EvmChain } from './src/chains';" + "import { SuiChain, CosmWasmChain, AptosChain, EvmChain } from './src/chains';" +
"import { SuiPriceFeedContract } from './src/contracts/sui';" + "import { SuiPriceFeedContract } from './src/contracts/sui';" +
"import { CosmWasmWormholeContract, CosmWasmPriceFeedContract } from './src/contracts/cosmwasm';" + "import { WormholeCosmWasmContract, CosmWasmPriceFeedContract } from './src/contracts/cosmwasm';" +
"import { EvmWormholeContract, EvmPriceFeedContract } from './src/contracts/evm';" + "import { WormholeEvmContract, EvmPriceFeedContract } from './src/contracts/evm';" +
"import { AptosWormholeContract, AptosPriceFeedContract } from './src/contracts/aptos';" + "import { WormholeAptosContract, AptosPriceFeedContract } from './src/contracts/aptos';" +
"import { DefaultStore } from './src/store';" + "import { DefaultStore } from './src/store';" +
"import { toPrivateKey } from './src/base';" + "import { toPrivateKey } from './src/base';" +
"DefaultStore" "DefaultStore"

View File

@ -8,14 +8,10 @@ import {
} from "./chains"; } from "./chains";
import { import {
AptosPriceFeedContract, AptosPriceFeedContract,
AptosWormholeContract,
CosmWasmPriceFeedContract, CosmWasmPriceFeedContract,
CosmWasmWormholeContract,
EvmEntropyContract, EvmEntropyContract,
EvmPriceFeedContract, EvmPriceFeedContract,
EvmWormholeContract,
SuiPriceFeedContract, SuiPriceFeedContract,
WormholeContract,
} from "./contracts"; } from "./contracts";
import { Token } from "./token"; import { Token } from "./token";
import { PriceFeedContract, Storable } from "./base"; import { PriceFeedContract, Storable } from "./base";
@ -27,7 +23,6 @@ export class Store {
public chains: Record<string, Chain> = { global: new GlobalChain() }; public chains: Record<string, Chain> = { global: new GlobalChain() };
public contracts: Record<string, PriceFeedContract> = {}; public contracts: Record<string, PriceFeedContract> = {};
public entropy_contracts: Record<string, EvmEntropyContract> = {}; public entropy_contracts: Record<string, EvmEntropyContract> = {};
public wormhole_contracts: Record<string, WormholeContract> = {};
public tokens: Record<string, Token> = {}; public tokens: Record<string, Token> = {};
public vaults: Record<string, Vault> = {}; public vaults: Record<string, Vault> = {};
@ -86,7 +81,6 @@ export class Store {
const contractsByType: Record<string, Storable[]> = {}; const contractsByType: Record<string, Storable[]> = {};
const contracts: Storable[] = Object.values(this.contracts); const contracts: Storable[] = Object.values(this.contracts);
contracts.push(...Object.values(this.entropy_contracts)); contracts.push(...Object.values(this.entropy_contracts));
contracts.push(...Object.values(this.wormhole_contracts));
for (const contract of contracts) { for (const contract of contracts) {
if (!contractsByType[contract.getType()]) { if (!contractsByType[contract.getType()]) {
contractsByType[contract.getType()] = []; contractsByType[contract.getType()] = [];
@ -120,13 +114,10 @@ export class Store {
loadAllContracts() { loadAllContracts() {
const allContractClasses = { const allContractClasses = {
[CosmWasmPriceFeedContract.type]: CosmWasmPriceFeedContract, [CosmWasmPriceFeedContract.type]: CosmWasmPriceFeedContract,
[CosmWasmWormholeContract.type]: CosmWasmWormholeContract,
[SuiPriceFeedContract.type]: SuiPriceFeedContract, [SuiPriceFeedContract.type]: SuiPriceFeedContract,
[EvmPriceFeedContract.type]: EvmPriceFeedContract, [EvmPriceFeedContract.type]: EvmPriceFeedContract,
[AptosPriceFeedContract.type]: AptosPriceFeedContract, [AptosPriceFeedContract.type]: AptosPriceFeedContract,
[AptosWormholeContract.type]: AptosWormholeContract,
[EvmEntropyContract.type]: EvmEntropyContract, [EvmEntropyContract.type]: EvmEntropyContract,
[EvmWormholeContract.type]: EvmWormholeContract,
}; };
this.getYamlFiles(`${this.path}/contracts/`).forEach((yamlFile) => { this.getYamlFiles(`${this.path}/contracts/`).forEach((yamlFile) => {
const parsedArray = parse(readFileSync(yamlFile, "utf-8")); const parsedArray = parse(readFileSync(yamlFile, "utf-8"));
@ -141,16 +132,13 @@ export class Store {
); );
if ( if (
this.contracts[chainContract.getId()] || this.contracts[chainContract.getId()] ||
this.entropy_contracts[chainContract.getId()] || this.entropy_contracts[chainContract.getId()]
this.wormhole_contracts[chainContract.getId()]
) )
throw new Error( throw new Error(
`Multiple contracts with id ${chainContract.getId()} found` `Multiple contracts with id ${chainContract.getId()} found`
); );
if (chainContract instanceof EvmEntropyContract) { if (chainContract instanceof EvmEntropyContract) {
this.entropy_contracts[chainContract.getId()] = chainContract; this.entropy_contracts[chainContract.getId()] = chainContract;
} else if (chainContract instanceof WormholeContract) {
this.wormhole_contracts[chainContract.getId()] = chainContract;
} else { } else {
this.contracts[chainContract.getId()] = chainContract; this.contracts[chainContract.getId()] = chainContract;
} }

View File

@ -74,11 +74,3 @@
prefix: rol prefix: rol
feeDenom: urax feeDenom: urax
type: CosmWasmChain type: CosmWasmChain
- endpoint: https://testnet-burnt-rpc.lavenderfive.com
id: xion_testnet
wormholeChainName: xion_testnet
mainnet: false
gasPrice: "0.025"
prefix: xion
feeDenom: uxion
type: CosmWasmChain

View File

@ -39,6 +39,11 @@
rpcUrl: https://evm-t3.cronos.org rpcUrl: https://evm-t3.cronos.org
networkId: 338 networkId: 338
type: EvmChain type: EvmChain
- id: zksync_goerli
mainnet: false
rpcUrl: https://zksync2-testnet.zksync.dev
networkId: 280
type: EvmChain
- id: canto_testnet - id: canto_testnet
mainnet: false mainnet: false
rpcUrl: https://canto-testnet.plexnode.wtf rpcUrl: https://canto-testnet.plexnode.wtf
@ -46,7 +51,7 @@
type: EvmChain type: EvmChain
- id: polygon_zkevm_testnet - id: polygon_zkevm_testnet
mainnet: false mainnet: false
rpcUrl: https://rpc.public.zkevm-test.net rpcUrl: https://rpc.public.zkevm-test.net/
networkId: 1442 networkId: 1442
type: EvmChain type: EvmChain
- id: polygon_blackberry - id: polygon_blackberry
@ -77,7 +82,7 @@
type: EvmChain type: EvmChain
- id: neon - id: neon
mainnet: true mainnet: true
rpcUrl: https://neon-evm.drpc.org rpcUrl: https://neon-proxy-mainnet.solana.p2p.org
networkId: 245022934 networkId: 245022934
type: EvmChain type: EvmChain
- id: fantom - id: fantom
@ -169,6 +174,11 @@
rpcUrl: https://evm.confluxrpc.org rpcUrl: https://evm.confluxrpc.org
networkId: 1030 networkId: 1030
type: EvmChain type: EvmChain
- id: optimism_goerli
mainnet: false
rpcUrl: https://rpc.ankr.com/optimism_testnet
networkId: 420
type: EvmChain
- id: celo - id: celo
mainnet: true mainnet: true
rpcUrl: https://forno.celo.org rpcUrl: https://forno.celo.org
@ -279,7 +289,7 @@
type: EvmChain type: EvmChain
- id: horizen_eon - id: horizen_eon
mainnet: true mainnet: true
rpcUrl: https://rpc.ankr.com/horizen_eon rpcUrl: https://eon-rpc.horizenlabs.io/ethv1
networkId: 7332 networkId: 7332
type: EvmChain type: EvmChain
- id: horizen_gobi - id: horizen_gobi
@ -317,11 +327,6 @@
rpcUrl: https://goerli.boba.network rpcUrl: https://goerli.boba.network
networkId: 2888 networkId: 2888
type: EvmChain type: EvmChain
- id: boba_sepolia
mainnet: false
rpcUrl: https://sepolia.boba.network
networkId: 28882
type: EvmChain
- id: manta - id: manta
mainnet: true mainnet: true
rpcUrl: https://pacific-rpc.manta.network/http rpcUrl: https://pacific-rpc.manta.network/http
@ -329,7 +334,7 @@
type: EvmChain type: EvmChain
- id: manta_testnet - id: manta_testnet
mainnet: false mainnet: false
rpcUrl: https://manta-pacific-testnet.drpc.org rpcUrl: https://pacific-rpc.testnet.manta.network/http
networkId: 3441005 networkId: 3441005
type: EvmChain type: EvmChain
- id: manta_sepolia - id: manta_sepolia
@ -372,11 +377,6 @@
rpcUrl: https://rpc.zkatana.gelato.digital rpcUrl: https://rpc.zkatana.gelato.digital
networkId: 1261120 networkId: 1261120
type: EvmChain type: EvmChain
- id: astar_zkyoto_testnet
mainnet: false
rpcUrl: https://rpc.startale.com/zkyoto
networkId: 6038361
type: EvmChain
- id: astar_zkevm - id: astar_zkevm
mainnet: true mainnet: true
rpcUrl: https://rpc.startale.com/astar-zkevm rpcUrl: https://rpc.startale.com/astar-zkevm
@ -393,14 +393,14 @@
networkId: 1116 networkId: 1116
type: EvmChain type: EvmChain
nativeToken: CORE nativeToken: CORE
- id: viction - id: tomochain
mainnet: true mainnet: true
rpcUrl: https://viction.blockpi.network/v1/rpc/public rpcUrl: https://rpc.tomochain.com
networkId: 88 networkId: 88
type: EvmChain type: EvmChain
- id: viction_testnet - id: tomochain_testnet
mainnet: false mainnet: false
rpcUrl: https://rpc-testnet.viction.xyz rpcUrl: https://rpc.testnet.tomochain.com
networkId: 89 networkId: 89
type: EvmChain type: EvmChain
- id: mode_testnet - id: mode_testnet
@ -450,7 +450,7 @@
type: EvmChain type: EvmChain
- id: blast_s2_testnet - id: blast_s2_testnet
mainnet: false mainnet: false
rpcUrl: https://sepolia.blast.io rpcUrl: https://rpc.s2.testblast.io/$ENV_BLAST_S2_TESTNET_API_KEY
networkId: 168587773 networkId: 168587773
type: EvmChain type: EvmChain
- id: hedera_testnet - id: hedera_testnet
@ -486,7 +486,7 @@
type: EvmChain type: EvmChain
- id: sei_evm_devnet - id: sei_evm_devnet
mainnet: false mainnet: false
rpcUrl: https://evm-rpc-arctic-1.sei-apis.com rpcUrl: https://evm-devnet.seinetwork.io
networkId: 713715 networkId: 713715
type: EvmChain type: EvmChain
- id: fantom_sonic_testnet - id: fantom_sonic_testnet
@ -502,7 +502,7 @@
- id: idex_xchain_testnet - id: idex_xchain_testnet
mainnet: false mainnet: false
rpcUrl: https://xchain-testnet-rpc.idex.io rpcUrl: https://xchain-testnet-rpc.idex.io
networkId: 64002 networkId: 671276500
type: EvmChain type: EvmChain
- id: injective_inevm_testnet - id: injective_inevm_testnet
mainnet: false mainnet: false
@ -549,28 +549,3 @@
rpcUrl: https://rpc-polynomial-network-testnet-x0tryg8u1c.t.conduit.xyz rpcUrl: https://rpc-polynomial-network-testnet-x0tryg8u1c.t.conduit.xyz
networkId: 80008 networkId: 80008
type: EvmChain type: EvmChain
- id: morph_testnet
mainnet: false
rpcUrl: https://rpc-testnet.morphl2.io
networkId: 2710
type: EvmChain
- id: iota
mainnet: true
rpcUrl: https://json-rpc.evm.iotaledger.net
networkId: 8822
type: EvmChain
- id: flow_previewnet
mainnet: true
rpcUrl: https://previewnet.evm.nodes.onflow.org
networkId: 646
type: EvmChain
- id: olive_testnet
mainnet: false
rpcUrl: https://olive-network-testnet.rpc.caldera.xyz/http
networkId: 8101902
type: EvmChain
- id: taiko_hekla
mainnet: false
rpcUrl: https://rpc.hekla.taiko.xyz/
networkId: 167009
type: EvmChain

View File

@ -1,9 +0,0 @@
- chain: aptos_mainnet
address: "0x5bc11445584a763c1fa7ed39081f1b920954da14e04b32440cba863d03e19625"
type: AptosWormholeContract
- chain: aptos_testnet
address: "0x5bc11445584a763c1fa7ed39081f1b920954da14e04b32440cba863d03e19625"
type: AptosWormholeContract
- chain: movement_move_devnet
address: "0x9236893d6444b208b7e0b3e8d4be4ace90b6d17817ab7d1584e46a33ef5c50c9"
type: AptosWormholeContract

View File

@ -43,6 +43,3 @@
- chain: rol_testnet - chain: rol_testnet
address: rol1pvrwmjuusn9wh34j7y520g8gumuy9xtl3gvprlljfdpwju3x7ucszdyfs8 address: rol1pvrwmjuusn9wh34j7y520g8gumuy9xtl3gvprlljfdpwju3x7ucszdyfs8
type: CosmWasmPriceFeedContract type: CosmWasmPriceFeedContract
- chain: xion_testnet
address: xion1w39ctwxxhxxc2kxarycjxj9rndn65gf8daek7ggarwh3rq3zl0lqqllnmt
type: CosmWasmPriceFeedContract

View File

@ -1,48 +0,0 @@
- chain: rol_testnet
address: rol17p9rzwnnfxcjp32un9ug7yhhzgtkhvl9jfksztgw5uh69wac2pgss2u902
type: CosmWasmWormholeContract
- chain: osmosis
address: osmo1t7qham5kle36rs28se2xd7cckm9mpwzgt65t40lrdf8fcq3837qqjvw80s
type: CosmWasmWormholeContract
- chain: sei_testnet_atlantic_2
address: sei14utt2wp7hamd2qmuz0e5yj728y4u08cm7etujxkc6qprnrla3uwq95jz86
type: CosmWasmWormholeContract
- chain: juno_testnet
address: juno1h7m0xwgu4qh0nrthahpydxzw7klvyd5w8d7jjl675p944ds7jr4sf3ta4l
type: CosmWasmWormholeContract
- chain: sei_testnet_atlantic_2
address: sei1cn8ygrvqk03p5zce3c6rrst7j97qarm33d23rxgme7rzmasddfusw7cpxw
type: CosmWasmWormholeContract
- chain: neutron_testnet_pion_1
address: neutron1nxs2ajn4ejrggfuvqczfx4txghrendcpy3526avg2tsngjktedtspgla8t
type: CosmWasmWormholeContract
- chain: neutron_testnet_pion_1
address: neutron1wtuuak4yt4vyhtv7gt4xnv0m8zfakad5lnz6r7dx8alyydu0sgns67kmvy
type: CosmWasmWormholeContract
- chain: juno_testnet
address: juno1g9xhl5jzhlm6lqc2earxkzyazwl2cshr5cnemxtjy0le64s4w22skukkxj
type: CosmWasmWormholeContract
- chain: osmosis_testnet_5
address: osmo19ah8ak7rgmds40te22xnz7zsdmx5twjulv3sypqm79skkl2ajm4skuhwmf
type: CosmWasmWormholeContract
- chain: sei_pacific_1
address: sei12qq3cufehhsaprjfjrwpx5ltyr43lcrxvf6eaqf0p4jsjpc7semq8p6ewa
type: CosmWasmWormholeContract
- chain: injective_testnet
address: inj1hglkee95shfsl5xxky26hdqxj0mqp54lh7xm59
type: CosmWasmWormholeContract
- chain: neutron
address: neutron178ruq7gf6gk3uus5n8xztj5tsrt5xwxfelw88mc9egfw5d99ktksnk5rsh
type: CosmWasmWormholeContract
- chain: osmosis_testnet_5
address: osmo1llum0y8zc4h2f0rhcdn63xje4mrkdljrve9l40lun9lpeyu2l7cq4phaw6
type: CosmWasmWormholeContract
- chain: injective_testnet
address: inj17sy3vx5dfeva9wx33d09yqdwruntpccnjyw0hj
type: CosmWasmWormholeContract
- chain: injective
address: inj17p9rzwnnfxcjp32un9ug7yhhzgtkhvl9l2q74d
type: CosmWasmWormholeContract
- chain: xion_testnet
address: xion14ycw3tx0hpz3aawmzm6cufs6hx94d64ht5qawd0ej9ug9j2ffzsqmpecys
type: CosmWasmWormholeContract

View File

@ -19,6 +19,9 @@
- chain: blast_s2_testnet - chain: blast_s2_testnet
address: "0x98046Bd286715D3B0BC227Dd7a956b83D8978603" address: "0x98046Bd286715D3B0BC227Dd7a956b83D8978603"
type: EvmEntropyContract type: EvmEntropyContract
- chain: sei_evm_devnet
address: "0x6E3A2a644eeDCf6007d3c7d85F0094Cc1B25B2AE"
type: EvmEntropyContract
- chain: lightlink_phoenix - chain: lightlink_phoenix
address: "0x98046Bd286715D3B0BC227Dd7a956b83D8978603" address: "0x98046Bd286715D3B0BC227Dd7a956b83D8978603"
type: EvmEntropyContract type: EvmEntropyContract
@ -58,9 +61,3 @@
- chain: base - chain: base
address: "0x6E7D74FA7d5c90FEF9F0512987605a6d546181Bb" address: "0x6E7D74FA7d5c90FEF9F0512987605a6d546181Bb"
type: EvmEntropyContract type: EvmEntropyContract
- chain: sei_evm_devnet
address: "0x23f0e8FAeE7bbb405E7A7C3d60138FCfd43d7509"
type: EvmEntropyContract
- chain: taiko_hekla
address: "0x98046Bd286715D3B0BC227Dd7a956b83D8978603"
type: EvmEntropyContract

View File

@ -97,7 +97,7 @@
- chain: coredao - chain: coredao
address: "0xA2aa501b19aff244D90cc15a4Cf739D2725B5729" address: "0xA2aa501b19aff244D90cc15a4Cf739D2725B5729"
type: EvmPriceFeedContract type: EvmPriceFeedContract
- chain: viction - chain: tomochain
address: "0xA2aa501b19aff244D90cc15a4Cf739D2725B5729" address: "0xA2aa501b19aff244D90cc15a4Cf739D2725B5729"
type: EvmPriceFeedContract type: EvmPriceFeedContract
- chain: arbitrum_sepolia - chain: arbitrum_sepolia
@ -142,6 +142,9 @@
- chain: meter_testnet - chain: meter_testnet
address: "0x5a71C07a0588074443545eE0c08fb0375564c3E4" address: "0x5a71C07a0588074443545eE0c08fb0375564c3E4"
type: EvmPriceFeedContract type: EvmPriceFeedContract
- chain: optimism_goerli
address: "0xDd24F84d36BF92C65F92307595335bdFab5Bbd21"
type: EvmPriceFeedContract
- chain: shimmer_testnet - chain: shimmer_testnet
address: "0x8D254a21b3C86D32F7179855531CE99164721933" address: "0x8D254a21b3C86D32F7179855531CE99164721933"
type: EvmPriceFeedContract type: EvmPriceFeedContract
@ -166,6 +169,9 @@
- chain: coredao_testnet - chain: coredao_testnet
address: "0x8D254a21b3C86D32F7179855531CE99164721933" address: "0x8D254a21b3C86D32F7179855531CE99164721933"
type: EvmPriceFeedContract type: EvmPriceFeedContract
- chain: tomochain_testnet
address: "0x5D289Ad1CE59fCC25b6892e7A303dfFf3a9f7167"
type: EvmPriceFeedContract
- chain: cronos_testnet - chain: cronos_testnet
address: "0x36825bf3Fbdf5a29E2d5148bfe7Dcf7B5639e320" address: "0x36825bf3Fbdf5a29E2d5148bfe7Dcf7B5639e320"
type: EvmPriceFeedContract type: EvmPriceFeedContract
@ -196,6 +202,9 @@
- chain: neon_devnet - chain: neon_devnet
address: "0x0708325268dF9F66270F1401206434524814508b" address: "0x0708325268dF9F66270F1401206434524814508b"
type: EvmPriceFeedContract type: EvmPriceFeedContract
- chain: zksync_goerli
address: "0x8739d5024B5143278E2b15Bd9e7C26f6CEc658F1"
type: EvmPriceFeedContract
- chain: optimism_sepolia - chain: optimism_sepolia
address: "0x0708325268dF9F66270F1401206434524814508b" address: "0x0708325268dF9F66270F1401206434524814508b"
type: EvmPriceFeedContract type: EvmPriceFeedContract
@ -239,7 +248,7 @@
address: "0x2880aB155794e7179c9eE2e38200202908C17B43" address: "0x2880aB155794e7179c9eE2e38200202908C17B43"
type: EvmPriceFeedContract type: EvmPriceFeedContract
- chain: sei_evm_devnet - chain: sei_evm_devnet
address: "0xe9d69CdD6Fe41e7B621B4A688C5D1a68cB5c8ADc" address: "0x23f0e8FAeE7bbb405E7A7C3d60138FCfd43d7509"
type: EvmPriceFeedContract type: EvmPriceFeedContract
- chain: lightlink_pegasus_testnet - chain: lightlink_pegasus_testnet
address: "0x5D289Ad1CE59fCC25b6892e7A303dfFf3a9f7167" address: "0x5D289Ad1CE59fCC25b6892e7A303dfFf3a9f7167"
@ -254,7 +263,7 @@
address: "0xA2aa501b19aff244D90cc15a4Cf739D2725B5729" address: "0xA2aa501b19aff244D90cc15a4Cf739D2725B5729"
type: EvmPriceFeedContract type: EvmPriceFeedContract
- chain: idex_xchain_testnet - chain: idex_xchain_testnet
address: "0x2880aB155794e7179c9eE2e38200202908C17B43" address: "0xA2aa501b19aff244D90cc15a4Cf739D2725B5729"
type: EvmPriceFeedContract type: EvmPriceFeedContract
- chain: injective_inevm_testnet - chain: injective_inevm_testnet
address: "0xA2aa501b19aff244D90cc15a4Cf739D2725B5729" address: "0xA2aa501b19aff244D90cc15a4Cf739D2725B5729"
@ -310,15 +319,3 @@
- chain: linea_sepolia - chain: linea_sepolia
address: "0xA2aa501b19aff244D90cc15a4Cf739D2725B5729" address: "0xA2aa501b19aff244D90cc15a4Cf739D2725B5729"
type: EvmPriceFeedContract type: EvmPriceFeedContract
- chain: morph_testnet
address: "0xA2aa501b19aff244D90cc15a4Cf739D2725B5729"
type: EvmPriceFeedContract
- chain: flow_previewnet
address: "0x2880aB155794e7179c9eE2e38200202908C17B43"
type: EvmPriceFeedContract
- chain: taiko_hekla
address: "0x2880aB155794e7179c9eE2e38200202908C17B43"
type: EvmPriceFeedContract
- chain: olive_testnet
address: "0x41c9e39574F40Ad34c79f1C99B66A45eFB830d4c"
type: EvmPriceFeedContract

View File

@ -1,303 +0,0 @@
- chain: polygon
address: "0x35a58BeeE77a2Ad547FcDed7e8CB1c6e19746b13"
type: EvmWormholeContract
- chain: aurora
address: "0x41955476936DdA8d0fA98b8d1778172F7E4fCcA1"
type: EvmWormholeContract
- chain: fantom
address: "0x35a58BeeE77a2Ad547FcDed7e8CB1c6e19746b13"
type: EvmWormholeContract
- chain: optimism
address: "0x87047526937246727E4869C5f76A347160e08672"
type: EvmWormholeContract
- chain: arbitrum
address: "0xEbe57e8045F2F230872523bbff7374986E45C486"
type: EvmWormholeContract
- chain: gnosis
address: "0x26DD80569a8B23768A1d80869Ed7339e07595E85"
type: EvmWormholeContract
- chain: polygon_zkevm
address: "0x41955476936DdA8d0fA98b8d1778172F7E4fCcA1"
type: EvmWormholeContract
- chain: conflux_espace
address: "0xDd24F84d36BF92C65F92307595335bdFab5Bbd21"
type: EvmWormholeContract
- chain: bsc
address: "0x41955476936DdA8d0fA98b8d1778172F7E4fCcA1"
type: EvmWormholeContract
- chain: kava
address: "0x0708325268dF9F66270F1401206434524814508b"
type: EvmWormholeContract
- chain: avalanche
address: "0x41955476936DdA8d0fA98b8d1778172F7E4fCcA1"
type: EvmWormholeContract
- chain: canto
address: "0xf0a1b566B55e0A0CB5BeF52Eb2a57142617Bee67"
type: EvmWormholeContract
- chain: linea
address: "0x0708325268dF9F66270F1401206434524814508b"
type: EvmWormholeContract
- chain: neon
address: "0xCd76c50c3210C5AaA9c39D53A4f95BFd8b1a3a19"
type: EvmWormholeContract
- chain: mantle
address: "0xf0a1b566B55e0A0CB5BeF52Eb2a57142617Bee67"
type: EvmWormholeContract
- chain: meter
address: "0xfA133831D350A2A5997d6db182B6Ca9e8ad4191B"
type: EvmWormholeContract
- chain: kcc
address: "0x41955476936DdA8d0fA98b8d1778172F7E4fCcA1"
type: EvmWormholeContract
- chain: eos
address: "0xEbe57e8045F2F230872523bbff7374986E45C486"
type: EvmWormholeContract
- chain: celo
address: "0x41955476936DdA8d0fA98b8d1778172F7E4fCcA1"
type: EvmWormholeContract
- chain: wemix
address: "0xEbe57e8045F2F230872523bbff7374986E45C486"
type: EvmWormholeContract
- chain: base
address: "0x87047526937246727E4869C5f76A347160e08672"
type: EvmWormholeContract
- chain: zksync
address: "0x53cD6960888cA09361506678adfE267b4CE81A08"
type: EvmWormholeContract
- chain: horizen_eon
address: "0x8250f4aF4B972684F7b336503E2D6dFeDeB1487a"
type: EvmWormholeContract
- chain: shimmer
address: "0x8250f4aF4B972684F7b336503E2D6dFeDeB1487a"
type: EvmWormholeContract
- chain: boba
address: "0x26DD80569a8B23768A1d80869Ed7339e07595E85"
type: EvmWormholeContract
- chain: manta
address: "0x8250f4aF4B972684F7b336503E2D6dFeDeB1487a"
type: EvmWormholeContract
- chain: scroll
address: "0x8250f4aF4B972684F7b336503E2D6dFeDeB1487a"
type: EvmWormholeContract
- chain: chiliz
address: "0x8250f4aF4B972684F7b336503E2D6dFeDeB1487a"
type: EvmWormholeContract
- chain: coredao
address: "0x8250f4aF4B972684F7b336503E2D6dFeDeB1487a"
type: EvmWormholeContract
- chain: viction
address: "0x8250f4aF4B972684F7b336503E2D6dFeDeB1487a"
type: EvmWormholeContract
- chain: arbitrum_sepolia
address: "0xfA25E653b44586dBbe27eE9d252192F0e4956683"
type: EvmWormholeContract
- chain: fuji
address: "0x5744Cbf430D99456a0A8771208b674F27f8EF0Fb"
type: EvmWormholeContract
- chain: canto_testnet
address: "0x41c9e39574F40Ad34c79f1C99B66A45eFB830d4c"
type: EvmWormholeContract
- chain: aurora_testnet
address: "0x2880aB155794e7179c9eE2e38200202908C17B43"
type: EvmWormholeContract
- chain: chiado
address: "0x87047526937246727E4869C5f76A347160e08672"
type: EvmWormholeContract
- chain: kava_testnet
address: "0xD458261E832415CFd3BAE5E416FdF3230ce6F134"
type: EvmWormholeContract
- chain: conflux_espace_testnet
address: "0xEbe57e8045F2F230872523bbff7374986E45C486"
type: EvmWormholeContract
- chain: celo_alfajores_testnet
address: "0x2880aB155794e7179c9eE2e38200202908C17B43"
type: EvmWormholeContract
- chain: bsc_testnet
address: "0xe9d69CdD6Fe41e7B621B4A688C5D1a68cB5c8ADc"
type: EvmWormholeContract
- chain: kcc_testnet
address: "0x2880aB155794e7179c9eE2e38200202908C17B43"
type: EvmWormholeContract
- chain: eos_testnet
address: "0x8D254a21b3C86D32F7179855531CE99164721933"
type: EvmWormholeContract
- chain: meter_testnet
address: "0x257c3B61102442C1c3286Efbd24242322d002920"
type: EvmWormholeContract
- chain: shimmer_testnet
address: "0x98046Bd286715D3B0BC227Dd7a956b83D8978603"
type: EvmWormholeContract
- chain: scroll_sepolia
address: "0x36825bf3Fbdf5a29E2d5148bfe7Dcf7B5639e320"
type: EvmWormholeContract
- chain: boba_goerli
address: "0x98046Bd286715D3B0BC227Dd7a956b83D8978603"
type: EvmWormholeContract
- chain: manta_testnet
address: "0x98046Bd286715D3B0BC227Dd7a956b83D8978603"
type: EvmWormholeContract
- chain: chiliz_spicy
address: "0x98046Bd286715D3B0BC227Dd7a956b83D8978603"
type: EvmWormholeContract
- chain: coredao_testnet
address: "0x98046Bd286715D3B0BC227Dd7a956b83D8978603"
type: EvmWormholeContract
- chain: cronos_testnet
address: "0x74f09cb3c7e2A01865f424FD14F6dc9A14E3e94E"
type: EvmWormholeContract
- chain: wemix_testnet
address: "0x41c9e39574F40Ad34c79f1C99B66A45eFB830d4c"
type: EvmWormholeContract
- chain: evmos_testnet
address: "0x2880aB155794e7179c9eE2e38200202908C17B43"
type: EvmWormholeContract
- chain: zetachain_testnet
address: "0x8D254a21b3C86D32F7179855531CE99164721933"
type: EvmWormholeContract
- chain: neon_devnet
address: "0x23f0e8FAeE7bbb405E7A7C3d60138FCfd43d7509"
type: EvmWormholeContract
- chain: optimism_sepolia
address: "0x8D254a21b3C86D32F7179855531CE99164721933"
type: EvmWormholeContract
- chain: mode
address: "0x8250f4aF4B972684F7b336503E2D6dFeDeB1487a"
type: EvmWormholeContract
- chain: mode_testnet
address: "0x8250f4aF4B972684F7b336503E2D6dFeDeB1487a"
type: EvmWormholeContract
- chain: bttc_testnet
address: "0x8250f4aF4B972684F7b336503E2D6dFeDeB1487a"
type: EvmWormholeContract
- chain: bttc
address: "0x8250f4aF4B972684F7b336503E2D6dFeDeB1487a"
type: EvmWormholeContract
- chain: zksync_sepolia
address: "0xc10F5BE78E464BB0E1f534D66E5A6ecaB150aEFa"
type: EvmWormholeContract
- chain: base_sepolia
address: "0x8250f4aF4B972684F7b336503E2D6dFeDeB1487a"
type: EvmWormholeContract
- chain: movement_evm_devnet
address: "0x8250f4aF4B972684F7b336503E2D6dFeDeB1487a"
type: EvmWormholeContract
- chain: zkfair_testnet
address: "0x8250f4aF4B972684F7b336503E2D6dFeDeB1487a"
type: EvmWormholeContract
- chain: blast_s2_testnet
address: "0x8250f4aF4B972684F7b336503E2D6dFeDeB1487a"
type: EvmWormholeContract
- chain: zkfair
address: "0x8250f4aF4B972684F7b336503E2D6dFeDeB1487a"
type: EvmWormholeContract
- chain: filecoin_calibration
address: "0x8250f4aF4B972684F7b336503E2D6dFeDeB1487a"
type: EvmWormholeContract
- chain: filecoin
address: "0x8250f4aF4B972684F7b336503E2D6dFeDeB1487a"
type: EvmWormholeContract
- chain: zetachain
address: "0xb27e5ca259702f209a29225d0eDdC131039C9933"
type: EvmWormholeContract
- chain: sei_evm_devnet
address: "0x66E9cBa5529824a03B5Bc9931d9c63637101D0F7"
type: EvmWormholeContract
- chain: lightlink_pegasus_testnet
address: "0x5f3c61944CEb01B3eAef861251Fb1E0f14b848fb"
type: EvmWormholeContract
- chain: fantom_sonic_testnet
address: "0x74f09cb3c7e2A01865f424FD14F6dc9A14E3e94E"
type: EvmWormholeContract
- chain: dela_deperp_testnet
address: "0xb27e5ca259702f209a29225d0eDdC131039C9933"
type: EvmWormholeContract
- chain: lightlink_phoenix
address: "0xb27e5ca259702f209a29225d0eDdC131039C9933"
type: EvmWormholeContract
- chain: injective_inevm_testnet
address: "0xb27e5ca259702f209a29225d0eDdC131039C9933"
type: EvmWormholeContract
- chain: injective_inevm
address: "0xb27e5ca259702f209a29225d0eDdC131039C9933"
type: EvmWormholeContract
- chain: hedera_testnet
address: "0xb27e5ca259702f209a29225d0eDdC131039C9933"
type: EvmWormholeContract
- chain: hedera
address: "0xb27e5ca259702f209a29225d0eDdC131039C9933"
type: EvmWormholeContract
- chain: berachain_testnet
address: "0x74f09cb3c7e2A01865f424FD14F6dc9A14E3e94E"
type: EvmWormholeContract
- chain: blast
address: "0xb27e5ca259702f209a29225d0eDdC131039C9933"
type: EvmWormholeContract
- chain: astar_zkevm
address: "0xb27e5ca259702f209a29225d0eDdC131039C9933"
type: EvmWormholeContract
- chain: merlin_testnet
address: "0xb27e5ca259702f209a29225d0eDdC131039C9933"
type: EvmWormholeContract
- chain: mantle_sepolia
address: "0x66E9cBa5529824a03B5Bc9931d9c63637101D0F7"
type: EvmWormholeContract
- chain: merlin
address: "0xb27e5ca259702f209a29225d0eDdC131039C9933"
type: EvmWormholeContract
- chain: manta_sepolia
address: "0xb27e5ca259702f209a29225d0eDdC131039C9933"
type: EvmWormholeContract
- chain: polygon_blackberry
address: "0xb27e5ca259702f209a29225d0eDdC131039C9933"
type: EvmWormholeContract
- chain: arbitrum_blueberry
address: "0xb27e5ca259702f209a29225d0eDdC131039C9933"
type: EvmWormholeContract
- chain: optimism_celestia_raspberry
address: "0xb27e5ca259702f209a29225d0eDdC131039C9933"
type: EvmWormholeContract
- chain: polynomial_testnet
address: "0x87047526937246727E4869C5f76A347160e08672"
type: EvmWormholeContract
- chain: parallel_testnet
address: "0x87047526937246727E4869C5f76A347160e08672"
type: EvmWormholeContract
- chain: parallel
address: "0xb27e5ca259702f209a29225d0eDdC131039C9933"
type: EvmWormholeContract
- chain: linea_sepolia
address: "0xb27e5ca259702f209a29225d0eDdC131039C9933"
type: EvmWormholeContract
- chain: morph_testnet
address: "0xb27e5ca259702f209a29225d0eDdC131039C9933"
type: EvmWormholeContract
- chain: cronos
address: "0x41955476936DdA8d0fA98b8d1778172F7E4fCcA1"
type: EvmWormholeContract
- chain: ronin
address: "0x41955476936DdA8d0fA98b8d1778172F7E4fCcA1"
type: EvmWormholeContract
- chain: saigon
address: "0x36825bf3Fbdf5a29E2d5148bfe7Dcf7B5639e320"
type: EvmWormholeContract
- chain: ethereum
address: "0x74f09cb3c7e2A01865f424FD14F6dc9A14E3e94E"
type: EvmWormholeContract
- chain: mumbai
address: "0x876A4e56A51386aBb1a5ab5d62f77E814372f0C7"
type: EvmWormholeContract
- chain: fantom_testnet
address: "0xe9d69CdD6Fe41e7B621B4A688C5D1a68cB5c8ADc"
type: EvmWormholeContract
- chain: sepolia
address: "0x41c9e39574F40Ad34c79f1C99B66A45eFB830d4c"
type: EvmWormholeContract
- chain: linea_goerli
address: "0xfA25E653b44586dBbe27eE9d252192F0e4956683"
type: EvmWormholeContract
- chain: taiko_hekla
address: "0xb27e5ca259702f209a29225d0eDdC131039C9933"
type: EvmWormholeContract
- chain: olive_testnet
address: "0x74f09cb3c7e2A01865f424FD14F6dc9A14E3e94E"
type: EvmWormholeContract

View File

@ -1,6 +1,6 @@
{ {
"name": "@pythnetwork/express-relay-evm-js", "name": "@pythnetwork/express-relay-evm-js",
"version": "0.4.1", "version": "0.1.1",
"lockfileVersion": 3, "lockfileVersion": 3,
"requires": true, "requires": true,
"packages": { "packages": {

View File

@ -1,6 +1,6 @@
{ {
"name": "@pythnetwork/express-relay-evm-js", "name": "@pythnetwork/express-relay-evm-js",
"version": "0.4.1", "version": "0.2.0",
"description": "Utilities for interacting with the express relay protocol", "description": "Utilities for interacting with the express relay protocol",
"homepage": "https://github.com/pyth-network/pyth-crosschain/tree/main/express_relay/sdk/js", "homepage": "https://github.com/pyth-network/pyth-crosschain/tree/main/express_relay/sdk/js",
"author": "Douro Labs", "author": "Douro Labs",
@ -42,11 +42,10 @@
"ws": "^8.16.0" "ws": "^8.16.0"
}, },
"devDependencies": { "devDependencies": {
"@pythnetwork/pyth-evm-js": "*",
"@types/node": "^20.12.7",
"@types/yargs": "^17.0.10", "@types/yargs": "^17.0.10",
"@typescript-eslint/eslint-plugin": "^5.21.0", "@typescript-eslint/eslint-plugin": "^5.21.0",
"@typescript-eslint/parser": "^5.21.0", "@typescript-eslint/parser": "^5.21.0",
"@pythnetwork/pyth-evm-js": "*",
"eslint": "^8.56.0", "eslint": "^8.56.0",
"jest": "^27.5.1", "jest": "^27.5.1",
"prettier": "^2.6.2", "prettier": "^2.6.2",

View File

@ -23,17 +23,10 @@ class SimpleSearcher {
} }
async bidStatusHandler(bidStatus: BidStatusUpdate) { async bidStatusHandler(bidStatus: BidStatusUpdate) {
let resultDetails = "";
if (bidStatus.type == "submitted") {
resultDetails = `, transaction ${bidStatus.result}, index ${bidStatus.index} of multicall`;
} else if (bidStatus.type == "lost") {
resultDetails = `, transaction ${bidStatus.result}`;
}
console.log( console.log(
`Bid status for bid ${bidStatus.id}: ${bidStatus.type.replaceAll( `Bid status for bid ${bidStatus.id}: ${bidStatus.status} ${
"_", bidStatus.status == "submitted" ? bidStatus.result : ""
" " }`
)}${resultDetails}`
); );
} }
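For orientation, here is a minimal standalone sketch of the newer handler shape shown above (main branch side), assuming a BidStatusUpdate whose discriminant is `type` and which carries `result` and `index` only on the variants that use them; the type declaration below is inferred from this example and is not the SDK's own definition.

// Sketch only: shape assumed from the example above.
type BidStatusUpdateSketch =
  | { id: string; type: "pending" }
  | { id: string; type: "simulation_failed" }
  | { id: string; type: "submitted"; result: string; index: number }
  | { id: string; type: "lost"; result: string };

function describeBidStatus(bidStatus: BidStatusUpdateSketch): string {
  let resultDetails = "";
  if (bidStatus.type === "submitted") {
    resultDetails = `, transaction ${bidStatus.result}, index ${bidStatus.index} of multicall`;
  } else if (bidStatus.type === "lost") {
    resultDetails = `, transaction ${bidStatus.result}`;
  }
  return `Bid status for bid ${bidStatus.id}: ${bidStatus.type.replaceAll("_", " ")}${resultDetails}`;
}

Narrowing on the `type` field keeps the `result` and `index` accesses type-safe for the submitted and lost variants.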

View File

@ -2,8 +2,15 @@ import type { components, paths } from "./serverTypes";
import createClient, { import createClient, {
ClientOptions as FetchClientOptions, ClientOptions as FetchClientOptions,
} from "openapi-fetch"; } from "openapi-fetch";
import { Address, Hex, isAddress, isHex } from "viem"; import {
import { privateKeyToAccount, signTypedData } from "viem/accounts"; Address,
encodeAbiParameters,
Hex,
isAddress,
isHex,
keccak256,
} from "viem";
import { privateKeyToAccount, sign, signatureToHex } from "viem/accounts";
import WebSocket from "isomorphic-ws"; import WebSocket from "isomorphic-ws";
import { import {
Bid, Bid,
@ -11,7 +18,6 @@ import {
BidParams, BidParams,
BidStatusUpdate, BidStatusUpdate,
Opportunity, Opportunity,
EIP712Domain,
OpportunityBid, OpportunityBid,
OpportunityParams, OpportunityParams,
TokenAmount, TokenAmount,
@ -130,17 +136,6 @@ export class Client {
}); });
} }
private convertEIP712Domain(
eip712Domain: components["schemas"]["EIP712Domain"]
): EIP712Domain {
return {
name: eip712Domain.name,
version: eip712Domain.version,
verifyingContract: checkAddress(eip712Domain.verifying_contract),
chainId: BigInt(eip712Domain.chain_id),
};
}
/** /**
* Converts an opportunity from the server to the client format * Converts an opportunity from the server to the client format
* Returns undefined if the opportunity version is not supported * Returns undefined if the opportunity version is not supported
@ -164,7 +159,6 @@ export class Client {
targetCallValue: BigInt(opportunity.target_call_value), targetCallValue: BigInt(opportunity.target_call_value),
sellTokens: opportunity.sell_tokens.map(checkTokenQty), sellTokens: opportunity.sell_tokens.map(checkTokenQty),
buyTokens: opportunity.buy_tokens.map(checkTokenQty), buyTokens: opportunity.buy_tokens.map(checkTokenQty),
eip712Domain: this.convertEIP712Domain(opportunity.eip_712_domain),
}; };
} }
@ -299,49 +293,62 @@ export class Client {
bidParams: BidParams, bidParams: BidParams,
privateKey: Hex privateKey: Hex
): Promise<OpportunityBid> { ): Promise<OpportunityBid> {
const types = {
ExecutionParams: [
{ name: "sellTokens", type: "TokenAmount[]" },
{ name: "buyTokens", type: "TokenAmount[]" },
{ name: "executor", type: "address" },
{ name: "targetContract", type: "address" },
{ name: "targetCalldata", type: "bytes" },
{ name: "targetCallValue", type: "uint256" },
{ name: "validUntil", type: "uint256" },
{ name: "bidAmount", type: "uint256" },
],
TokenAmount: [
{ name: "token", type: "address" },
{ name: "amount", type: "uint256" },
],
};
const account = privateKeyToAccount(privateKey); const account = privateKeyToAccount(privateKey);
const signature = await signTypedData({ const convertTokenQty = ({ token, amount }: TokenAmount): [Hex, bigint] => [
privateKey, token,
domain: { amount,
...opportunity.eip712Domain, ];
chainId: Number(opportunity.eip712Domain.chainId), const payload = encodeAbiParameters(
[
{
name: "repayTokens",
type: "tuple[]",
components: [
{
type: "address",
}, },
types, {
primaryType: "ExecutionParams", type: "uint256",
message: {
sellTokens: opportunity.sellTokens,
buyTokens: opportunity.buyTokens,
executor: account.address,
targetContract: opportunity.targetContract,
targetCalldata: opportunity.targetCalldata,
targetCallValue: opportunity.targetCallValue,
validUntil: bidParams.validUntil,
bidAmount: bidParams.amount,
}, },
}); ],
},
{
name: "receiptTokens",
type: "tuple[]",
components: [
{
type: "address",
},
{
type: "uint256",
},
],
},
{ name: "contract", type: "address" },
{ name: "calldata", type: "bytes" },
{ name: "value", type: "uint256" },
{ name: "bid", type: "uint256" },
{ name: "validUntil", type: "uint256" },
],
[
opportunity.sellTokens.map(convertTokenQty),
opportunity.buyTokens.map(convertTokenQty),
opportunity.targetContract,
opportunity.targetCalldata,
opportunity.targetCallValue,
bidParams.amount,
bidParams.validUntil,
]
);
const msgHash = keccak256(payload);
const hash = signatureToHex(await sign({ hash: msgHash, privateKey }));
return { return {
permissionKey: opportunity.permissionKey, permissionKey: opportunity.permissionKey,
bid: bidParams, bid: bidParams,
executor: account.address, executor: account.address,
signature, signature: hash,
opportunityId: opportunity.opportunityId, opportunityId: opportunity.opportunityId,
}; };
} }
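As a sanity check on the typed-data signing above, the signature can be recovered off-chain with viem. A rough sketch follows, assuming the SDK's Opportunity and BidParams types and the same ExecutionParams/TokenAmount type definitions used when signing; the helper name and the "./types" import path are illustrative, not part of the SDK.

import { recoverTypedDataAddress, type Address, type Hex } from "viem";
import { BidParams, Opportunity } from "./types"; // assumed SDK types module

// Same EIP-712 type definitions as in the signing code above.
const executionParamsTypes = {
  ExecutionParams: [
    { name: "sellTokens", type: "TokenAmount[]" },
    { name: "buyTokens", type: "TokenAmount[]" },
    { name: "executor", type: "address" },
    { name: "targetContract", type: "address" },
    { name: "targetCalldata", type: "bytes" },
    { name: "targetCallValue", type: "uint256" },
    { name: "validUntil", type: "uint256" },
    { name: "bidAmount", type: "uint256" },
  ],
  TokenAmount: [
    { name: "token", type: "address" },
    { name: "amount", type: "uint256" },
  ],
} as const;

// Recovers the signer of an opportunity bid and compares it to the expected executor.
async function verifyOpportunityBidSignature(
  opportunity: Opportunity,
  bidParams: BidParams,
  executor: Address,
  signature: Hex
): Promise<boolean> {
  const recovered = await recoverTypedDataAddress({
    domain: {
      ...opportunity.eip712Domain,
      chainId: Number(opportunity.eip712Domain.chainId),
    },
    types: executionParamsTypes,
    primaryType: "ExecutionParams",
    message: {
      sellTokens: opportunity.sellTokens,
      buyTokens: opportunity.buyTokens,
      executor,
      targetContract: opportunity.targetContract,
      targetCalldata: opportunity.targetCalldata,
      targetCallValue: opportunity.targetCallValue,
      validUntil: bidParams.validUntil,
      bidAmount: bidParams.amount,
    },
    signature,
  });
  return recovered.toLowerCase() === executor.toLowerCase();
}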

View File

@ -10,7 +10,7 @@ export interface paths {
* @description Bid on a specific permission key for a specific chain. * @description Bid on a specific permission key for a specific chain.
* *
* Your bid will be simulated and verified by the server. Depending on the outcome of the auction, a transaction * Your bid will be simulated and verified by the server. Depending on the outcome of the auction, a transaction
* containing the contract call will be sent to the blockchain expecting the bid amount to be paid after the call. * containing the targetContract call will be sent to the blockchain expecting the bid amount to be paid after the call.
*/ */
post: operations["bid"]; post: operations["bid"];
}; };
@ -58,7 +58,7 @@ export interface components {
amount: string; amount: string;
/** /**
* @description The chain id to bid on. * @description The chain id to bid on.
* @example op_sepolia * @example sepolia
*/ */
chain_id: string; chain_id: string;
/** /**
@ -67,12 +67,12 @@ export interface components {
*/ */
permission_key: string; permission_key: string;
/** /**
* @description Calldata for the contract call. * @description Calldata for the targetContract call.
* @example 0xdeadbeef * @example 0xdeadbeef
*/ */
target_calldata: string; target_calldata: string;
/** /**
* @description The contract address to call. * @description The targetContract address to call.
* @example 0xcA11bde05977b3631167028862bE2a173976CA11 * @example 0xcA11bde05977b3631167028862bE2a173976CA11
*/ */
target_contract: string; target_contract: string;
@ -88,28 +88,20 @@ export interface components {
BidStatus: BidStatus:
| { | {
/** @enum {string} */ /** @enum {string} */
type: "pending"; status: "pending";
}
| {
/** @enum {string} */
type: "simulation_failed";
} }
| { | {
/** /**
* Format: int32 * @description The bid won the auction and was submitted to the chain in a transaction with the given hash
* @example 1 * @example 0x103d4fbd777a36311b5161f2062490f761f25b67406badb2bace62bb170aa4e3
*/ */
index: number;
/** @example 0x103d4fbd777a36311b5161f2062490f761f25b67406badb2bace62bb170aa4e3 */
result: string; result: string;
/** @enum {string} */ /** @enum {string} */
type: "submitted"; status: "submitted";
} }
| { | {
/** @example 0x103d4fbd777a36311b5161f2062490f761f25b67406badb2bace62bb170aa4e3 */
result: string;
/** @enum {string} */ /** @enum {string} */
type: "lost"; status: "lost";
}; };
BidStatusWithId: { BidStatusWithId: {
bid_status: components["schemas"]["BidStatus"]; bid_status: components["schemas"]["BidStatus"];
@ -148,28 +140,6 @@ export interface components {
ClientRequest: components["schemas"]["ClientMessage"] & { ClientRequest: components["schemas"]["ClientMessage"] & {
id: string; id: string;
}; };
EIP712Domain: {
/**
* @description The network chain id parameter for EIP712 domain.
* @example 31337
*/
chain_id: string;
/**
* @description The name parameter for the EIP712 domain.
* @example OpportunityAdapter
*/
name: string;
/**
* @description The verifying contract address parameter for the EIP712 domain.
* @example 0xcA11bde05977b3631167028862bE2a173976CA11
*/
verifying_contract: string;
/**
* @description The version parameter for the EIP712 domain.
* @example 1
*/
version: string;
};
ErrorBodyResponse: { ErrorBodyResponse: {
error: string; error: string;
}; };
@ -192,7 +162,7 @@ export interface components {
/** @example 0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef12 */ /** @example 0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef12 */
signature: string; signature: string;
/** /**
* @description The latest unix timestamp in seconds until which the bid is valid * @description How long the bid will be valid for.
* @example 1000000000000000000 * @example 1000000000000000000
*/ */
valid_until: string; valid_until: string;
@ -204,14 +174,14 @@ export interface components {
/** /**
* @description Opportunity parameters needed for on-chain execution * @description Opportunity parameters needed for on-chain execution
* If a searcher signs the opportunity and has approved enough tokens to the opportunity adapter, * If a searcher signs the opportunity and has approved enough tokens to the opportunity adapter,
* by calling this target contract with the given target calldata and structures, they will * by calling this target targetContract with the given target targetCalldata and structures, they will
* send the tokens specified in the sell_tokens field and receive the tokens specified in the buy_tokens field. * send the tokens specified in the sell_tokens field and receive the tokens specified in the buy_tokens field.
*/ */
OpportunityParamsV1: { OpportunityParamsV1: {
buy_tokens: components["schemas"]["TokenAmount"][]; buy_tokens: components["schemas"]["TokenAmount"][];
/** /**
* @description The chain id where the opportunity will be executed. * @description The chain id where the opportunity will be executed.
* @example op_sepolia * @example sepolia
*/ */
chain_id: string; chain_id: string;
/** /**
@ -221,17 +191,17 @@ export interface components {
permission_key: string; permission_key: string;
sell_tokens: components["schemas"]["TokenAmount"][]; sell_tokens: components["schemas"]["TokenAmount"][];
/** /**
* @description The value to send with the contract call. * @description The targetCallValue to send with the targetContract call.
* @example 1 * @example 1
*/ */
target_call_value: string; target_call_value: string;
/** /**
* @description Calldata for the target contract call. * @description Calldata for the target targetContract call.
* @example 0xdeadbeef * @example 0xdeadbeef
*/ */
target_calldata: string; target_calldata: string;
/** /**
* @description The contract address to call for execution of the opportunity. * @description The targetContract address to call for execution of the opportunity.
* @example 0xcA11bde05977b3631167028862bE2a173976CA11 * @example 0xcA11bde05977b3631167028862bE2a173976CA11
*/ */
target_contract: string; target_contract: string;
@ -242,11 +212,11 @@ export interface components {
version: "v1"; version: "v1";
}) & { }) & {
/** /**
* @description Creation time of the opportunity (in microseconds since the Unix epoch) * Format: int64
* @example 1700000000000000 * @description Creation time of the opportunity
* @example 1700000000
*/ */
creation_time: number; creation_time: number;
eip_712_domain: components["schemas"]["EIP712Domain"];
/** /**
* @description The opportunity unique id * @description The opportunity unique id
* @example obo3ee3e-58cc-4372-a567-0e02b2c3d479 * @example obo3ee3e-58cc-4372-a567-0e02b2c3d479
@ -290,7 +260,7 @@ export interface components {
*/ */
amount: string; amount: string;
/** /**
* @description Token contract address * @description Token targetContract address
* @example 0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2 * @example 0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2
*/ */
token: string; token: string;
@ -325,11 +295,11 @@ export interface components {
version: "v1"; version: "v1";
}) & { }) & {
/** /**
* @description Creation time of the opportunity (in microseconds since the Unix epoch) * Format: int64
* @example 1700000000000000 * @description Creation time of the opportunity
* @example 1700000000
*/ */
creation_time: number; creation_time: number;
eip_712_domain: components["schemas"]["EIP712Domain"];
/** /**
* @description The opportunity unique id * @description The opportunity unique id
* @example obo3ee3e-58cc-4372-a567-0e02b2c3d479 * @example obo3ee3e-58cc-4372-a567-0e02b2c3d479
@ -355,7 +325,7 @@ export interface operations {
* @description Bid on a specific permission key for a specific chain. * @description Bid on a specific permission key for a specific chain.
* *
* Your bid will be simulated and verified by the server. Depending on the outcome of the auction, a transaction * Your bid will be simulated and verified by the server. Depending on the outcome of the auction, a transaction
* containing the contract call will be sent to the blockchain expecting the bid amount to be paid after the call. * containing the targetContract call will be sent to the blockchain expecting the bid amount to be paid after the call.
*/ */
bid: { bid: {
requestBody: { requestBody: {
@ -413,7 +383,7 @@ export interface operations {
get_opportunities: { get_opportunities: {
parameters: { parameters: {
query?: { query?: {
/** @example op_sepolia */ /** @example sepolia */
chain_id?: string | null; chain_id?: string | null;
}; };
}; };
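For reference, a rough sketch of a request body matching the Bid schema fields excerpted above (string-encoded amount, `op_sepolia` chain id); all values are placeholders, and any Bid fields not visible in this excerpt are omitted.

// Sketch: field set taken from the schema excerpt above; values are placeholders.
const bidBody = {
  amount: "1000000000000000000", // bid amount, as a decimal string
  chain_id: "op_sepolia", // the chain id to bid on
  permission_key: "0xdeadbeef", // permission key the bid targets
  target_calldata: "0xdeadbeef", // calldata for the contract call
  target_contract: "0xcA11bde05977b3631167028862bE2a173976CA11", // contract address to call
};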

View File

@ -23,27 +23,6 @@ export type BidParams = {
*/ */
validUntil: bigint; validUntil: bigint;
}; };
/**
* Represents the configuration for signing an opportunity
*/
export type EIP712Domain = {
/**
* The network chain id for the EIP712 domain.
*/
chainId: bigint;
/**
* The verifying contract address for the EIP712 domain.
*/
verifyingContract: Address;
/**
* The name parameter for the EIP712 domain.
*/
name: string;
/**
* The version parameter for the EIP712 domain.
*/
version: string;
};
/** /**
* Represents a valid opportunity ready to be executed * Represents a valid opportunity ready to be executed
*/ */
@ -81,18 +60,11 @@ export type Opportunity = {
* Tokens to receive after the opportunity is executed * Tokens to receive after the opportunity is executed
*/ */
buyTokens: TokenAmount[]; buyTokens: TokenAmount[];
/**
* The data required to sign the opportunity
*/
eip712Domain: EIP712Domain;
}; };
/** /**
* All the parameters necessary to represent an opportunity * All the parameters necessary to represent an opportunity
*/ */
export type OpportunityParams = Omit< export type OpportunityParams = Omit<Opportunity, "opportunityId">;
Opportunity,
"opportunityId" | "eip712Domain"
>;
/** /**
* Represents a bid for an opportunity * Represents a bid for an opportunity
*/ */
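A small sketch of filling in BidParams per the newer types above, assuming BidParams carries the amount and validUntil fields used by the signing code, with validUntil being the latest unix timestamp (in seconds) until which the bid is valid:

// Sketch: placeholder values; both fields are bigints per the BidParams type.
const bidParams: BidParams = {
  amount: 1_000_000_000_000_000_000n, // 1e18 wei
  validUntil: BigInt(Math.floor(Date.now() / 1000) + 60), // valid for roughly one minute
};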

View File

@ -6,10 +6,13 @@ from typing import Callable, Any
from collections.abc import Coroutine from collections.abc import Coroutine
from uuid import UUID from uuid import UUID
import httpx import httpx
import web3
import websockets import websockets
from websockets.client import WebSocketClientProtocol from websockets.client import WebSocketClientProtocol
from eth_abi import encode
from eth_account.account import Account from eth_account.account import Account
from express_relay.express_relay_types import ( from web3.auto import w3
from express_relay.types import (
Opportunity, Opportunity,
BidStatusUpdate, BidStatusUpdate,
ClientMessage, ClientMessage,
@ -319,14 +322,10 @@ class ExpressRelayClient:
elif msg_json.get("type") == "bid_status_update": elif msg_json.get("type") == "bid_status_update":
if bid_status_callback is not None: if bid_status_callback is not None:
id = msg_json["status"]["id"] id = msg_json["status"]["id"]
bid_status = msg_json["status"]["bid_status"]["type"] bid_status = msg_json["status"]["bid_status"]["status"]
result = msg_json["status"]["bid_status"].get("result") result = msg_json["status"]["bid_status"].get("result")
index = msg_json["status"]["bid_status"].get("index")
bid_status_update = BidStatusUpdate( bid_status_update = BidStatusUpdate(
id=id, id=id, bid_status=BidStatus(bid_status), result=result
bid_status=BidStatus(bid_status),
result=result,
index=index,
) )
asyncio.create_task(bid_status_callback(bid_status_update)) asyncio.create_task(bid_status_callback(bid_status_update))
@ -402,66 +401,42 @@ def sign_bid(
Returns: Returns:
A OpportunityBid object, representing the transaction to submit to the server. This object contains the searcher's signature. A OpportunityBid object, representing the transaction to submit to the server. This object contains the searcher's signature.
""" """
sell_tokens = [
(token.token, int(token.amount)) for token in opportunity.sell_tokens
]
buy_tokens = [(token.token, int(token.amount)) for token in opportunity.buy_tokens]
target_calldata = bytes.fromhex(opportunity.target_calldata.replace("0x", ""))
executor = Account.from_key(private_key).address digest = encode(
domain_data = { [
"name": opportunity.eip_712_domain.name, "(address,uint256)[]",
"version": opportunity.eip_712_domain.version, "(address,uint256)[]",
"chainId": opportunity.eip_712_domain.chain_id, "address",
"verifyingContract": opportunity.eip_712_domain.verifying_contract, "bytes",
} "uint256",
message_types = { "uint256",
"ExecutionParams": [ "uint256",
{"name": "sellTokens", "type": "TokenAmount[]"},
{"name": "buyTokens", "type": "TokenAmount[]"},
{"name": "executor", "type": "address"},
{"name": "targetContract", "type": "address"},
{"name": "targetCalldata", "type": "bytes"},
{"name": "targetCallValue", "type": "uint256"},
{"name": "validUntil", "type": "uint256"},
{"name": "bidAmount", "type": "uint256"},
], ],
"TokenAmount": [ [
{"name": "token", "type": "address"}, sell_tokens,
{"name": "amount", "type": "uint256"}, buy_tokens,
opportunity.target_contract,
target_calldata,
opportunity.target_call_value,
bid_amount,
valid_until,
], ],
}
# the data to be signed
message_data = {
"sellTokens": [
{
"token": token.token,
"amount": int(token.amount),
}
for token in opportunity.sell_tokens
],
"buyTokens": [
{
"token": token.token,
"amount": int(token.amount),
}
for token in opportunity.buy_tokens
],
"executor": executor,
"targetContract": opportunity.target_contract,
"targetCalldata": bytes.fromhex(opportunity.target_calldata.replace("0x", "")),
"targetCallValue": opportunity.target_call_value,
"validUntil": valid_until,
"bidAmount": bid_amount,
}
signed_typed_data = Account.sign_typed_data(
private_key, domain_data, message_types, message_data
) )
msg_data = web3.Web3.solidity_keccak(["bytes"], [digest])
signature = w3.eth.account.signHash(msg_data, private_key=private_key)
opportunity_bid = OpportunityBid( opportunity_bid = OpportunityBid(
opportunity_id=opportunity.opportunity_id, opportunity_id=opportunity.opportunity_id,
permission_key=opportunity.permission_key, permission_key=opportunity.permission_key,
amount=bid_amount, amount=bid_amount,
valid_until=valid_until, valid_until=valid_until,
executor=executor, executor=Account.from_key(private_key).address,
signature=signed_typed_data, signature=signature,
) )
return opportunity_bid return opportunity_bid

View File

@ -3,7 +3,7 @@ import asyncio
import logging import logging
from eth_account.account import Account from eth_account.account import Account
from express_relay.client import ExpressRelayClient, sign_bid from express_relay.client import ExpressRelayClient, sign_bid
from express_relay.express_relay_types import ( from express_relay.types import (
Opportunity, Opportunity,
OpportunityBid, OpportunityBid,
Bytes32, Bytes32,
@ -76,16 +76,14 @@ class SimpleSearcher:
bid_status = bid_status_update.bid_status bid_status = bid_status_update.bid_status
result = bid_status_update.result result = bid_status_update.result
result_details = ""
if bid_status == BidStatus("submitted"): if bid_status == BidStatus("submitted"):
result_details = ( logger.info(f"Bid {id} has been submitted in hash {result}")
f", transaction {result}, index {bid_status_update.index} of multicall"
)
elif bid_status == BidStatus("lost"): elif bid_status == BidStatus("lost"):
result_details = f", transaction {result}" logger.info(f"Bid {id} was unsuccessful")
logger.error( elif bid_status == BidStatus("pending"):
f"Bid status for bid {id}: {bid_status.value.replace('_', ' ')}{result_details}" logger.info(f"Bid {id} is pending")
) else:
logger.error(f"Unrecognized status {bid_status} for bid {id}")
async def main(): async def main():

View File

@ -105,40 +105,26 @@ class BidStatus(Enum):
SUBMITTED = "submitted" SUBMITTED = "submitted"
LOST = "lost" LOST = "lost"
PENDING = "pending" PENDING = "pending"
SIMULATION_FAILED = "simulation_failed"
class BidStatusUpdate(BaseModel): class BidStatusUpdate(BaseModel):
""" """
Attributes: Attributes:
id: The ID of the bid. id: The ID of the bid.
bid_status: The current status of the bid. bid_status: The status enum, either SUBMITTED, LOST, or PENDING.
result: The result of the bid: a transaction hash if the status is SUBMITTED or LOST, else None. result: The result of the bid: a transaction hash if the status is SUBMITTED, else None.
index: The index of the bid in the submitted transaction; None if the status is not SUBMITTED.
""" """
id: UUIDString id: UUIDString
bid_status: BidStatus bid_status: BidStatus
result: Bytes32 | None = Field(default=None) result: Bytes32 | None = Field(default=None)
index: int | None = Field(default=None)
@model_validator(mode="after") @model_validator(mode="after")
def check_result(self): def check_result(self):
if self.bid_status in [
BidStatus("pending"),
BidStatus("simulation_failed"),
]:
assert self.result is None, "result must be None"
else:
assert self.result is not None, "result must be a valid 32-byte hash"
return self
@model_validator(mode="after")
def check_index(self):
if self.bid_status == BidStatus("submitted"): if self.bid_status == BidStatus("submitted"):
assert self.index is not None, "index must be a valid integer" assert self.result is not None, "result must be a valid 32-byte hash"
else: else:
assert self.index is None, "index must be None" assert self.result is None, "result must be None"
return self return self
@ -197,13 +183,6 @@ class OpportunityParams(BaseModel):
params: Union[OpportunityParamsV1] = Field(..., discriminator="version") params: Union[OpportunityParamsV1] = Field(..., discriminator="version")
class EIP712Domain(BaseModel):
name: str
version: str
chain_id: IntString
verifying_contract: Address
class Opportunity(BaseModel): class Opportunity(BaseModel):
""" """
Attributes: Attributes:
@ -217,7 +196,6 @@ class Opportunity(BaseModel):
version: The version of the opportunity. version: The version of the opportunity.
creation_time: The creation time of the opportunity. creation_time: The creation time of the opportunity.
opportunity_id: The ID of the opportunity. opportunity_id: The ID of the opportunity.
eip_712_domain: The EIP712 domain data needed for signing.
""" """
target_calldata: HexString target_calldata: HexString
@ -230,7 +208,6 @@ class Opportunity(BaseModel):
version: str version: str
creation_time: IntString creation_time: IntString
opportunity_id: UUIDString opportunity_id: UUIDString
eip_712_domain: EIP712Domain
supported_versions: ClassVar[list[str]] = ["v1"] supported_versions: ClassVar[list[str]] = ["v1"]

View File

@ -1,6 +1,6 @@
[tool.poetry] [tool.poetry]
name = "express-relay" name = "express-relay"
version = "0.4.2" version = "0.2.0"
description = "Utilities for searchers and protocols to interact with the Express Relay protocol." description = "Utilities for searchers and protocols to interact with the Express Relay protocol."
authors = ["dourolabs"] authors = ["dourolabs"]
license = "Proprietary" license = "Proprietary"

View File

@ -1,4 +1,4 @@
/target /target
*config.yaml config.yaml
*secret* *secret*
*private-key* *private-key*

View File

@ -1488,7 +1488,7 @@ dependencies = [
[[package]] [[package]]
name = "fortuna" name = "fortuna"
version = "5.2.2" version = "4.0.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"axum", "axum",
@ -2822,7 +2822,7 @@ dependencies = [
[[package]] [[package]]
name = "pythnet-sdk" name = "pythnet-sdk"
version = "2.1.0" version = "2.0.0"
dependencies = [ dependencies = [
"bincode", "bincode",
"borsh", "borsh",

View File

@ -1,6 +1,6 @@
[package] [package]
name = "fortuna" name = "fortuna"
version = "5.2.2" version = "4.0.0"
edition = "2021" edition = "2021"
[dependencies] [dependencies]
@ -16,7 +16,7 @@ ethers = { version = "2.0.14", features = ["ws"] }
futures = { version = "0.3.28" } futures = { version = "0.3.28" }
hex = "0.4.3" hex = "0.4.3"
prometheus-client = { version = "0.21.2" } prometheus-client = { version = "0.21.2" }
pythnet-sdk = { path = "../../pythnet/pythnet_sdk", features = ["strum"] } pythnet-sdk = { path = "../pythnet/pythnet_sdk", features = ["strum"] }
rand = "0.8.5" rand = "0.8.5"
reqwest = { version = "0.11.22", features = ["json", "blocking"] } reqwest = { version = "0.11.22", features = ["json", "blocking"] }
serde = { version = "1.0.188", features = ["derive"] } serde = { version = "1.0.188", features = ["derive"] }

View File

@ -7,15 +7,15 @@ RUN rustup default nightly-2023-07-23
# Build # Build
WORKDIR /src WORKDIR /src
COPY apps/fortuna apps/fortuna COPY fortuna fortuna
COPY pythnet pythnet COPY pythnet pythnet
COPY target_chains/ethereum/entropy_sdk/solidity/abis target_chains/ethereum/entropy_sdk/solidity/abis COPY target_chains/ethereum/entropy_sdk/solidity/abis target_chains/ethereum/entropy_sdk/solidity/abis
WORKDIR /src/apps/fortuna WORKDIR /src/fortuna
RUN --mount=type=cache,target=/root/.cargo/registry cargo build --release RUN --mount=type=cache,target=/root/.cargo/registry cargo build --release
FROM rust:${RUST_VERSION} FROM rust:${RUST_VERSION}
# Copy artifacts from other images # Copy artifacts from other images
COPY --from=build /src/apps/fortuna/target/release/fortuna /usr/local/bin/ COPY --from=build /src/fortuna/target/release/fortuna /usr/local/bin/

View File

@ -4,4 +4,3 @@ chains:
contract_addr: 0x8250f4aF4B972684F7b336503E2D6dFeDeB1487a contract_addr: 0x8250f4aF4B972684F7b336503E2D6dFeDeB1487a
reveal_delay_blocks: 0 reveal_delay_blocks: 0
legacy_tx: true legacy_tx: true
gas_limit: 500000

View File

@ -59,7 +59,7 @@ use {
// contract in the same repo. // contract in the same repo.
abigen!( abigen!(
PythRandom, PythRandom,
"../../target_chains/ethereum/entropy_sdk/solidity/abis/IEntropy.json" "../target_chains/ethereum/entropy_sdk/solidity/abis/IEntropy.json"
); );
pub type SignablePythContract = PythRandom< pub type SignablePythContract = PythRandom<
@ -277,11 +277,7 @@ impl EntropyReader for PythContract {
Err(e) => match e { Err(e) => match e {
ContractError::ProviderError { e } => Err(anyhow!(e)), ContractError::ProviderError { e } => Err(anyhow!(e)),
_ => { _ => {
tracing::info!( tracing::info!("Gas estimation for reveal with callback failed: {:?}", e);
sequence_number = sequence_number,
"Gas estimation failed. error: {:?}",
e
);
Ok(None) Ok(None)
} }
}, },

View File

@ -8,9 +8,7 @@ use {
chain::ethereum::PythContract, chain::ethereum::PythContract,
command::register_provider::CommitmentMetadata, command::register_provider::CommitmentMetadata,
config::{ config::{
Commitment,
Config, Config,
ProviderConfig,
RunOptions, RunOptions,
}, },
keeper, keeper,
@ -29,6 +27,7 @@ use {
collections::HashMap, collections::HashMap,
net::SocketAddr, net::SocketAddr,
sync::Arc, sync::Arc,
vec,
}, },
tokio::{ tokio::{
spawn, spawn,
@ -122,67 +121,38 @@ pub async fn run_keeper(
pub async fn run(opts: &RunOptions) -> Result<()> { pub async fn run(opts: &RunOptions) -> Result<()> {
let config = Config::load(&opts.config.config)?; let config = Config::load(&opts.config.config)?;
let provider_config = opts let private_key = opts.load_private_key()?;
.provider_config
.provider_config
.as_ref()
.map(|path| ProviderConfig::load(&path).expect("Failed to load provider config"));
let secret = opts.randomness.load_secret()?; let secret = opts.randomness.load_secret()?;
let (tx_exit, rx_exit) = watch::channel(false); let (tx_exit, rx_exit) = watch::channel(false);
let mut chains: HashMap<ChainId, BlockchainState> = HashMap::new(); let mut chains: HashMap<ChainId, BlockchainState> = HashMap::new();
for (chain_id, chain_config) in &config.chains { for (chain_id, chain_config) in &config.chains {
let contract = Arc::new(PythContract::from_config(&chain_config)?); let contract = Arc::new(PythContract::from_config(&chain_config)?);
let provider_chain_config = provider_config
.as_ref()
.and_then(|c| c.get_chain_config(chain_id));
let mut provider_commitments = provider_chain_config
.as_ref()
.map(|c| c.get_sorted_commitments())
.unwrap_or_else(|| Vec::new());
let provider_info = contract.get_provider_info(opts.provider).call().await?; let provider_info = contract.get_provider_info(opts.provider).call().await?;
let latest_metadata =
bincode::deserialize::<CommitmentMetadata>(&provider_info.commitment_metadata)
.map_err(|e| {
anyhow!(
"Chain: {} - Failed to deserialize commitment metadata: {}",
&chain_id,
e
)
})?;
provider_commitments.push(Commitment {
seed: latest_metadata.seed,
chain_length: latest_metadata.chain_length,
original_commitment_sequence_number: provider_info.original_commitment_sequence_number,
});
// Reconstruct the hash chain based on the metadata and check that it matches the on-chain commitment.
// TODO: we should instantiate the state here with multiple hash chains.
// This approach works fine as long as we haven't rotated the commitment (i.e., all user requests
// are for the most recent chain).
// TODO: we may want to load the hash chain in a lazy/fault-tolerant way. If there are many blockchains, // TODO: we may want to load the hash chain in a lazy/fault-tolerant way. If there are many blockchains,
// then it's more likely that some RPC fails. We should tolerate these faults and generate the hash chain // then it's more likely that some RPC fails. We should tolerate these faults and generate the hash chain
// later when a user request comes in for that chain. // later when a user request comes in for that chain.
let metadata =
bincode::deserialize::<CommitmentMetadata>(&provider_info.commitment_metadata)?;
let mut offsets = Vec::<usize>::new(); let hash_chain = PebbleHashChain::from_config(
let mut hash_chains = Vec::<PebbleHashChain>::new();
for commitment in &provider_commitments {
let offset = commitment.original_commitment_sequence_number.try_into()?;
offsets.push(offset);
let pebble_hash_chain = PebbleHashChain::from_config(
&secret, &secret,
&chain_id, &chain_id,
&opts.provider, &opts.provider,
&chain_config.contract_addr, &chain_config.contract_addr,
&commitment.seed, &metadata.seed,
commitment.chain_length, metadata.chain_length,
)?; )?;
hash_chains.push(pebble_hash_chain);
}
let chain_state = HashChainState { let chain_state = HashChainState {
offsets, offsets: vec![provider_info
hash_chains, .original_commitment_sequence_number
.try_into()?],
hash_chains: vec![hash_chain],
}; };
if chain_state.reveal(provider_info.original_commitment_sequence_number)? if chain_state.reveal(provider_info.original_commitment_sequence_number)?
@ -217,10 +187,7 @@ pub async fn run(opts: &RunOptions) -> Result<()> {
Ok::<(), Error>(()) Ok::<(), Error>(())
}); });
spawn(run_keeper(chains.clone(), config, private_key));
if let Some(keeper_private_key) = opts.load_keeper_private_key()? {
spawn(run_keeper(chains.clone(), config, keeper_private_key));
}
run_api(opts.addr.clone(), chains, rx_exit).await?; run_api(opts.addr.clone(), chains, rx_exit).await?;

View File

@ -16,10 +16,7 @@ use {
PebbleHashChain, PebbleHashChain,
}, },
}, },
anyhow::{ anyhow::Result,
anyhow,
Result,
},
ethers::{ ethers::{
abi::Bytes as AbiBytes, abi::Bytes as AbiBytes,
signers::{ signers::{
@ -69,14 +66,7 @@ pub async fn setup_provider(opts: &SetupProviderOptions) -> Result<()> {
register = true; register = true;
} else { } else {
let metadata = let metadata =
bincode::deserialize::<CommitmentMetadata>(&provider_info.commitment_metadata) bincode::deserialize::<CommitmentMetadata>(&provider_info.commitment_metadata)?;
.map_err(|e| {
anyhow!(
"Chain: {} - Failed to deserialize commitment metadata: {}",
&chain_id,
e
)
})?;
let hash_chain = PebbleHashChain::from_config( let hash_chain = PebbleHashChain::from_config(
&secret, &secret,
@ -84,7 +74,7 @@ pub async fn setup_provider(opts: &SetupProviderOptions) -> Result<()> {
&provider_address, &provider_address,
&chain_config.contract_addr, &chain_config.contract_addr,
&metadata.seed, &metadata.seed,
opts.randomness.chain_length, metadata.chain_length,
)?; )?;
let chain_state = HashChainState { let chain_state = HashChainState {
offsets: vec![provider_info offsets: vec![provider_info
@ -115,8 +105,7 @@ pub async fn setup_provider(opts: &SetupProviderOptions) -> Result<()> {
fee: opts.fee, fee: opts.fee,
uri, uri,
}) })
.await .await?;
.map_err(|e| anyhow!("Chain: {} - Failed to register provider: {}", &chain_id, e))?;
tracing::info!("{}: registered", &chain_id); tracing::info!("{}: registered", &chain_id);
} else { } else {
if provider_info.fee_in_wei != opts.fee { if provider_info.fee_in_wei != opts.fee {

View File

@ -97,7 +97,7 @@ pub struct RandomnessOptions {
/// The length of the hash chain to generate. /// The length of the hash chain to generate.
#[arg(long = "chain-length")] #[arg(long = "chain-length")]
#[arg(env = "FORTUNA_CHAIN_LENGTH")] #[arg(env = "FORTUNA_CHAIN_LENGTH")]
#[arg(default_value = "100000")] #[arg(default_value = "10000")]
pub chain_length: u64, pub chain_length: u64,
} }
@ -158,57 +158,3 @@ pub struct EthereumConfig {
/// The gas limit to use for entropy callback transactions. /// The gas limit to use for entropy callback transactions.
pub gas_limit: U256, pub gas_limit: U256,
} }
#[derive(Args, Clone, Debug)]
#[command(next_help_heading = "Provider Config Options")]
#[group(id = "ProviderConfig")]
pub struct ProviderConfigOptions {
#[arg(long = "provider-config")]
#[arg(env = "FORTUNA_PROVIDER_CONFIG")]
pub provider_config: Option<String>,
}
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
pub struct ProviderConfig {
pub chains: HashMap<ChainId, ProviderChainConfig>,
}
impl ProviderConfig {
pub fn load(path: &str) -> Result<ProviderConfig> {
// Open and read the YAML file
let yaml_content = fs::read_to_string(path)?;
let config: ProviderConfig = serde_yaml::from_str(&yaml_content)?;
Ok(config)
}
/// Get the provider chain config. The method returns an Option for ProviderChainConfig.
/// We may not have past any commitments for a chain. For example, for a new chain
pub fn get_chain_config(&self, chain_id: &ChainId) -> Option<ProviderChainConfig> {
self.chains.get(chain_id).map(|x| x.clone())
}
}
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
pub struct ProviderChainConfig {
commitments: Vec<Commitment>,
}
impl ProviderChainConfig {
/// Returns a clone of the commitments in the sorted order.
/// `HashChainState` requires offsets to be in order.
pub fn get_sorted_commitments(&self) -> Vec<Commitment> {
let mut sorted_commitments = self.commitments.clone();
sorted_commitments.sort_by(|c1, c2| {
c1.original_commitment_sequence_number
.cmp(&c2.original_commitment_sequence_number)
});
sorted_commitments
}
}
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
pub struct Commitment {
pub seed: [u8; 32],
pub chain_length: u64,
pub original_commitment_sequence_number: u64,
}

View File

@ -1,7 +1,6 @@
use { use {
crate::config::{ crate::config::{
ConfigOptions, ConfigOptions,
ProviderConfigOptions,
RandomnessOptions, RandomnessOptions,
}, },
anyhow::Result, anyhow::Result,
@ -19,9 +18,6 @@ pub struct RunOptions {
#[command(flatten)] #[command(flatten)]
pub config: ConfigOptions, pub config: ConfigOptions,
#[command(flatten)]
pub provider_config: ProviderConfigOptions,
#[command(flatten)] #[command(flatten)]
pub randomness: RandomnessOptions, pub randomness: RandomnessOptions,
@ -36,20 +32,16 @@ pub struct RunOptions {
#[arg(env = "FORTUNA_PROVIDER")] #[arg(env = "FORTUNA_PROVIDER")]
pub provider: Address, pub provider: Address,
/// If provided, the keeper will run alongside the Fortuna API service. /// Path to a file containing a 20-byte (40 char) hex encoded Ethereum private key.
/// It should be a path to a file containing a 20-byte (40 char) hex encoded Ethereum private key.
/// This key is required to submit transactions for entropy callback requests. /// This key is required to submit transactions for entropy callback requests.
/// This key should not be a registered provider. /// This key should not be a registered provider.
#[arg(long = "keeper-private-key")] #[arg(long = "keeper-private-key")]
#[arg(env = "KEEPER_PRIVATE_KEY")] #[arg(env = "KEEPER_PRIVATE_KEY")]
pub keeper_private_key_file: Option<String>, pub keeper_private_key_file: String,
} }
impl RunOptions { impl RunOptions {
pub fn load_keeper_private_key(&self) -> Result<Option<String>> { pub fn load_private_key(&self) -> Result<String> {
if let Some(ref keeper_private_key_file) = self.keeper_private_key_file { return Ok((fs::read_to_string(&self.keeper_private_key_file))?);
return Ok(Some(fs::read_to_string(keeper_private_key_file)?));
}
return Ok(None);
} }
} }

407
fortuna/src/keeper.rs Normal file
View File

@ -0,0 +1,407 @@
use {
crate::{
api::{
self,
BlockchainState,
},
chain::{
ethereum::SignablePythContract,
reader::{
BlockNumber,
RequestedWithCallbackEvent,
},
},
config::EthereumConfig,
},
anyhow::Result,
ethers::{
providers::{
Middleware,
Provider,
Ws,
},
types::U256,
},
futures::StreamExt,
std::sync::Arc,
tokio::{
spawn,
sync::mpsc,
time::{
self,
Duration,
},
},
tracing,
};
/// How long to wait before retrying in case of an RPC error
const RETRY_INTERVAL: Duration = Duration::from_secs(5);
/// How many blocks to look back for events that might be missed when starting the keeper
const BACKLOG_RANGE: u64 = 1000;
/// How many blocks to fetch events for in a single rpc call
const BLOCK_BATCH_SIZE: u64 = 100;
/// How long to wait before polling the next latest block
const POLL_INTERVAL: Duration = Duration::from_secs(5);
/// Get the latest safe block number for the chain. Retry internally if there is an error.
async fn get_latest_safe_block(chain_state: &BlockchainState) -> BlockNumber {
loop {
match chain_state
.contract
.get_block_number(chain_state.confirmed_block_status)
.await
{
Ok(latest_confirmed_block) => {
return latest_confirmed_block - chain_state.reveal_delay_blocks
}
Err(e) => {
tracing::error!("Error while getting block number. error: {:?}", e);
time::sleep(RETRY_INTERVAL).await;
}
}
}
}
/// Run threads to handle events for the last `BACKLOG_RANGE` blocks. Watch for new blocks and
/// handle any events for the new blocks.
pub async fn run_keeper_threads(
private_key: String,
chain_eth_config: EthereumConfig,
chain_state: BlockchainState,
) {
tracing::info!("Starting keeper for chain: {}", &chain_state.id);
let latest_safe_block = get_latest_safe_block(&chain_state).await;
tracing::info!(
"Latest safe block for chain {}: {} ",
&chain_state.id,
&latest_safe_block
);
let contract = Arc::new(
SignablePythContract::from_config(&chain_eth_config, &private_key)
.await
.expect("Chain config should be valid"),
);
let backlog_chain_state = chain_state.clone();
let backlog_contract = contract.clone();
// Spawn a thread to handle the events from last BACKLOG_RANGE blocks.
spawn(async move {
let from_block = latest_safe_block.saturating_sub(BACKLOG_RANGE);
process_block_range(
BlockRange {
from: from_block,
to: latest_safe_block,
},
backlog_contract,
chain_eth_config.gas_limit,
backlog_chain_state.clone(),
)
.await;
tracing::info!(
"Backlog processing for chain: {} completed",
&backlog_chain_state.id
);
});
let (tx, rx) = mpsc::channel::<BlockRange>(1000);
let watch_blocks_chain_state = chain_state.clone();
// Spawn a thread to watch for new blocks and send the range of blocks for which events have not been handled to the `tx` channel.
spawn(async move {
loop {
if let Err(e) = watch_blocks(
watch_blocks_chain_state.clone(),
latest_safe_block,
tx.clone(),
chain_eth_config.geth_rpc_wss.clone(),
)
.await
{
tracing::error!(
"Error in watching blocks for chain: {}, {:?}",
&watch_blocks_chain_state.id,
e
);
time::sleep(RETRY_INTERVAL).await;
}
}
});
// Spawn a thread that listens for block ranges on the `rx` channel and processes the events for those blocks.
spawn(process_new_blocks(
chain_state.clone(),
rx,
Arc::clone(&contract),
chain_eth_config.gas_limit,
));
}
// Process an event for a chain. It estimates the gas for the reveal with callback and
// submits the transaction if the gas estimate is below the gas limit.
// It will return an Error if the gas estimation failed with a provider error or if the
// reveal with callback failed with a provider error.
pub async fn process_event(
event: RequestedWithCallbackEvent,
chain_config: &BlockchainState,
contract: &Arc<SignablePythContract>,
gas_limit: U256,
) -> Result<()> {
if chain_config.provider_address != event.provider_address {
return Ok(());
}
let provider_revelation = match chain_config.state.reveal(event.sequence_number) {
Ok(result) => result,
Err(e) => {
tracing::error!(
"Error while revealing for provider: {} and sequence number: {} with error: {:?}",
event.provider_address,
event.sequence_number,
e
);
return Ok(());
}
};
let gas_estimate_res = chain_config
.contract
.estimate_reveal_with_callback_gas(
event.provider_address,
event.sequence_number,
event.user_random_number,
provider_revelation,
)
.await;
match gas_estimate_res {
Ok(gas_estimate_option) => match gas_estimate_option {
Some(gas_estimate) => {
// Pad the gas estimate by 33%
let (gas_estimate, _) = gas_estimate
.saturating_mul(U256::from(4))
.div_mod(U256::from(3));
if gas_estimate > gas_limit {
tracing::error!(
"Gas estimate for reveal with callback is higher than the gas limit for chain: {}",
&chain_config.id
);
return Ok(());
}
let res = contract
.reveal_with_callback(
event.provider_address,
event.sequence_number,
event.user_random_number,
provider_revelation,
)
.gas(gas_estimate)
.send()
.await?
.await;
match res {
Ok(_) => {
tracing::info!(
"Revealed on chain: {} for provider: {} and sequence number: {} with res: {:?}",
&chain_config.id,
event.provider_address,
event.sequence_number,
res
);
Ok(())
}
Err(e) => {
tracing::error!(
"Error while revealing for provider: {} and sequence number: {} with error: {:?}",
event.provider_address,
event.sequence_number,
e
);
Err(e.into())
}
}
}
None => Ok(()),
},
Err(e) => {
tracing::error!(
"Error while simulating reveal for provider: {} and sequence number: {} \n error: {:?}",
event.provider_address,
event.sequence_number,
e
);
Err(e)
}
}
}
/// Process a range of blocks for a chain. It fetches the events for the blocks in the given range
/// and tries to process them one by one. If processing an event fails, it retries indefinitely.
pub async fn process_block_range(
block_range: BlockRange,
contract: Arc<SignablePythContract>,
gas_limit: U256,
chain_state: api::BlockchainState,
) {
tracing::info!(
"Processing blocks for chain: {} from block: {} to block: {}",
&chain_state.id,
block_range.from,
block_range.to
);
let BlockRange {
from: first_block,
to: last_block,
} = block_range;
let mut current_block = first_block;
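// Walk the range in batches of at most BLOCK_BATCH_SIZE blocks.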
while current_block <= last_block {
let mut to_block = current_block + BLOCK_BATCH_SIZE;
if to_block > last_block {
to_block = last_block;
}
let events_res = chain_state
.contract
.get_request_with_callback_events(current_block, to_block)
.await;
match events_res {
Ok(events) => {
for event in events {
while let Err(e) =
process_event(event.clone(), &chain_state, &contract, gas_limit).await
{
tracing::error!(
"Error while processing event for chain: {} and sequence number: {}. Waiting for {} seconds before retry. error: {:?}",
&chain_state.id,
&event.sequence_number,
RETRY_INTERVAL.as_secs(),
e
);
time::sleep(RETRY_INTERVAL).await;
}
}
tracing::info!(
"Backlog processed for chain: {} from block: {} to block: {}",
&chain_state.id,
&current_block,
&to_block
);
current_block = to_block + 1;
}
Err(e) => {
tracing::error!(
"Error while getting events for chain: {} from block: {} to block: {}. Waiting for {} seconds before retry. error: {:?}",
&chain_state.id,
&current_block,
&to_block,
RETRY_INTERVAL.as_secs(),
e
);
time::sleep(RETRY_INTERVAL).await;
}
}
}
}
pub struct BlockRange {
pub from: BlockNumber,
pub to: BlockNumber,
}
/// Watch for new blocks and send the ranges of blocks whose events have not yet been handled to the `tx` channel.
/// We subscribe to new blocks instead of events: missing some blocks is fine, because we send whole
/// block ranges to the `tx` channel. Had we subscribed to events, we could have missed some
/// without ever knowing about it.
pub async fn watch_blocks(
chain_state: BlockchainState,
latest_safe_block: BlockNumber,
tx: mpsc::Sender<BlockRange>,
geth_rpc_wss: Option<String>,
) -> Result<()> {
tracing::info!(
"Watching blocks to handle new events for chain: {}",
&chain_state.id
);
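// Subscribe to new blocks over websocket when a WSS endpoint is configured; otherwise fall back to polling at POLL_INTERVAL.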
let mut last_safe_block_processed = latest_safe_block;
let provider_option = match geth_rpc_wss {
Some(wss) => Some(Provider::<Ws>::connect(wss).await?),
None => {
tracing::info!("No wss provided for chain: {}", &chain_state.id);
None
}
};
let mut stream_option = match provider_option {
Some(ref provider) => Some(provider.subscribe_blocks().await?),
None => None,
};
loop {
match stream_option {
Some(ref mut stream) => {
stream.next().await;
}
None => {
time::sleep(POLL_INTERVAL).await;
}
}
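// Re-check the latest safe block and, if it has advanced, send the newly safe range to the processor.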
let latest_safe_block = get_latest_safe_block(&chain_state).await;
if latest_safe_block > last_safe_block_processed {
match tx
.send(BlockRange {
from: last_safe_block_processed + 1,
to: latest_safe_block,
})
.await
{
Ok(_) => {
tracing::info!(
"Block range sent to handle events for chain {}: {} to {}",
&chain_state.id,
&last_safe_block_processed + 1,
&latest_safe_block
);
last_safe_block_processed = latest_safe_block;
}
Err(e) => {
tracing::error!("Error while sending block range to handle events for chain {}. These will be handled in next call. error: {:?}",&chain_state.id,e);
}
};
}
}
}
/// Waits on the `rx` channel for block ranges and calls `process_block_range` to process them.
pub async fn process_new_blocks(
chain_state: BlockchainState,
mut rx: mpsc::Receiver<BlockRange>,
contract: Arc<SignablePythContract>,
gas_limit: U256,
) {
loop {
tracing::info!(
"Waiting for new block ranges to process for chain: {}",
&chain_state.id
);
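// Block until the watcher sends the next range, then process it to completion before waiting again.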
if let Some(block_range) = rx.recv().await {
process_block_range(
block_range,
Arc::clone(&contract),
gas_limit,
chain_state.clone(),
)
.await;
}
}
}

View File

@ -37,8 +37,6 @@ const KEYPAIR: Keypair = Keypair.fromSecretKey(
Uint8Array.from(JSON.parse(fs.readFileSync(envOrErr("WALLET"), "ascii"))) Uint8Array.from(JSON.parse(fs.readFileSync(envOrErr("WALLET"), "ascii")))
); );
const OFFSET: number = Number(process.env.OFFSET ?? "-1"); const OFFSET: number = Number(process.env.OFFSET ?? "-1");
const SKIP_FAILED_REMOTE_INSTRUCTIONS: boolean =
process.env.SKIP_FAILED_REMOTE_INSTRUCTIONS == "true";
const COMMITMENT: Commitment = const COMMITMENT: Commitment =
(process.env.COMMITMENT as Commitment) ?? "confirmed"; (process.env.COMMITMENT as Commitment) ?? "confirmed";
@ -165,24 +163,15 @@ async function run() {
} }
} }
try {
await remoteExecutor.methods await remoteExecutor.methods
.executePostedVaa() .executePostedVaa()
.accounts({ .accounts({
claimRecord: claimRecordAddress, claimRecord: claimRecordAddress,
postedVaa: derivePostedVaaKey( postedVaa: derivePostedVaaKey(WORMHOLE_ADDRESS[CLUSTER]!, vaa.hash),
WORMHOLE_ADDRESS[CLUSTER]!,
vaa.hash
),
}) })
.remainingAccounts(extraAccountMetas) .remainingAccounts(extraAccountMetas)
.preInstructions(preInstructions) .preInstructions(preInstructions)
.rpc({ skipPreflight: true }); .rpc({ skipPreflight: true });
} catch (e) {
if (SKIP_FAILED_REMOTE_INSTRUCTIONS) {
console.error(e);
} else throw e;
}
} }
} else if (response.code == 5) { } else if (response.code == 5) {
console.log(`All VAAs have been relayed`); console.log(`All VAAs have been relayed`);

View File

@ -44,7 +44,6 @@ import {
} from "@pythnetwork/pyth-solana-receiver"; } from "@pythnetwork/pyth-solana-receiver";
import { LedgerNodeWallet } from "./ledger"; import { LedgerNodeWallet } from "./ledger";
import { DEFAULT_PRIORITY_FEE_CONFIG } from "@pythnetwork/solana-utils";
export async function loadHotWalletOrLedger( export async function loadHotWalletOrLedger(
wallet: string, wallet: string,
@ -75,7 +74,7 @@ async function loadVaultFromOptions(options: any): Promise<MultisigVault> {
const vault: PublicKey = new PublicKey(options.vault); const vault: PublicKey = new PublicKey(options.vault);
const squad = SquadsMesh.endpoint( const squad = SquadsMesh.endpoint(
options.rpcUrlOverride ?? getPythClusterApiUrl(multisigCluster), getPythClusterApiUrl(multisigCluster),
wallet wallet
); );
@ -102,10 +101,6 @@ const multisigCommand = (name: string, description: string) =>
.option( .option(
"-ldc, --ledger-derivation-change <number>", "-ldc, --ledger-derivation-change <number>",
"ledger derivation change to use" "ledger derivation change to use"
)
.option(
"-u, --rpc-url-override <string>",
"RPC URL to override the default for the cluster. Make sure this is an RPC URL of the cluster where the multisig lives. For Pythnet proposals it should be a Solana Mainnet RPC URL."
); );
program program
@ -159,11 +154,7 @@ multisigCommand(
}) })
.instruction(); .instruction();
await vault.proposeInstructions( await vault.proposeInstructions([proposalInstruction], targetCluster);
[proposalInstruction],
targetCluster,
DEFAULT_PRIORITY_FEE_CONFIG
);
}); });
multisigCommand( multisigCommand(
@ -187,11 +178,7 @@ multisigCommand(
}) })
.instruction(); .instruction();
await vault.proposeInstructions( await vault.proposeInstructions([proposalInstruction], targetCluster);
[proposalInstruction],
targetCluster,
DEFAULT_PRIORITY_FEE_CONFIG
);
}); });
multisigCommand( multisigCommand(
@ -222,11 +209,7 @@ multisigCommand(
}) })
.instruction(); .instruction();
await vault.proposeInstructions( await vault.proposeInstructions([proposalInstruction], targetCluster);
[proposalInstruction],
targetCluster,
DEFAULT_PRIORITY_FEE_CONFIG
);
}); });
multisigCommand("upgrade-program", "Upgrade a program from a buffer") multisigCommand("upgrade-program", "Upgrade a program from a buffer")
@ -267,11 +250,7 @@ multisigCommand("upgrade-program", "Upgrade a program from a buffer")
], ],
}; };
await vault.proposeInstructions( await vault.proposeInstructions([proposalInstruction], cluster);
[proposalInstruction],
cluster,
DEFAULT_PRIORITY_FEE_CONFIG
);
}); });
multisigCommand( multisigCommand(
@ -307,11 +286,7 @@ multisigCommand(
], ],
}; };
await vault.proposeInstructions( await vault.proposeInstructions([proposalInstruction], cluster);
[proposalInstruction],
cluster,
DEFAULT_PRIORITY_FEE_CONFIG
);
}); });
multisigCommand( multisigCommand(
@ -340,40 +315,7 @@ multisigCommand(
[] []
); );
await vault.proposeInstructions( await vault.proposeInstructions(instructions, cluster);
instructions,
cluster,
DEFAULT_PRIORITY_FEE_CONFIG
);
});
multisigCommand(
"delegate-stake",
"Delegate a stake account to the given vote account"
)
.requiredOption("-s, --stake-account <pubkey>", "stake account to delegate")
.requiredOption("-d, --vote-account <pubkey>", "vote account to delegate to")
.action(async (options: any) => {
const vault = await loadVaultFromOptions(options);
const cluster: PythCluster = options.cluster;
const authorizedPubkey: PublicKey = await vault.getVaultAuthorityPDA(
cluster
);
const stakeAccount: PublicKey = new PublicKey(options.stakeAccount);
const voteAccount: PublicKey = new PublicKey(options.voteAccount);
const instructions = StakeProgram.delegate({
stakePubkey: stakeAccount,
authorizedPubkey,
votePubkey: voteAccount,
}).instructions;
await vault.proposeInstructions(
instructions,
cluster,
DEFAULT_PRIORITY_FEE_CONFIG
);
}); });
multisigCommand( multisigCommand(
@ -398,11 +340,7 @@ multisigCommand(
priceAccount, priceAccount,
}) })
.instruction(); .instruction();
await vault.proposeInstructions( await vault.proposeInstructions([proposalInstruction], cluster);
[proposalInstruction],
cluster,
DEFAULT_PRIORITY_FEE_CONFIG
);
}); });
program program
@ -493,11 +431,7 @@ multisigCommand("propose-token-transfer", "Propose token transfer")
BigInt(amount) * BigInt(10) ** BigInt(mintAccount.decimals) BigInt(amount) * BigInt(10) ** BigInt(mintAccount.decimals)
); );
await vault.proposeInstructions( await vault.proposeInstructions([proposalInstruction], cluster);
[proposalInstruction],
cluster,
DEFAULT_PRIORITY_FEE_CONFIG
);
}); });
multisigCommand("propose-sol-transfer", "Propose sol transfer") multisigCommand("propose-sol-transfer", "Propose sol transfer")
@ -516,11 +450,7 @@ multisigCommand("propose-sol-transfer", "Propose sol transfer")
lamports: amount * LAMPORTS_PER_SOL, lamports: amount * LAMPORTS_PER_SOL,
}); });
await vault.proposeInstructions( await vault.proposeInstructions([proposalInstruction], cluster);
[proposalInstruction],
cluster,
DEFAULT_PRIORITY_FEE_CONFIG
);
}); });
multisigCommand("propose-arbitrary-payload", "Propose arbitrary payload") multisigCommand("propose-arbitrary-payload", "Propose arbitrary payload")
@ -594,11 +524,7 @@ multisigCommand("add-and-delete", "Change the roster of the multisig")
} }
} }
vault.proposeInstructions( vault.proposeInstructions(proposalInstructions, options.cluster);
proposalInstructions,
options.cluster,
DEFAULT_PRIORITY_FEE_CONFIG
);
}); });
/** /**

View File

@ -19,26 +19,19 @@
"format": "prettier --write \"src/**/*.ts\"", "format": "prettier --write \"src/**/*.ts\"",
"test": "jest" "test": "jest"
}, },
"//": [
"The injectivelabs/token-metadata package is pinned to a specific version to ensure that",
"wormhole-sdk can be built with the correct version of the package in nextjs (xc_admin_frontend).",
"Otherwise, the nextjs build will fail due to using a different version of the package."
],
"dependencies": { "dependencies": {
"@certusone/wormhole-sdk": "^0.10.15", "@certusone/wormhole-sdk": "^0.9.22",
"@coral-xyz/anchor": "^0.29.0", "@coral-xyz/anchor": "^0.29.0",
"@injectivelabs/sdk-ts": "^1.10.72",
"@injectivelabs/token-metadata": "~1.10.42",
"@pythnetwork/client": "^2.17.0", "@pythnetwork/client": "^2.17.0",
"@pythnetwork/pyth-solana-receiver": "*",
"@pythnetwork/solana-utils": "*",
"@solana/buffer-layout": "^4.0.1", "@solana/buffer-layout": "^4.0.1",
"@solana/web3.js": "^1.73.0", "@solana/web3.js": "^1.73.0",
"@sqds/mesh": "^1.0.6", "@sqds/mesh": "^1.0.6",
"bigint-buffer": "^1.1.5", "bigint-buffer": "^1.1.5",
"ethers": "^5.7.2", "ethers": "^5.7.2",
"lodash": "^4.17.21", "lodash": "^4.17.21",
"typescript": "^4.9.4" "typescript": "^4.9.4",
"@pythnetwork/solana-utils": "*",
"@pythnetwork/pyth-solana-receiver": "*"
}, },
"devDependencies": { "devDependencies": {
"@types/bn.js": "^5.1.1", "@types/bn.js": "^5.1.1",

View File

@ -53,7 +53,7 @@ export const RECEIVER_CHAINS = {
zetachain: 60034, zetachain: 60034,
astar_zkevm: 60035, astar_zkevm: 60035,
coredao: 60036, coredao: 60036,
viction: 60037, tomochain: 60037,
stacks: 60038, stacks: 60038,
mode: 60039, mode: 60039,
bttc: 60040, bttc: 60040,
@ -65,8 +65,6 @@ export const RECEIVER_CHAINS = {
blast: 60046, blast: 60046,
merlin: 60047, merlin: 60047,
parallel: 60048, parallel: 60048,
iota: 60049,
flow_previewnet: 60050,
// Testnets as a separate chain ids (to use stable data sources and governance for them) // Testnets as a separate chain ids (to use stable data sources and governance for them)
injective_testnet: 60013, injective_testnet: 60013,
@ -113,7 +111,7 @@ export const RECEIVER_CHAINS = {
zetachain_testnet: 50035, zetachain_testnet: 50035,
astar_zkevm_testnet: 50036, astar_zkevm_testnet: 50036,
coredao_testnet: 50037, coredao_testnet: 50037,
viction_testnet: 50038, tomochain_testnet: 50038,
stacks_testnet: 50039, stacks_testnet: 50039,
mode_testnet: 50040, mode_testnet: 50040,
bttc_testnet: 50041, bttc_testnet: 50041,
@ -126,6 +124,7 @@ export const RECEIVER_CHAINS = {
hedera_testnet: 50048, hedera_testnet: 50048,
filecoin_calibration: 50049, // Filecoin testnet filecoin_calibration: 50049, // Filecoin testnet
lightlink_pegasus_testnet: 50050, lightlink_pegasus_testnet: 50050,
sei_evm_devnet: 50051,
fantom_sonic_testnet: 50052, fantom_sonic_testnet: 50052,
dela_deperp_testnet: 50053, dela_deperp_testnet: 50053,
injective_inevm_testnet: 50054, injective_inevm_testnet: 50054,
@ -140,13 +139,6 @@ export const RECEIVER_CHAINS = {
polynomial_testnet: 50063, polynomial_testnet: 50063,
linea_sepolia: 50064, linea_sepolia: 50064,
rol_testnet: 50065, rol_testnet: 50065,
morph_testnet: 50066,
sei_evm_devnet: 50067,
boba_sepolia: 50068,
astar_zkyoto_testnet: 50069,
xion_testnet: 50070,
taiko_hekla: 50071,
olive_testnet: 50072,
}; };
// If there is any overlapping value the receiver chain will replace the wormhole // If there is any overlapping value the receiver chain will replace the wormhole

View File

@ -30,46 +30,19 @@ export class SolanaStakingMultisigInstruction implements MultisigInstruction {
const type = StakeInstruction.decodeInstructionType(instruction); const type = StakeInstruction.decodeInstructionType(instruction);
switch (type) { switch (type) {
case "Deactivate": case "Deactivate":
const decodedDeactivate = const decoded = StakeInstruction.decodeDeactivate(instruction);
StakeInstruction.decodeDeactivate(instruction);
return new SolanaStakingMultisigInstruction( return new SolanaStakingMultisigInstruction(
"Deactivate", "Deactivate",
{}, {},
{ {
named: { named: {
stakePubkey: { stakePubkey: {
pubkey: decodedDeactivate.stakePubkey, pubkey: decoded.stakePubkey,
isSigner: false, isSigner: false,
isWritable: true, isWritable: true,
}, },
authorizedPubkey: { authorizedPubkey: {
pubkey: decodedDeactivate.authorizedPubkey, pubkey: decoded.authorizedPubkey,
isSigner: true,
isWritable: false,
},
},
remaining: [],
}
);
case "Delegate":
const decodedDelegate = StakeInstruction.decodeDelegate(instruction);
return new SolanaStakingMultisigInstruction(
"Delegate",
{},
{
named: {
stakePubkey: {
pubkey: decodedDelegate.stakePubkey,
isSigner: false,
isWritable: true,
},
votePubkey: {
pubkey: decodedDelegate.votePubkey,
isSigner: false,
isWritable: false,
},
authorizedPubkey: {
pubkey: decodedDelegate.authorizedPubkey,
isSigner: true, isSigner: true,
isWritable: false, isWritable: false,
}, },
@ -77,8 +50,10 @@ export class SolanaStakingMultisigInstruction implements MultisigInstruction {
remaining: [], remaining: [],
} }
); );
case "Authorize": case "Authorize":
case "AuthorizeWithSeed": case "AuthorizeWithSeed":
case "Delegate":
case "Initialize": case "Initialize":
case "Merge": case "Merge":
case "Split": case "Split":

View File

@ -24,21 +24,16 @@ import SquadsMesh, { getIxAuthorityPDA, getTxPDA } from "@sqds/mesh";
import { MultisigAccount } from "@sqds/mesh/lib/types"; import { MultisigAccount } from "@sqds/mesh/lib/types";
import { mapKey } from "./remote_executor"; import { mapKey } from "./remote_executor";
import { WORMHOLE_ADDRESS } from "./wormhole"; import { WORMHOLE_ADDRESS } from "./wormhole";
import { import { TransactionBuilder } from "@pythnetwork/solana-utils";
TransactionBuilder,
sendTransactions,
} from "@pythnetwork/solana-utils";
import { import {
PACKET_DATA_SIZE_WITH_ROOM_FOR_COMPUTE_BUDGET, PACKET_DATA_SIZE_WITH_ROOM_FOR_COMPUTE_BUDGET,
PriorityFeeConfig, PriorityFeeConfig,
} from "@pythnetwork/solana-utils"; } from "@pythnetwork/solana-utils";
import NodeWallet from "@coral-xyz/anchor/dist/cjs/nodewallet";
export const MAX_EXECUTOR_PAYLOAD_SIZE = export const MAX_EXECUTOR_PAYLOAD_SIZE =
PACKET_DATA_SIZE_WITH_ROOM_FOR_COMPUTE_BUDGET - 687; // Bigger payloads won't fit in one addInstruction call when adding to the proposal PACKET_DATA_SIZE_WITH_ROOM_FOR_COMPUTE_BUDGET - 687; // Bigger payloads won't fit in one addInstruction call when adding to the proposal
export const MAX_INSTRUCTIONS_PER_PROPOSAL = 256 - 1; export const MAX_INSTRUCTIONS_PER_PROPOSAL = 256 - 1;
export const TIMEOUT = 10; export const MAX_NUMBER_OF_RETRIES = 10;
export const MAX_RETRY_SEND = 70;
type SquadInstruction = { type SquadInstruction = {
instruction: TransactionInstruction; instruction: TransactionInstruction;
@ -390,31 +385,52 @@ export class MultisigVault {
async sendAllTransactions(transactions: Transaction[]) { async sendAllTransactions(transactions: Transaction[]) {
const provider = this.getAnchorProvider({ const provider = this.getAnchorProvider({
preflightCommitment: "confirmed", preflightCommitment: "processed",
commitment: "confirmed", commitment: "processed",
}); });
for (const [index, tx] of transactions.entries()) { let needToFetchBlockhash = true; // We don't fetch blockhash everytime to save time
console.log("Trying transaction: ", index, " of ", transactions.length); let blockhash: string = "";
for (let [index, tx] of transactions.entries()) {
console.log("Trying to send transaction: " + index);
let numberOfRetries = 0;
let txHasLanded = false;
let retries = 0; while (!txHasLanded) {
while (retries < TIMEOUT) {
try { try {
await sendTransactions( if (needToFetchBlockhash) {
[{ tx, signers: [] }], blockhash = (await provider.connection.getLatestBlockhash())
.blockhash;
needToFetchBlockhash = false;
}
tx.feePayer = tx.feePayer || provider.wallet.publicKey;
tx.recentBlockhash = blockhash;
provider.wallet.signTransaction(tx);
await sendAndConfirmRawTransaction(
provider.connection, provider.connection,
this.squad.wallet as NodeWallet, tx.serialize(),
MAX_RETRY_SEND provider.opts
); );
break; txHasLanded = true;
} catch (e) { } catch (e) {
if (numberOfRetries >= MAX_NUMBER_OF_RETRIES) {
// Cap the number of retries
throw Error("Maximum number of retries exceeded");
}
const message = (e as any).toString().split("\n")[0];
if (
message ==
"Error: failed to send transaction: Transaction simulation failed: Blockhash not found"
) {
// If blockhash has expired, we need to fetch a new one
needToFetchBlockhash = true;
} else {
await new Promise((r) => setTimeout(r, 3000));
}
console.log(e); console.log(e);
retries++; numberOfRetries += 1;
} }
} }
if (retries === TIMEOUT) {
throw new Error("Too many retries");
}
} }
} }
} }

View File

@ -1,3 +1 @@
node_modules node_modules
tailwind.config.js
next.config.js
