Add pre-commit globally (#393)

* Add pre-commit globally

* Fix CI errors
This commit is contained in:
Ali Behjati 2022-11-24 14:14:29 +01:00 committed by GitHub
parent 275c7b8d1a
commit d6e25d9209
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
228 changed files with 8782 additions and 7903 deletions

19
.github/workflows/pre-commit.yml vendored Normal file
View File

@ -0,0 +1,19 @@
name: Pre-commit checks
on:
pull_request:
push:
branches: [main]
jobs:
pre-commit:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: nightly
components: rustfmt, clippy
- uses: pre-commit/action@v2.0.3

View File

@ -16,8 +16,8 @@ jobs:
run:
working-directory: ./cosmwasm/contracts/pyth
steps:
- uses: actions/checkout@v2
- name: Build
run: cargo build --verbose
- name: Run tests
run: cargo test --verbose
- uses: actions/checkout@v2
- name: Build
run: cargo build --verbose
- name: Run tests
run: cargo test --verbose

View File

@ -2,10 +2,10 @@ name: Check Remote Executor
on:
pull_request:
paths: [ pythnet/remote-executor/** ]
paths: [pythnet/remote-executor/**]
push:
branches: [main]
paths: [ pythnet/remote-executor/** ]
paths: [pythnet/remote-executor/**]
jobs:
pre-commit:
runs-on: ubuntu-latest
@ -24,4 +24,3 @@ jobs:
echo "/home/runner/.local/share/solana/install/active_release/bin" >> $GITHUB_PATH
- name: Run executor tests
run: cargo test-bpf --manifest-path ./pythnet/remote-executor/Cargo.toml

View File

@ -3,19 +3,36 @@ repos:
rev: v3.2.0
hooks:
- id: trailing-whitespace
files: pythnet/
- id: end-of-file-fixer
files: pythnet/
- id: check-added-large-files
# Hook to format many type of files in the repo
# including solidity contracts.
- repo: https://github.com/pre-commit/mirrors-prettier
rev: "v2.7.1"
hooks:
- id: prettier
additional_dependencies:
- "prettier@2.7.1"
- "prettier-plugin-solidity@1.0.0-rc.1"
- repo: local
hooks:
# Hooks for the remote executor
- id: cargo-fmt-executor
name: Cargo format executor
language: "rust"
entry: cargo +nightly fmt --manifest-path ./pythnet/remote-executor/Cargo.toml --all
pass_filenames: false
files: pythnet/remote-executor/
- id: cargo-clippy-executor
name: Cargo clippy executor
language: "rust"
entry: cargo +nightly clippy --manifest-path ./pythnet/remote-executor/Cargo.toml -- -D warnings
pass_filenames: false
files: pythnet/remote-executor/
# Hooks for the attester
- id: cargo-fmt-executor
name: Cargo format executor
language: "rust"
entry: cargo +nightly fmt --manifest-path ./solana/pyth2wormhole/Cargo.toml --all
pass_filenames: false
files: solana/pyth2wormhole/

View File

@ -10,18 +10,18 @@ and code reviews are our most important tools to accomplish that.
complex features, it can be useful to submit a [formal design document](design/template.md).
- Development happens on a long-lived development branch (`dev.v2` and `dev.v1`).
Every change going into a development branch is reviewed individually (see below). Release branches may be used
Every change going into a development branch is reviewed individually (see below). Release branches may be used
to support in-the-wild releases of Wormhole. We aim to support at most two release
branches at the same time. Changes can be cherry-picked from the development branch to release branches, but
never from release branches to a development branch.
- Releases are first tested on a testnet.
- Commits should be small and have a meaningful commit message. One commit should, roughly, be "one idea" and
be as atomic as possible. A feature can consist of many such commits.
- Feature flags and interface evolution are better than breaking changes and long-lived feature branches.
- We optimize for reading, not for writing - over its lifetime, code is read much more often than written.
Small commits, meaningful commit messages and useful comments make it easier to review code and improve the
quality of code review as well as review turnaround times. It's much easier to spot mistakes in small,
@ -42,12 +42,12 @@ The answer is... maybe? The following things are needed in order to fully suppor
a node or light client for every chain supported by Wormhole. This adds up, and the barrier to support new
chains is pretty high. Your proposal should clearly outline the value proposition of supporting the new chain.
**Convincing the DAO to run nodes for your chain is the first step in supporting a new chain.**
- The chain needs to support smart contracts capable of verifying 19 individual secp256k1 signatures.
- The smart contract needs to be built and audited. In some cases, existing contracts can be used, like with
EVM-compatible chains.
- Support for observing the chain needs to be added to guardiand.
- Web wallet integration needs to be built to actually interact with Wormhole.

View File

@ -7,7 +7,7 @@ The following dependencies are required for local development:
- [Go](https://golang.org/dl/) >= 1.17.5
- [Tilt](http://tilt.dev/) >= 0.20.8
- Any of the local Kubernetes clusters supported by Tilt.
We strongly recommend [minikube](https://kubernetes.io/docs/setup/learning-environment/minikube/) >=
We strongly recommend [minikube](https://kubernetes.io/docs/setup/learning-environment/minikube/) >=
v1.21.0 with the kvm2 driver.
- Tilt will use Minikube's embedded Docker server. If Minikube is not used, a local instance of
[Docker](https://docs.docker.com/engine/install/) / moby-engine >= 19.03 is required.

View File

@ -8,7 +8,7 @@ ARG WORMHOLE_TAG=v2.8.9
RUN apt-get update && apt-get install -yq libudev-dev ncat
RUN curl -fsSL https://deb.nodesource.com/setup_16.x | bash - && apt-get install -y nodejs
COPY solana /usr/src/solana
COPY solana /usr/src/solana
WORKDIR /usr/src/solana/pyth2wormhole
RUN --mount=type=cache,target=/root/.cache \

View File

@ -20,7 +20,7 @@ FROM cosmwasm/workspace-optimizer:0.12.6@sha256:e6565a5e87c830ef3e8775a9035006b3
COPY cosmwasm/Cargo.lock /code/
COPY cosmwasm/Cargo.toml /code/
COPY cosmwasm/contracts /code/contracts
COPY third_party/pyth/p2w-sdk/rust /third_party/pyth/p2w-sdk/rust
COPY third_party/pyth/p2w-sdk/rust /third_party/pyth/p2w-sdk/rust
RUN --mount=type=cache,target=/code/target,id=cosmwasm_pyth_target --mount=type=cache,target=/usr/local/cargo/registry optimize_workspace.sh
# Contract deployment stage

View File

@ -20,7 +20,7 @@ RUN sh -c "$(curl -sSfL https://release.solana.com/v1.10.31/install)"
ENV PATH="/root/.local/share/solana/install/active_release/bin:$PATH"
ADD solana/rust-toolchain /rust-toolchain
ADD solana/pyth2wormhole/rust-toolchain /rust-toolchain
# Solana does a questionable download at the beginning of a *first* build-bpf call. Trigger and layer-cache it explicitly.
RUN cargo init --lib /tmp/decoy-crate && \
cd /tmp/decoy-crate && cargo build-bpf && \

View File

@ -6,13 +6,13 @@ Pyth2Wormhole. The base repository is a fork from Certus One's reference
for building projects based on Wormhole's various SDKs. Much of the existing
documentation from there will also apply to this repository.
[Wormhole]: https://github.com/wormhole-foundation/wormhole
[wormhole]: https://github.com/wormhole-foundation/wormhole
Within this monorepo you will find the following subprojects:
## Pyth2Wormhole Solana
> solana/pyth2wormhole
> solana/pyth2wormhole
The main Pyth implementation currently exists as an [on-chain contract][] on
Solana. In order to expose these prices cross-chain, the Pyth2Wormhole Solana
@ -24,7 +24,8 @@ various P2W receiver contracts.
[on-chain contract]: https://github.com/pyth-network/pyth-client
## Pyth2Wormhole Ethereum
> ethereum/contracts/pyth
> ethereum/contracts/pyth
The Ethereum P2W contract acts as a receiver for Pyth prices relayed from the
P2W Solana contract. It also provides a public API for other Ethereum contracts
@ -35,7 +36,8 @@ examples.
[pyth-evm-sdk]: https://github.com/pyth-network/pyth-sdk-solidity
## Pyth2Wormhole Price Service
> third_party/pyth
> third_party/pyth
The P2W Price Service is an off-chain service which constantly observes the
Wormhole network watching for price attestations emitted from the Pyth Solana
@ -48,7 +50,7 @@ the [pyth-js][] repository.
[pyth-js]: https://github.com/pyth-network/pyth-js
--------------------------------------------------------------------------------
---
See [DEVELOP.md](DEVELOP.md) for instructions on how to set up a local devnet, and
[CONTRIBUTING.md](CONTRIBUTING.md) for instructions on how to contribute to this project.
@ -60,3 +62,15 @@ implied. See the License for the specific language governing permissions and lim
spoken - this is a very complex piece of software which targets a bleeding-edge, experimental smart contract runtime.
Mistakes happen, and no matter how hard you try and whether you pay someone to audit it, it may eat your tokens, set
your printer on fire or startle your cat. Cryptocurrencies are a high-risk investment, no matter how fancy.
## Development
### Releases
We use [Semantic Versioning](https://semver.org/) for our releases.
### pre-commit hooks
pre-commit is a tool that checks and fixes simple issues (formatting, ...) before each commit. You can install it by following [their website](https://pre-commit.com/). In order to enable checks for this repo run `pre-commit install` from command-line in the root of this repo.
The checks are also performed in the CI to ensure the code follows consistent formatting.

View File

@ -5,13 +5,13 @@
Pyth operates a self hosted [bug bounty program](https://pyth.network/bounty) to financially incentivize independent researchers (with up to $1,000,000 USDC) for finding and responsibly disclosing security issues.
- **Scopes**
- [Pyth Oracle](https://github.com/pyth-network/pyth-client/tree/main/program)
- [Pyth Crosschain Ethereum](https://github.com/pyth-network/pyth-crosschain/tree/main/ethereum/contracts/pyth)
- [Pyth Crosschain Aptos](https://github.com/pyth-network/pyth-crosschain/tree/main/aptos/contracts)
- [Pyth Governance](https://github.com/pyth-network/governance/tree/master/staking/programs/staking)
- [Pyth Oracle](https://github.com/pyth-network/pyth-client/tree/main/program)
- [Pyth Crosschain Ethereum](https://github.com/pyth-network/pyth-crosschain/tree/main/ethereum/contracts/pyth)
- [Pyth Crosschain Aptos](https://github.com/pyth-network/pyth-crosschain/tree/main/aptos/contracts)
- [Pyth Governance](https://github.com/pyth-network/governance/tree/master/staking/programs/staking)
- **Rewards**
- Critical: Up to $1,000,000
- High: Up to $100,000
- Critical: Up to $1,000,000
- High: Up to $100,000
If you find a security issue in Pyth, please [report the issue](https://yyyf63zqhtu.typeform.com/to/dBV4qcP0) immediately to our security team.
@ -19,16 +19,16 @@ If there is a duplicate report, either the same reporter or different reporters,
## 3rd Party Security Audits
We engage 3rd party firms to conduct independent security audits of Pyth. At any given time, we likely have multiple audit streams in progress.
We engage 3rd party firms to conduct independent security audits of Pyth. At any given time, we likely have multiple audit streams in progress.
As these 3rd party audits are completed and issues are sufficiently addressed, we make those audit reports public.
- **[April 27, 2022 - Zellic](https://github.com/pyth-network/audit-reports/blob/main/2022_04_27/pyth2wormhole_zellic.pdf)**
- **Scope**: *pyth-crosschain (formerly known as pyth2wormhole)*
- **Scope**: _pyth-crosschain (formerly known as pyth2wormhole)_
- **[October 10, 2022 - OtterSec](https://github.com/pyth-network/audit-reports/blob/main/2022_10_10/pyth_aptos.pdf)**
- **Scope**: *pyth-aptos contracts*
- **Scope**: _pyth-aptos contracts_
- **[November 01, 2022 - Zellic](https://github.com/pyth-network/audit-reports/blob/main/2022_11_01/pyth.pdf)**
- **Scope**: *pyth-evm contracts*
- **Scope**: _pyth-evm contracts_
## Social Media Monitoring
@ -42,7 +42,7 @@ In the case of a large ecosystem development that requires response, the Pyth pr
The Pyth project maintains an incident response program to respond to vulnerabilities or active threats to Pyth, its users, or the ecosystems it's connected to. Pyth can be made aware about a security event from a variety of different sources (eg. bug bounty program, audit finding, security monitoring, social media, etc.)
When a Pyth project contributor becomes aware of a security event, that contributor immediately holds the role of [incident commander](https://en.wikipedia.org/wiki/Incident_commander) for the issue until they hand off to a more appropriate incident commander. A contributor does not need to be a "security person" or have any special privileges to hold the role of incident commander, they simply need to be responsible, communicate effectively, and maintain the following obligations to manage the incident to completion.
When a Pyth project contributor becomes aware of a security event, that contributor immediately holds the role of [incident commander](https://en.wikipedia.org/wiki/Incident_commander) for the issue until they hand off to a more appropriate incident commander. A contributor does not need to be a "security person" or have any special privileges to hold the role of incident commander, they simply need to be responsible, communicate effectively, and maintain the following obligations to manage the incident to completion.
The role of the incident commander for Pyth includes the following minimum obligations:

View File

@ -179,8 +179,8 @@ docker_build(
k8s_yaml_with_ns("./devnet/pyth.yaml")
k8s_resource(
"pyth",
resource_deps = ["solana-devnet"],
"pyth",
resource_deps = ["solana-devnet"],
labels = ["pyth"],
trigger_mode = trigger_mode,
)

View File

@ -18,7 +18,7 @@ deployer = "_"
wormhole = "_"
[dev-addresses]
# Note that these are localnet addresses, for use in testing. The contracts are deployed to the real networks at the
# Note that these are localnet addresses, for use in testing. The contracts are deployed to the real networks at the
# addresses documented at https://docs.pyth.network/consume-data/aptos#addresses
pyth = "0x377f0d7c4aaeea8dd73a7a2c6bc817e59aea569e1f9ae0d2f4a80ea1be93bf01"
deployer = "0x277fa055b6a73c42c0662d5236c65c864ccbf2d4abd21f174a30c8b786eab84b"

View File

@ -118,13 +118,13 @@ module pyth::batch_price_attestation {
let ema_price = deserialize::deserialize_i64(cur);
let ema_conf = deserialize::deserialize_u64(cur);
let status = price_status::from_u64((deserialize::deserialize_u8(cur) as u64));
// Skip obsolete fields
let _num_publishers = deserialize::deserialize_u32(cur);
let _max_num_publishers = deserialize::deserialize_u32(cur);
let attestation_time = deserialize::deserialize_u64(cur);
let publish_time = deserialize::deserialize_u64(cur); //
let publish_time = deserialize::deserialize_u64(cur); //
let prev_publish_time = deserialize::deserialize_u64(cur);
let prev_price = deserialize::deserialize_i64(cur);
let prev_conf = deserialize::deserialize_u64(cur);
@ -139,7 +139,7 @@ module pyth::batch_price_attestation {
current_price = pyth::price::new(prev_price, prev_conf, expo, prev_publish_time);
};
// If status is trading, use the timestamp of the aggregate as the timestamp for the
// If status is trading, use the timestamp of the aggregate as the timestamp for the
// EMA price. If not, the EMA will have last been updated when the aggregate last had
// trading status, so use prev_publish_time (the time when the aggregate last had trading status).
let ema_timestamp = publish_time;
@ -175,10 +175,10 @@ module pyth::batch_price_attestation {
let arrival_time = 1663074349;
timestamp::update_global_time_for_test(1663074349 * 1000000);
// A raw batch price attestation
// A raw batch price attestation
// The first attestation has a status of UNKNOWN
let bytes = x"5032574800030000000102000400951436e0be37536be96f0896366089506a59763d036728332d3e3038047851aea7c6c75c89f14810ec1c54c03ab8f1864a4c4032791f05747f560faec380a695d1000000000000049a0000000000000008fffffffb00000000000005dc0000000000000003000000000100000001000000006329c0eb000000006329c0e9000000006329c0e400000000000006150000000000000007215258d81468614f6b7e194c5d145609394f67b041e93e6695dcc616faadd0603b9551a68d01d954d6387aff4df1529027ffb2fee413082e509feb29cc4904fe000000000000041a0000000000000003fffffffb00000000000005cb0000000000000003010000000100000001000000006329c0eb000000006329c0e9000000006329c0e4000000000000048600000000000000078ac9cf3ab299af710d735163726fdae0db8465280502eb9f801f74b3c1bd190333832fad6e36eb05a8972fe5f219b27b5b2bb2230a79ce79beb4c5c5e7ecc76d00000000000003f20000000000000002fffffffb00000000000005e70000000000000003010000000100000001000000006329c0eb000000006329c0e9000000006329c0e40000000000000685000000000000000861db714e9ff987b6fedf00d01f9fea6db7c30632d6fc83b7bc9459d7192bc44a21a28b4c6619968bd8c20e95b0aaed7df2187fd310275347e0376a2cd7427db800000000000006cb0000000000000001fffffffb00000000000005e40000000000000003010000000100000001000000006329c0eb000000006329c0e9000000006329c0e400000000000007970000000000000001";
let expected = BatchPriceAttestation {
header: Header {
magic: 0x50325748,

View File

@ -26,7 +26,7 @@ module pyth::deserialize {
public fun deserialize_i32(cur: &mut Cursor<u8>): I64 {
let deserialized = deserialize_u32(cur);
// If negative, pad the value
let negative = (deserialized >> 31) == 1;
if (negative) {
@ -49,7 +49,7 @@ module pyth::deserialize {
fun test_deserialize_u8() {
let input = x"48258963";
let cursor = cursor::init(input);
let result = deserialize_u8(&mut cursor);
assert!(result == 0x48, 1);
@ -61,7 +61,7 @@ module pyth::deserialize {
fun test_deserialize_u16() {
let input = x"48258963";
let cursor = cursor::init(input);
let result = deserialize_u16(&mut cursor);
assert!(result == 0x4825, 1);
@ -73,7 +73,7 @@ module pyth::deserialize {
fun test_deserialize_u32() {
let input = x"4825896349741695";
let cursor = cursor::init(input);
let result = deserialize_u32(&mut cursor);
assert!(result == 0x48258963, 1);
@ -85,7 +85,7 @@ module pyth::deserialize {
fun test_deserialize_i32_positive() {
let input = x"4825896349741695";
let cursor = cursor::init(input);
let result = deserialize_i32(&mut cursor);
assert!(result == i64::from_u64(0x48258963), 1);
@ -97,7 +97,7 @@ module pyth::deserialize {
fun test_deserialize_i32_negative() {
let input = x"FFFFFDC349741695";
let cursor = cursor::init(input);
let result = deserialize_i32(&mut cursor);
assert!(result == i64::from_u64(0xFFFFFFFFFFFFFDC3), 1);
@ -109,7 +109,7 @@ module pyth::deserialize {
fun test_deserialize_u64() {
let input = x"48258963497416957497253486";
let cursor = cursor::init(input);
let result = deserialize_u64(&mut cursor);
assert!(result == 0x4825896349741695, 1);
@ -121,7 +121,7 @@ module pyth::deserialize {
fun test_deserialize_i64_positive() {
let input = x"48258963497416957497253486";
let cursor = cursor::init(input);
let result = deserialize_i64(&mut cursor);
assert!(result == i64::from_u64(0x4825896349741695), 1);
@ -133,7 +133,7 @@ module pyth::deserialize {
fun test_deserialize_i64_negative() {
let input = x"FFFFFFFFFFFFFDC37497253486";
let cursor = cursor::init(input);
let result = deserialize_i64(&mut cursor);
assert!(result == i64::from_u64(0xFFFFFFFFFFFFFDC3), 1);

View File

@ -78,7 +78,7 @@ module pyth::error {
public fun positive_value(): u64 {
error::invalid_state(19)
}
public fun invalid_governance_magic_value(): u64 {
error::invalid_argument(20)
}

View File

@ -35,5 +35,5 @@ module pyth::event {
timestamp,
}
);
}
}
}

View File

@ -25,7 +25,7 @@ module pyth::contract_upgrade {
let cursor = cursor::init(bytes);
let hash = contract_upgrade_hash::from_byte_vec(deserialize::deserialize_vector(&mut cursor, HASH_LENGTH));
cursor::destroy_empty(cursor);
AuthorizeContractUpgrade {
hash,
}
@ -51,7 +51,7 @@ module pyth::contract_upgrade {
vector::reverse(&mut reversed);
let flattened = aptos_hash::keccak256(metadata_serialized);
while (!vector::is_empty(&reversed)) vector::append(&mut flattened, aptos_hash::keccak256(vector::pop_back(&mut reversed)));
aptos_hash::keccak256(flattened) == contract_upgrade_hash::destroy(hash)
}
}

View File

@ -141,7 +141,7 @@ module pyth::governance_test {
// - Emitter chain ID 50
// - Emitter address 0xf06413c0148c78916554f134dcd17a7c8029a3a2bda475a4a1182305c53078bf
// - Sequence number 1
// - A payload representing a governance instruction with:
// - A payload representing a governance instruction with:
// - Module number 2
let vaa_bytes = x"010000000001001d9fd73b3fb0fc522eae5eb5bd40ddf68941894495d7cec8c8efdbf462e48715171b5c6d4bbca0c1e3843b3c28d0ca6f3f76874624b5595a3a2cbfdb3907b62501527e4f9b000000010032f06413c0148c78916554f134dcd17a7c8029a3a2bda475a4a1182305c53078bf0000000000000001005054474d0202001003001793a28e2e5b4cb88f69e96fb29a8287a88b23f0e99f5502f81744e904da8e3b4d000c9a4066ce1fa26da1c102a3e268abd3ca58e3b3c25f250e6ad9a3525066fbf8b00012f7778ca023d5cbe37449bab2faa2a133fe02b056c2c25605950320df08750f35";
governance::execute_governance_instruction(vaa_bytes);
@ -156,7 +156,7 @@ module pyth::governance_test {
// - Emitter chain ID 50
// - Emitter address 0xf06413c0148c78916554f134dcd17a7c8029a3a2bda475a4a1182305c53078bf
// - Sequence number 1
// - A payload representing a governance instruction with:
// - A payload representing a governance instruction with:
// - Module number 1
// - Target chain 17 != wormhole test chain ID 22
let vaa_bytes = x"010000000001001ed81e10f8e52e6a7daeca12bf0859c14e8dabed737eaed9a1f8227190a9d11c48d58856713243c5d7de08ed49de4aa1efe7c5e6020c11056802e2d702aa4b2e00527e4f9b000000010032f06413c0148c78916554f134dcd17a7c8029a3a2bda475a4a1182305c53078bf0000000000000001005054474d0102001103001793a28e2e5b4cb88f69e96fb29a8287a88b23f0e99f5502f81744e904da8e3b4d000c9a4066ce1fa26da1c102a3e268abd3ca58e3b3c25f250e6ad9a3525066fbf8b00012f7778ca023d5cbe37449bab2faa2a133fe02b056c2c25605950320df08750f35";
@ -172,7 +172,7 @@ module pyth::governance_test {
// - Emitter chain ID 50
// - Emitter address 0xf06413c0148c78916554f134dcd17a7c8029a3a2bda475a4a1182305c53078bf
// - Sequence number 1
// - A payload representing a governance instruction with:
// - A payload representing a governance instruction with:
// - Module number 1
// - Target chain 22
// - Action 19 (invalid)
@ -188,14 +188,14 @@ module pyth::governance_test {
// - Emitter chain ID 50
// - Emitter address 0xf06413c0148c78916554f134dcd17a7c8029a3a2bda475a4a1182305c53078bf
// - Sequence number 5
// - A payload representing a governance instruction with:
// - A payload representing a governance instruction with:
// - Module number 1
// - Target chain 22
// - AuthorizeContractUpgrade {
// hash: 0xa381a47fd0e97f34c71ef491c82208f58cd0080e784c697e65966d2a25d20d56,
// }
let vaa_bytes = x"010000000001002242229aec7d320a437cb241672dacfbc34c9155c02f60cd806bbfcd69bb7ba667fc069e372ae0443a7f3e08eaad61930b00784faeb2b72ecf5d1b0f0fa486a101527e4f9b000000010032f06413c0148c78916554f134dcd17a7c8029a3a2bda475a4a1182305c53078bf0000000000000005005054474d01000016a381a47fd0e97f34c71ef491c82208f58cd0080e784c697e65966d2a25d20d56";
governance::execute_governance_instruction(vaa_bytes);
assert!(state::get_last_executed_governance_sequence() == 5, 1);
@ -212,14 +212,14 @@ module pyth::governance_test {
// - Emitter chain ID 50
// - Emitter address 0xf06413c0148c78916554f134dcd17a7c8029a3a2bda475a4a1182305c53078bf
// - Sequence number 5
// - A payload representing a governance instruction with:
// - A payload representing a governance instruction with:
// - Module number 1
// - Target chain 0
// - AuthorizeContractUpgrade {
// hash: 0xa381a47fd0e97f34c71ef491c82208f58cd0080e784c697e65966d2a25d20d56,
// }
let vaa_bytes = x"01000000000100303c10020c537205ed0322b7ec9d9b296f4e3e12e39ebde985ed4ef4c8f5565256cfc6f90800c4683dba62b577cc994e2ca9135d32b955040b94718cdcb5527600527e4f9b000000010032f06413c0148c78916554f134dcd17a7c8029a3a2bda475a4a1182305c53078bf0000000000000005005054474d01000000a381a47fd0e97f34c71ef491c82208f58cd0080e784c697e65966d2a25d20d56";
governance::execute_governance_instruction(vaa_bytes);
assert!(state::get_last_executed_governance_sequence() == 5, 1);
@ -234,7 +234,7 @@ module pyth::governance_test {
setup_test(100, initial_governance_emitter_chain_id, initial_governance_emitter_address, 100);
state::set_last_executed_governance_sequence(25);
let initial_governance_data_source = data_source::new(initial_governance_emitter_chain_id, external_address::from_bytes(initial_governance_emitter_address));
assert!(state::is_valid_governance_data_source(initial_governance_data_source), 1);
@ -242,7 +242,7 @@ module pyth::governance_test {
// - Emitter chain ID 50
// - Emitter address 0xf06413c0148c78916554f134dcd17a7c8029a3a2bda475a4a1182305c53078bf
// - Sequence number 27
// - A payload representing a governance instruction with:
// - A payload representing a governance instruction with:
// - Module number 1
// - Target chain 22
// - SetGovernanceDataSource {
@ -266,7 +266,7 @@ module pyth::governance_test {
// - Emitter chain ID 9
// - Emitter address 0x625bae57728a368652a0ab0a89808de5fffa61d3312f1a27c3e200e99b1f3058
// - Sequence number 15
// - A payload representing a governance instruction with:
// - A payload representing a governance instruction with:
// - Module number 1
// - Target chain 22
// - SetStalePriceThreshold {
@ -287,7 +287,7 @@ module pyth::governance_test {
setup_test(100, initial_governance_emitter_chain_id, initial_governance_emitter_address, 100);
state::set_last_executed_governance_sequence(25);
let initial_governance_data_source = data_source::new(initial_governance_emitter_chain_id, external_address::from_bytes(initial_governance_emitter_address));
assert!(state::is_valid_governance_data_source(initial_governance_data_source), 1);
@ -295,7 +295,7 @@ module pyth::governance_test {
// - Emitter chain ID 50
// - Emitter address x"f06413c0148c78916554f134dcd17a7c8029a3a2bda475a4a1182305c53078bf"
// - Sequence number 27
// - A payload representing a governance instruction with:
// - A payload representing a governance instruction with:
// - Module number 1
// - Target chain 22
// - SetGovernanceDataSource {
@ -333,7 +333,7 @@ module pyth::governance_test {
// - Emitter chain ID 50
// - Emitter address 0xf06413c0148c78916554f134dcd17a7c8029a3a2bda475a4a1182305c53078bf
// - Sequence number 1
// - A payload representing a governance instruction with:
// - A payload representing a governance instruction with:
// - Module number 1
// - Target chain 22
// - SetUpdateFee {
@ -359,7 +359,7 @@ module pyth::governance_test {
// - Emitter chain ID 50
// - Emitter address 0xf06413c0148c78916554f134dcd17a7c8029a3a2bda475a4a1182305c53078bf
// - Sequence number 1
// - A payload representing a governance instruction with:
// - A payload representing a governance instruction with:
// - Module number 1
// - Target chain 22
// - SetStalePriceThreshold {
@ -381,7 +381,7 @@ module pyth::governance_test {
// - Emitter chain ID 50
// - Emitter address 0xf06413c0148c78916554f134dcd17a7c8029a3a2bda475a4a1182305c53078bf
// - Sequence number 1
// - A payload representing a governance instruction with:
// - A payload representing a governance instruction with:
// - Module number 1
// - Target chain 22
// - SetDataSources {

View File

@ -20,7 +20,7 @@ module pyth::governance_instruction {
let target_chain_id = instruction.target_chain_id;
assert!(target_chain_id == u16::to_u64(wormhole::state::get_chain_id()) || target_chain_id == 0, error::invalid_governance_target_chain_id());
}
public fun from_byte_vec(bytes: vector<u8>): GovernanceInstruction {
let cursor = cursor::init(bytes);
let magic = deserialize::deserialize_vector(&mut cursor, 4);

View File

@ -10,7 +10,7 @@ module pyth::set_update_fee {
struct SetUpdateFee {
mantissa: u64,
exponent: u64,
exponent: u64,
}
public(friend) fun execute(payload: vector<u8>) {

View File

@ -1,12 +1,12 @@
module pyth::i64 {
use pyth::error;
const MAX_POSITIVE_MAGNITUDE: u64 = (1 << 63) - 1;
const MAX_NEGATIVE_MAGNITUDE: u64 = (1 << 63);
/// As Move does not support negative numbers natively, we use our own internal
/// representation.
///
/// representation.
///
/// To consume these values, first call `get_is_negative()` to determine if the I64
/// represents a negative or positive value. Then call `get_magnitude_if_positive()` or
/// `get_magnitude_if_negative()` to get the magnitude of the number in unsigned u64 format.
@ -22,7 +22,7 @@ module pyth::i64 {
max_magnitude = MAX_NEGATIVE_MAGNITUDE;
};
assert!(magnitude <= max_magnitude, error::magnitude_too_large());
// Ensure we have a single zero representation: (0, false).
// (0, true) is invalid.

View File

@ -18,7 +18,7 @@ module pyth::price_info {
public fun get_price_feed(price_info: &PriceInfo): &PriceFeed {
&price_info.price_feed
}
public fun get_attestation_time(price_info: &PriceInfo): u64 {
price_info.attestation_time
}

View File

@ -1,6 +1,6 @@
module pyth::price_status {
use pyth::error;
/// The price feed is not currently updating for an unknown reason.
const UNKNOWN: u64 = 0;
/// The price feed is updating as expected.

View File

@ -119,22 +119,22 @@ module pyth::pyth {
// -----------------------------------------------------------------------------
// Update the cached prices
//
// Pyth uses an on-demand update model, where consumers need to update the
// Pyth uses an on-demand update model, where consumers need to update the
/// cached prices before using them. Please read more about this at https://docs.pyth.network/consume-data/on-demand.
/// Update the cached price feeds with the data in the given VAAs. This is a
/// Update the cached price feeds with the data in the given VAAs. This is a
/// convenience wrapper around update_price_feeds(), which allows you to update the price feeds
/// using an entry function.
///
///
/// If possible, it is recommended to use update_price_feeds() instead, which avoids the need
/// to pass a signer account. update_price_feeds_with_funder() should only be used when
/// you need to call an entry function.
///
///
/// This function will charge an update fee, transferring some AptosCoin's
/// from the given funder account to the Pyth contract. The amount of coins that will be transferred
/// to perform this update can be queried with get_update_fee(&vaas). The signer must have sufficient
/// to perform this update can be queried with get_update_fee(&vaas). The signer must have sufficient
/// account balance to pay this fee, otherwise the transaction will abort.
///
///
/// Please read more information about the update fee here: https://docs.pyth.network/consume-data/on-demand#fees
public entry fun update_price_feeds_with_funder(account: &signer, vaas: vector<vector<u8>>) {
let coins = coin::withdraw<AptosCoin>(account, get_update_fee(&vaas));
@ -143,14 +143,14 @@ module pyth::pyth {
/// Update the cached price feeds with the data in the given VAAs.
/// The vaas argument is a vector of VAAs encoded as bytes.
///
///
/// The javascript https://github.com/pyth-network/pyth-js/tree/main/pyth-aptos-js package
/// should be used to fetch these VAAs from the Price Service. More information about this
/// process can be found at https://docs.pyth.network/consume-data.
///
///
/// The given fee must contain a sufficient number of coins to pay the update fee for the given vaas.
/// The update fee amount can be queried by calling get_update_fee(&vaas).
///
///
/// Please read more information about the update fee here: https://docs.pyth.network/consume-data/on-demand#fees
public fun update_price_feeds(vaas: vector<vector<u8>>, fee: Coin<AptosCoin>) {
// Charge the message update fee
@ -198,7 +198,7 @@ module pyth::pyth {
/// A convenience wrapper around update_price_feeds_if_fresh(), allowing you to conditionally
/// update the price feeds using an entry function.
///
///
/// If possible, it is recommended to use update_price_feeds_if_fresh() instead, which avoids the need
/// to pass a signer account. update_price_feeds_if_fresh_with_funder() should only be used when
/// you need to call an entry function.
@ -216,9 +216,9 @@ module pyth::pyth {
/// prices in the update are fresh. The price_identifiers and publish_times parameters
/// are used to determine if the update is fresh without doing any serialisation or verification
/// of the VAAs, potentially saving time and gas. If the update contains no fresh data, this function
/// will revert with error::no_fresh_data().
///
/// For a given price update i in the batch, that price is considered fresh if the current cached
/// will revert with error::no_fresh_data().
///
/// For a given price update i in the batch, that price is considered fresh if the current cached
/// price for price_identifiers[i] is older than publish_times[i].
public entry fun update_price_feeds_if_fresh(
vaas: vector<vector<u8>>,
@ -265,7 +265,7 @@ module pyth::pyth {
return true
};
let cached_timestamp = price::get_timestamp(&get_price_unsafe(*price_identifier));
update_timestamp > cached_timestamp
}
@ -280,17 +280,17 @@ module pyth::pyth {
state::price_info_cached(price_identifier)
}
/// Get the latest available price cached for the given price identifier, if that price is
/// Get the latest available price cached for the given price identifier, if that price is
/// no older than the stale price threshold.
///
///
/// Please refer to the documentation at https://docs.pyth.network/consumers/best-practices for
/// how to how this price safely.
///
/// Important: Pyth uses an on-demand update model, where consumers need to update the
///
/// Important: Pyth uses an on-demand update model, where consumers need to update the
/// cached prices before using them. Please read more about this at https://docs.pyth.network/consume-data/on-demand.
/// get_price() is likely to abort unless you call update_price_feeds() to update the cached price
/// beforehand, as the cached prices may be older than the stale price threshold.
///
///
/// Note that the price_identifier does not correspond to a seperate Aptos account:
/// all price feeds are stored in the single pyth account. The price identifier is an
/// opaque identifier for a price feed.
@ -298,7 +298,7 @@ module pyth::pyth {
get_price_no_older_than(price_identifier, state::get_stale_price_threshold_secs())
}
/// Get the latest available price cached for the given price identifier, if that price is
/// Get the latest available price cached for the given price identifier, if that price is
/// no older than the given age.
public fun get_price_no_older_than(price_identifier: PriceIdentifier, max_age_secs: u64): Price {
let price = get_price_unsafe(price_identifier);
@ -308,11 +308,11 @@ module pyth::pyth {
}
/// Get the latest available price cached for the given price identifier.
///
///
/// WARNING: the returned price can be from arbitrarily far in the past.
/// This function makes no guarantees that the returned price is recent or
/// useful for any particular application. Users of this function should check
/// the returned timestamp to ensure that the returned price is sufficiently
/// the returned timestamp to ensure that the returned price is sufficiently
/// recent for their application. The checked get_price_no_older_than()
/// function should be used in preference to this.
public fun get_price_unsafe(price_identifier: PriceIdentifier): Price {
@ -339,10 +339,10 @@ module pyth::pyth {
assert!(age < max_age_secs, error::stale_price_update());
}
/// Get the latest available exponentially moving average price cached for the given
/// Get the latest available exponentially moving average price cached for the given
/// price identifier, if that price is no older than the stale price threshold.
///
/// Important: Pyth uses an on-demand update model, where consumers need to update the
///
/// Important: Pyth uses an on-demand update model, where consumers need to update the
/// cached prices before using them. Please read more about this at https://docs.pyth.network/consume-data/on-demand.
/// get_ema_price() is likely to abort unless you call update_price_feeds() to update the cached price
/// beforehand, as the cached prices may be older than the stale price threshold.
@ -357,14 +357,14 @@ module pyth::pyth {
check_price_is_fresh(&price, max_age_secs);
price
}
}
/// Get the latest available exponentially moving average price cached for the given price identifier.
///
///
/// WARNING: the returned price can be from arbitrarily far in the past.
/// This function makes no guarantees that the returned price is recent or
/// useful for any particular application. Users of this function should check
/// the returned timestamp to ensure that the returned price is sufficiently
/// the returned timestamp to ensure that the returned price is sufficiently
/// recent for their application. The checked get_ema_price_no_older_than()
/// function should be used in preference to this.
public fun get_ema_price_unsafe(price_identifier: PriceIdentifier): Price {
@ -373,7 +373,7 @@ module pyth::pyth {
}
/// Get the number of AptosCoin's required to perform the given price updates.
///
///
/// Please read more information about the update fee here: https://docs.pyth.network/consume-data/on-demand#fees
public fun get_update_fee(update_data: &vector<vector<u8>>): u64 {
state::get_base_update_fee() * vector::length(update_data)
@ -419,7 +419,7 @@ module pyth::pyth_test {
account::create_test_signer_cap(@0x277fa055b6a73c42c0662d5236c65c864ccbf2d4abd21f174a30c8b786eab84b));
let (_pyth, signer_capability) = account::create_resource_account(&deployer, b"pyth");
pyth::init_test(signer_capability, stale_price_threshold, governance_emitter_chain_id, governance_emitter_address, data_sources, update_fee);
let (burn_capability, mint_capability) = aptos_coin::initialize_for_test(aptos_framework);
let coins = coin::mint(to_mint, &mint_capability);
(burn_capability, mint_capability, coins)
@ -485,7 +485,7 @@ module pyth::pyth_test {
public fun update_cache_for_test(updates: vector<PriceInfo>) {
pyth::update_cache(updates);
}
#[test(aptos_framework = @aptos_framework)]
fun test_get_update_fee(aptos_framework: &signer) {
let single_update_fee = 50;
@ -570,7 +570,7 @@ module pyth::pyth_test {
#[test(aptos_framework = @aptos_framework)]
fun test_update_price_feeds_success(aptos_framework: &signer) {
let (burn_capability, mint_capability, coins) = setup_test(aptos_framework, 500, 1, x"5d1f252d5de865279b00c84bce362774c2804294ed53299bc4a0389a5defef92", data_sources_for_test_vaa(), 50, 100);
// Update the price feeds from the VAA
pyth::update_price_feeds(TEST_VAAS, coins);
@ -597,7 +597,7 @@ module pyth::pyth_test {
assert!(coin::balance<AptosCoin>(signer::address_of(&funder)) == initial_balance, 1);
assert!(coin::balance<AptosCoin>(@pyth) == 0, 1);
// Update the price feeds using the funder
// Update the price feeds using the funder
pyth::update_price_feeds_with_funder(&funder, TEST_VAAS);
// Check that the price feeds are now cached
@ -629,7 +629,7 @@ module pyth::pyth_test {
assert!(coin::balance<AptosCoin>(signer::address_of(&funder)) == initial_balance, 1);
assert!(coin::balance<AptosCoin>(@pyth) == 0, 1);
// Update the price feeds using the funder
// Update the price feeds using the funder
pyth::update_price_feeds_with_funder(&funder, TEST_VAAS);
cleanup_test(burn_capability, mint_capability);
@ -673,7 +673,7 @@ module pyth::pyth_test {
assert!(!pyth::price_feed_exists(*price_feed::get_price_identifier(price_feed)), 1);
i = i + 1;
};
// Submit the updates
pyth::update_cache(updates);
@ -687,7 +687,7 @@ module pyth::pyth_test {
#[test(aptos_framework = @aptos_framework)]
fun test_update_cache_old_update(aptos_framework: &signer) {
let (burn_capability, mint_capability, coins) = setup_test(aptos_framework, 1000, 1, x"5d1f252d5de865279b00c84bce362774c2804294ed53299bc4a0389a5defef92", data_sources_for_test_vaa(), 50, 0);
// Submit a price update
let timestamp = 1663680700;
let price_identifier = price_identifier::from_byte_vec(x"baa284eaf23edf975b371ba2818772f93dbae72836bbdea28b07d40f3cf8b485");
@ -725,7 +725,7 @@ module pyth::pyth_test {
assert!(pyth::get_price(price_identifier) == price, 1);
assert!(pyth::get_ema_price(price_identifier) == ema_price, 1);
// Update the cache with a fresh update
// Update the cache with a fresh update
let fresh_price = price::new(i64::new(4857, true), 9979, i64::new(243, false), timestamp + 200);
let fresh_ema_price = price::new(i64::new(74637, false), 9979, i64::new(1433, false), timestamp + 1);
let fresh_update = price_info::new(
@ -827,8 +827,8 @@ module pyth::pyth_test {
#[expected_failure(abort_code = 65541)]
fun test_update_price_feeds_if_fresh_invalid_length(aptos_framework: &signer) {
let (burn_capability, mint_capability, coins) = setup_test(aptos_framework, 500, 1, x"5d1f252d5de865279b00c84bce362774c2804294ed53299bc4a0389a5defef92", data_sources_for_test_vaa(), 50, 0);
// Update the price feeds
// Update the price feeds
let bytes = vector[vector[0u8, 1u8, 2u8]];
let price_identifiers = vector[
x"baa284eaf23edf975b371ba2818772f93dbae72836bbdea28b07d40f3cf8b485",
@ -846,8 +846,8 @@ module pyth::pyth_test {
#[test(aptos_framework = @aptos_framework)]
fun test_update_price_feeds_if_fresh_fresh_data(aptos_framework: &signer) {
let (burn_capability, mint_capability, coins) = setup_test(aptos_framework, 500, 1, x"5d1f252d5de865279b00c84bce362774c2804294ed53299bc4a0389a5defef92", data_sources_for_test_vaa(), 50, 50);
// Update the price feeds
// Update the price feeds
let bytes = TEST_VAAS;
let price_identifiers = vector[
x"c6c75c89f14810ec1c54c03ab8f1864a4c4032791f05747f560faec380a695d1",
@ -883,7 +883,7 @@ module pyth::pyth_test {
assert!(coin::balance<AptosCoin>(signer::address_of(&funder)) == initial_balance, 1);
assert!(coin::balance<AptosCoin>(@pyth) == 0, 1);
// Update the price feeds using the funder
// Update the price feeds using the funder
let bytes = TEST_VAAS;
let price_identifiers = vector[
x"c6c75c89f14810ec1c54c03ab8f1864a4c4032791f05747f560faec380a695d1",
@ -915,7 +915,7 @@ module pyth::pyth_test {
// First populate the cache
pyth::update_cache(get_mock_price_infos());
// Now attempt to update the price feeds with publish_times that are older than those we have cached
// This should abort with error::no_fresh_data()
let bytes = TEST_VAAS;

View File

@ -41,5 +41,5 @@ module pyth::set {
}
// TODO: destroy_empty, but this is tricky because std::table doesn't
// have this functionality.
// have this functionality.
}

View File

@ -41,8 +41,8 @@ module pyth::state {
}
/// Mapping of cached price information
///
/// WARNING: do not directly read out of this table, instead use
///
/// WARNING: do not directly read out of this table, instead use
/// the checked `pyth::get_price` method. This ensures that the price
/// is recent enough.
struct LatestPriceInfo has key {

View File

@ -3,11 +3,11 @@ module example::example {
use pyth::price::Price;
use pyth::price_identifier;
use aptos_framework::coin;
/// Updates the Pyth price feeds using the given pyth_update_data, and then returns
/// Updates the Pyth price feeds using the given pyth_update_data, and then returns
/// the BTC/USD price.
///
/// https://github.com/pyth-network/pyth-js/tree/main/pyth-aptos-js should be used to
///
/// https://github.com/pyth-network/pyth-js/tree/main/pyth-aptos-js should be used to
/// fetch the pyth_update_data off-chain and pass it in. More information about how this
/// works can be found at https://docs.pyth.network/consume-data
public fun get_btc_usd_price(user: &signer, pyth_update_data: vector<vector<u8>>): Price {

View File

@ -1,2 +1,2 @@
.aptos
build
build

View File

@ -5,18 +5,19 @@ The example contract is deployed at : `0x19f8503273cdb5aa93ffe453927768461524212
The goal of this contract is managing an NFT mint where the mint is paid in native currency but the cost of one NFT is always 1$.
This example is intended to be run on Aptos testnet because it depends on Pyth and Wormhole existing onchain.
### Important files :
### Important files :
- `./sources/minting.move` has the smart contract logic (the code that will run onchain)
- `./app/src/App.tsx` has the React application. The core logic of how the frontend will interact with the wallet and the blockchain.
Both combined contain the key pieces of code needed to make an Aptos fullstack app using Pyth!
Both combined contain the key pieces of code needed to make an Aptos fullstack app using Pyth!
### How to deploy the smart contract :
- Use `aptos init` with rest_url : `https://testnet.aptoslabs.com/` and faucet `https://faucet.testnet.aptoslabs.com` to generate a new keypair.
- Use `aptos init` with rest_url : `https://testnet.aptoslabs.com/` and faucet `https://faucet.testnet.aptoslabs.com` to generate a new keypair.
- Use a faucet to airdrop testnet APT to your newly created account by calling `aptos account fund-with-faucet --account default`. If this doesn't work, I have had success importing my private key from `.aptos/config.yaml` into Petra and clicking the airdrop button. Otherwise send APT from another account.
- Get your account address from `.aptos/config.yaml` and replace `mint_nft="0x19f8503273cdb5aa93ffe4539277684615242127aa2e65ef91424136a316c9c7"` by `mint_nft="<ADDRESS>"` in `Move.toml`
- `aptos move compile`
- `aptos move publish`
- `aptos move publish`
### How to run the webapp :

View File

@ -1,8 +1,8 @@
import React from 'react';
import { render, screen } from '@testing-library/react';
import App from './App';
import React from "react";
import { render, screen } from "@testing-library/react";
import App from "./App";
test('renders learn react link', () => {
test("renders learn react link", () => {
render(<App />);
const linkElement = screen.getByText(/learn react/i);
expect(linkElement).toBeInTheDocument();

View File

@ -19,7 +19,8 @@ const APT_USD_TESTNET_PRICE_ID =
"0x44a93dddd8effa54ea51076c4e851b6cbbfd938e82eb90197de38fe8876bb66e";
// Aptos modules : These are testnet addresses https://docs.pyth.network/consume-data/aptos#addresses
const MINT_NFT_MODULE = "0x19f8503273cdb5aa93ffe4539277684615242127aa2e65ef91424136a316c9c7";
const MINT_NFT_MODULE =
"0x19f8503273cdb5aa93ffe4539277684615242127aa2e65ef91424136a316c9c7";
/// React component that shows the offchain price and confidence interval
function PriceText(props: { price: Price | undefined }) {

View File

@ -1,13 +1,13 @@
body {
margin: 0;
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen',
'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue',
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen",
"Ubuntu", "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue",
sans-serif;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
code {
font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New',
font-family: source-code-pro, Menlo, Monaco, Consolas, "Courier New",
monospace;
}

View File

@ -1,18 +1,20 @@
import React from 'react';
import ReactDOM from 'react-dom/client';
import './index.css';
import App from './App';
import reportWebVitals from './reportWebVitals';
import React from "react";
import ReactDOM from "react-dom/client";
import "./index.css";
import App from "./App";
import reportWebVitals from "./reportWebVitals";
declare global {
interface Window { aptos: any; }
interface Window {
aptos: any;
}
}
const root = ReactDOM.createRoot(
document.getElementById('root') as HTMLElement
document.getElementById("root") as HTMLElement
);
window.addEventListener('load', () => {
window.addEventListener("load", () => {
root.render(
<React.StrictMode>
<App />

View File

@ -1,8 +1,8 @@
import { ReportHandler } from 'web-vitals';
import { ReportHandler } from "web-vitals";
const reportWebVitals = (onPerfEntry?: ReportHandler) => {
if (onPerfEntry && onPerfEntry instanceof Function) {
import('web-vitals').then(({ getCLS, getFID, getFCP, getLCP, getTTFB }) => {
import("web-vitals").then(({ getCLS, getFID, getFCP, getLCP, getTTFB }) => {
getCLS(onPerfEntry);
getFID(onPerfEntry);
getFCP(onPerfEntry);

View File

@ -2,4 +2,4 @@
// allows you to do things like:
// expect(element).toHaveTextContent(/react/i)
// learn more: https://github.com/testing-library/jest-dom
import '@testing-library/jest-dom';
import "@testing-library/jest-dom";

View File

@ -1,11 +1,7 @@
{
"compilerOptions": {
"target": "es5",
"lib": [
"dom",
"dom.iterable",
"esnext"
],
"lib": ["dom", "dom.iterable", "esnext"],
"allowJs": true,
"skipLibCheck": true,
"esModuleInterop": true,
@ -20,7 +16,5 @@
"noEmit": true,
"jsx": "react-jsx"
},
"include": [
"src"
]
"include": ["src"]
}

View File

@ -17,7 +17,7 @@ module mint_nft::minting {
use aptos_token::token::{Self, TokenDataId};
// For the entire list of price_ids head to https://pyth.network/developers/price-feed-ids/#pyth-cross-chain-testnet
const APTOS_USD_PRICE_FEED_IDENTIFIER : vector<u8> = x"44a93dddd8effa54ea51076c4e851b6cbbfd938e82eb90197de38fe8876bb66e";
const APTOS_USD_PRICE_FEED_IDENTIFIER : vector<u8> = x"44a93dddd8effa54ea51076c4e851b6cbbfd938e82eb90197de38fe8876bb66e";
// This event stores the receiver of the NFT and the TokenDataId of the NFT
struct TokenMintingEvent has drop, store {
@ -82,7 +82,7 @@ module mint_nft::minting {
/// Mint an edition of the Pythian NFT pay 1 USD in native APT
public entry fun mint_nft(receiver : &signer, vaas : vector<vector<u8>>) acquires CollectionTokenMinter{
// Fetch the signer capability to mint the NFT
let collection_token_minter = borrow_global_mut<CollectionTokenMinter>(@mint_nft);
let collection_token_minter = borrow_global_mut<CollectionTokenMinter>(@mint_nft);
let resource_signer = account::create_signer_with_capability(&collection_token_minter.signer_cap);
let token_id = token::mint_token(&resource_signer, collection_token_minter.token_data_id, 1); // Mint the NFT
@ -101,7 +101,7 @@ module mint_nft::minting {
fun update_and_fetch_price(receiver : &signer, vaas : vector<vector<u8>>) : Price {
let coins = coin::withdraw<aptos_coin::AptosCoin>(receiver, pyth::get_update_fee(&vaas)); // Get coins to pay for the update
pyth::update_price_feeds(vaas, coins); // Update price feed with the provided vaas
pyth::get_price(price_identifier::from_byte_vec(APTOS_USD_PRICE_FEED_IDENTIFIER)) // Get recent price (will fail if price is too old)
pyth::get_price(price_identifier::from_byte_vec(APTOS_USD_PRICE_FEED_IDENTIFIER)) // Get recent price (will fail if price is too old)
}
}

View File

@ -1,3 +1,3 @@
target
tools/node_modules
tools/dist
tools/dist

View File

@ -11,4 +11,3 @@ codegen-units = 1
panic = 'abort'
incremental = false
overflow-checks = true

View File

@ -5,10 +5,11 @@ This directory contains The Pyth contract on CosmWasm and utilities to deploy it
## Deployment
Deploying the CosmWasm contract has two steps:
1. Upload the code. This step will give you a code id.
2. Either create a new contract or migrate an existing one:
1. Create a new contract that has an address with a code id as its program.
2. Migrating an existing contract code id to the new code id.
1. Create a new contract that has an address with a code id as its program.
2. Migrating an existing contract code id to the new code id.
This directory includes a script to perform both steps. Read below for the details.
@ -16,7 +17,7 @@ This directory includes a script to perform both steps. Read below for the detai
First, build the contracts within [the current directory](./):
``` sh
```sh
bash build.sh
```
@ -24,31 +25,33 @@ This command will build and save the Pyth contract in the `artifact` directory.
Then, to deploy the Pyth contract (`pyth_cosmwasm.wasm`), run the following command in the `tools` directory:
``` sh
```sh
npm ci # Do it only once to install the required packages
npm run deploy-pyth -- --network testnet --artifact ../artifacts/pyth_cosmwasm.wasm --mnemonic "..."
```
If successful, this command will print something along the lines of:
``` sh
```sh
Storing WASM: ../artifacts/pyth_cosmwasm.wasm (367689 bytes)
Deploy fee: 88446uluna
Code ID: 2435
```
If you do not pass any additional arguments to the script, it will only upload the code and return the code id. If you want to create a
If you do not pass any additional arguments to the script, it will only upload the code and return the code id. If you want to create a
new contract or upgrade an existing contract you should pass more arguments that are described below.
### Instantiating new contract
If you want to instantiate a new contract after your deployment, pass `--instantiate` to the above command.
This command will upload the code and instantiates a new Pyth contract with the resulting code id:
``` sh
```sh
npm run deploy-pyth -- --network testnet --artifact ../artifacts/pyth_cosmwasm.wasm --mnemonic "..." --instantiate
```
If successful, the output should look like so:
```
Storing WASM: ../artifacts/pyth_cosmwasm.wasm (183749 bytes)
Deploy fee: 44682uluna
@ -60,14 +63,16 @@ Deployed Pyth contract at terra123456789yelw23uh22nadqlyjvtl7s5527er97
```
### Migrating existing contract
If you want to upgrade an existing contract pass `--migrate --contract terra123456xyzqwe..` to the above command.
This command will upload the code, and with the resulting code id, will migrate the existing contract to the new one:
``` sh
```sh
npm run deploy-pyth -- --network testnet --artifact ../artifacts/pyth_cosmwasm.wasm --mnemonic "..." --migrate --contract "terra123..."
```
If successful, the output should look like so:
```
Storing WASM: ../artifacts/pyth_cosmwasm.wasm (183749 bytes)
Deploy fee: 44682uluna
@ -80,14 +85,15 @@ Contract terra1rhjej5gkyelw23uh22nadqlyjvtl7s5527er97 code_id successfully updat
### Common Errors
While running the instantiation/migration commands you might get the following errors:
- Gateway timeout: This error means that the request timed out. It is good to double check with terra finder as sometimes transactions succeed despite being timed out.
- Account sequence mismatch: Transactions from an account should have an increasing sequence number. This error happens when a transaction from the same sender is not fully synchronized with the terra RPC and an old sequence number is used. This is likely to happen because the deploy script sends two transactions: one to submit the code, and one to do the instantiation/migration.
You can rerun your command if you encounter any of the above errors. If an error occurs after the new code is uploaded, you can avoid re-uploading the code and use the uploaded code for instantiation/migration. You can use the printed code id in the logs
You can rerun your command if you encounter any of the above errors. If an error occurs after the new code is uploaded, you can avoid re-uploading the code and use the uploaded code for instantiation/migration. You can use the printed code id in the logs
by passing `--code-id <codeId>` instead of `--artifact`. If you do so, the script will skip uploading the code and instantiate/migrate the contract with the given code id.
An example command using an existing code id looks like so:
``` sh
```sh
npm run deploy-pyth -- --network testnet --code-id 50123 --mnemonic "..." --migrate --contract "terra123..."
```

View File

@ -287,7 +287,7 @@ pub fn query_price_feed(deps: Deps, env: Env, address: &[u8]) -> StdResult<Price
// updated yet.
// - If a price has arrived very late to this chain it will set the status to unknown.
// - If a price is coming from future it's tolerated up to VALID_TIME_PERIOD seconds
// (using abs diff) but more than that is set to unknown, the reason could be the
// (using abs diff) but more than that is set to unknown, the reason could be the
// clock time drift between the source and target chains.
let time_abs_diff = if env_time_sec > price_pub_time_sec {
env_time_sec - price_pub_time_sec

View File

@ -22,7 +22,7 @@ pub enum PythContractError {
/// Data source does not exists error (on removing data source)
#[error("DataSourceDoesNotExists")]
DataSourceDoesNotExists,
/// Data source already exists error (on adding data source)
#[error("DataSourceAlreadyExists")]
DataSourceAlreadyExists,

View File

@ -60,7 +60,7 @@ inter-block-cache = true
# ["message.sender", "message.recipient"]
index-events = []
# IavlCacheSize set the size of the iavl tree cache.
# IavlCacheSize set the size of the iavl tree cache.
# Default cache size is 50mb.
iavl-cache-size = 781250

View File

@ -1 +1,6 @@
{"priv_key":{"type":"tendermint/PrivKeyEd25519","value":"xkHT1nM10OR4WLbdZNxQzxS5n2XbEUQhrEGS9DSpiS55fPkIkwVyvEqz0Auzv2S9ZcnoVwwAvVjg2uOfy/dlVw=="}}
{
"priv_key": {
"type": "tendermint/PrivKeyEd25519",
"value": "xkHT1nM10OR4WLbdZNxQzxS5n2XbEUQhrEGS9DSpiS55fPkIkwVyvEqz0Auzv2S9ZcnoVwwAvVjg2uOfy/dlVw=="
}
}

View File

@ -8,9 +8,9 @@
# so we need to restrict the max usage to prevent DoS attack
contract-query-gas-limit = "3000000"
# Storing instances in the LRU will have no effect on the results
# (still deterministic), but should lower execution time at
# the cost of increased memory usage. We cannot pick universal
# Storing instances in the LRU will have no effect on the results
# (still deterministic), but should lower execution time at
# the cost of increased memory usage. We cannot pick universal
# parameters for this, so we should allow node operators to set it.
lru-size = "0"

View File

@ -9,52 +9,53 @@ import { Bech32, toHex } from "@cosmjs/encoding";
import { zeroPad } from "ethers/lib/utils.js";
import axios from "axios";
import yargs from "yargs";
import {hideBin} from "yargs/helpers";
import { hideBin } from "yargs/helpers";
import assert from "assert";
export const TERRA_GAS_PRICES_URL = "https://fcd.terra.dev/v1/txs/gas_prices";
const argv = yargs(hideBin(process.argv))
.option('network', {
description: 'Which network to deploy to',
choices: ['mainnet', 'testnet'],
required: true
.option("network", {
description: "Which network to deploy to",
choices: ["mainnet", "testnet"],
required: true,
})
.option('artifact', {
description: 'Path to Pyth artifact',
type: 'string',
required: false
})
.option('mnemonic', {
description: 'Mnemonic (private key)',
type: 'string',
required: true
})
.option('instantiate', {
description: 'Instantiate contract if set (default: disabled)',
type: 'boolean',
default: false,
required: false
})
.option('migrate', {
description: 'Migrate an existing contract if set (default: disabled)',
type: 'boolean',
default: false,
required: false
})
.option('contract', {
description: 'Contract address, used only for migration',
type: 'string',
.option("artifact", {
description: "Path to Pyth artifact",
type: "string",
required: false,
default: ''
})
.option('code-id', {
description: 'Code Id, if provided this will be used for migrate/instantiate and no code will be uploaded',
type: 'number',
requred: false
.option("mnemonic", {
description: "Mnemonic (private key)",
type: "string",
required: true,
})
.option("instantiate", {
description: "Instantiate contract if set (default: disabled)",
type: "boolean",
default: false,
required: false,
})
.option("migrate", {
description: "Migrate an existing contract if set (default: disabled)",
type: "boolean",
default: false,
required: false,
})
.option("contract", {
description: "Contract address, used only for migration",
type: "string",
required: false,
default: "",
})
.option("code-id", {
description:
"Code Id, if provided this will be used for migrate/instantiate and no code will be uploaded",
type: "number",
requred: false,
})
.help()
.alias('help', 'h').argv;
.alias("help", "h").argv;
const artifact = argv.artifact;
@ -68,7 +69,8 @@ const CONFIG = {
name: "mainnet",
},
wormholeContract: "terra1dq03ugtd40zu9hcgdzrsq6z2z4hwhc9tqk2uy5",
pythEmitterAddress: "6bb14509a612f01fbbc4cffeebd4bbfb492a86df717ebe92eb6df432a3f00a25"
pythEmitterAddress:
"6bb14509a612f01fbbc4cffeebd4bbfb492a86df717ebe92eb6df432a3f00a25",
},
testnet: {
terraHost: {
@ -77,14 +79,15 @@ const CONFIG = {
name: "testnet",
},
wormholeContract: "terra1pd65m0q9tl3v8znnz5f5ltsfegyzah7g42cx5v",
pythEmitterAddress: "f346195ac02f37d60d4db8ffa6ef74cb1be3550047543a4a9ee9acf4d78697b0"
}
}
pythEmitterAddress:
"f346195ac02f37d60d4db8ffa6ef74cb1be3550047543a4a9ee9acf4d78697b0",
},
};
const terraHost = CONFIG[argv.network].terraHost;
const wormholeContract = CONFIG[argv.network].wormholeContract;
const pythEmitterAddress = CONFIG[argv.network].pythEmitterAddress;
const lcd = new LCDClient(terraHost);
const feeDenoms = ["uluna"];
@ -95,19 +98,21 @@ const gasPrices = await axios
const wallet = lcd.wallet(
new MnemonicKey({
mnemonic: argv.mnemonic
mnemonic: argv.mnemonic,
})
);
/* Deploy artifacts */
var codeId;
var codeId;
if (argv.codeId !== undefined) {
codeId = argv.codeId;
} else {
if (argv.artifact === undefined) {
console.error("Artifact is not provided. Please at least provide artifact or code id");
console.error(
"Artifact is not provided. Please at least provide artifact or code id"
);
process.exit(1);
}
@ -142,10 +147,12 @@ if (argv.codeId !== undefined) {
try {
const ci = /"code_id","value":"([^"]+)/gm.exec(rs.raw_log)[1];
codeId = parseInt(ci);
} catch(e) {
console.error("Encountered an error in parsing deploy code result. Printing raw log")
} catch (e) {
console.error(
"Encountered an error in parsing deploy code result. Printing raw log"
);
console.error(rs.raw_log);
throw(e);
throw e;
}
console.log("Code ID: ", codeId);
@ -176,13 +183,19 @@ if (argv.instantiate) {
.then((rs) => {
try {
address = /"contract_address","value":"([^"]+)/gm.exec(rs.raw_log)[1];
} catch (e) {
console.error("Encountered an error in parsing instantiation result. Printing raw log")
} catch (e) {
console.error(
"Encountered an error in parsing instantiation result. Printing raw log"
);
console.error(rs.raw_log);
throw(e);
throw e;
}
});
console.log(`Instantiated Pyth at ${address} (${convert_terra_address_to_hex(address)})`);
console.log(
`Instantiated Pyth at ${address} (${convert_terra_address_to_hex(
address
)})`
);
return address;
}
@ -190,9 +203,7 @@ if (argv.instantiate) {
const contractAddress = await instantiate(codeId, {
wormhole_contract: wormholeContract,
pyth_emitter: Buffer.from(pythEmitterAddress, "hex").toString(
"base64"
),
pyth_emitter: Buffer.from(pythEmitterAddress, "hex").toString("base64"),
pyth_emitter_chain: pythChain,
});
@ -200,8 +211,10 @@ if (argv.instantiate) {
}
if (argv.migrate) {
if (argv.contract === '') {
console.error("Contract address is not provided. Provide it using --contract");
if (argv.contract === "") {
console.error(
"Contract address is not provided. Provide it using --contract"
);
process.exit(1);
}
@ -214,7 +227,7 @@ if (argv.migrate) {
argv.contract,
codeId,
{
"action": ""
action: "",
},
{ uluna: 1000 }
),
@ -222,19 +235,23 @@ if (argv.migrate) {
feeDenoms,
gasPrices,
});
const rs = await lcd.tx.broadcast(tx);
var resultCodeId;
try {
resultCodeId = /"code_id","value":"([^"]+)/gm.exec(rs.raw_log)[1];
assert.equal(codeId, resultCodeId)
assert.equal(codeId, resultCodeId);
} catch (e) {
console.error("Encountered an error in parsing migration result. Printing raw log")
console.error(
"Encountered an error in parsing migration result. Printing raw log"
);
console.error(rs.raw_log);
throw(e);
throw e;
}
console.log(`Contract ${argv.contract} code_id successfully updated to ${resultCodeId}`);
console.log(
`Contract ${argv.contract} code_id successfully updated to ${resultCodeId}`
);
}
// Terra addresses are "human-readable", but for cross-chain registrations, we
@ -244,5 +261,5 @@ function convert_terra_address_to_hex(human_addr) {
}
function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
return new Promise((resolve) => setTimeout(resolve, ms));
}

View File

@ -1,13 +1,10 @@
// Deploy Wormhole and Pyth contract to Tilt. If you want to
// Deploy Wormhole and Pyth contract to Tilt. If you want to
// test the contracts locally you need to build the wormhole contract
// as well. You can use Dockerfile.cosmwasm in the root of this repo
// to do that.
import { LCDClient, MnemonicKey } from "@terra-money/terra.js";
import {
MsgInstantiateContract,
MsgStoreCode,
} from "@terra-money/terra.js";
import { MsgInstantiateContract, MsgStoreCode } from "@terra-money/terra.js";
import { readFileSync, readdirSync } from "fs";
import { Bech32, toHex } from "@cosmjs/encoding";
import { zeroPad } from "ethers/lib/utils.js";
@ -17,10 +14,7 @@ import { zeroPad } from "ethers/lib/utils.js";
contracts must be imported in a deterministic order so their addresses remain
deterministic.
*/
const artifacts = [
"wormhole.wasm",
"pyth_cosmwasm.wasm",
];
const artifacts = ["wormhole.wasm", "pyth_cosmwasm.wasm"];
/* Check that the artifact folder contains all the wasm files we expect and nothing else */
@ -133,7 +127,11 @@ async function instantiate(contract, inst_msg, label) {
.then((rs) => {
address = /"_contract_address","value":"([^"]+)/gm.exec(rs.raw_log)[1];
});
console.log(`Instantiated ${contract} at ${address} (${convert_terra_address_to_hex(address)})`);
console.log(
`Instantiated ${contract} at ${address} (${convert_terra_address_to_hex(
address
)})`
);
return address;
}
@ -142,36 +140,42 @@ async function instantiate(contract, inst_msg, label) {
const addresses = {};
addresses["wormhole.wasm"] = await instantiate("wormhole.wasm", {
gov_chain: govChain,
gov_address: Buffer.from(govAddress, "hex").toString("base64"),
guardian_set_expirity: 86400,
initial_guardian_set: {
addresses: [
{
bytes: Buffer.from(
"beFA429d57cD18b7F8A4d91A2da9AB4AF05d0FBe",
"hex"
).toString("base64"),
},
],
expiration_time: 0,
addresses["wormhole.wasm"] = await instantiate(
"wormhole.wasm",
{
gov_chain: govChain,
gov_address: Buffer.from(govAddress, "hex").toString("base64"),
guardian_set_expirity: 86400,
initial_guardian_set: {
addresses: [
{
bytes: Buffer.from(
"beFA429d57cD18b7F8A4d91A2da9AB4AF05d0FBe",
"hex"
).toString("base64"),
},
],
expiration_time: 0,
},
chain_id: 18,
fee_denom: "uluna",
},
chain_id: 18,
fee_denom: "uluna",
}, "wormhole");
"wormhole"
);
const pythEmitterAddress =
"71f8dcb863d176e2c420ad6610cf687359612b6fb392e0642b0ca6b1f186aa3b";
const pythChain = 1;
addresses["pyth_cosmwasm.wasm"] = await instantiate("pyth_cosmwasm.wasm", {
wormhole_contract: addresses["wormhole.wasm"],
pyth_emitter: Buffer.from(pythEmitterAddress, "hex").toString(
"base64"
),
pyth_emitter_chain: pythChain,
}, "pyth");
addresses["pyth_cosmwasm.wasm"] = await instantiate(
"pyth_cosmwasm.wasm",
{
wormhole_contract: addresses["wormhole.wasm"],
pyth_emitter: Buffer.from(pythEmitterAddress, "hex").toString("base64"),
pyth_emitter_chain: pythChain,
},
"pyth"
);
// Terra addresses are "human-readable", but for cross-chain registrations, we
// want the "canonical" version

View File

@ -74,8 +74,8 @@ spec:
- /bin/sh
- -c
- "npm run migrate -- --network development &&
npx truffle test test/pyth.js 2>&1 &&
nc -lkp 2000 0.0.0.0"
npx truffle test test/pyth.js 2>&1 &&
nc -lkp 2000 0.0.0.0"
readinessProbe:
periodSeconds: 1
failureThreshold: 300
@ -129,7 +129,7 @@ spec:
- /bin/sh
- -c
- "npm run migrate -- --network development &&
nc -lkp 2000 0.0.0.0"
nc -lkp 2000 0.0.0.0"
readinessProbe:
periodSeconds: 1
failureThreshold: 300

View File

@ -38,10 +38,10 @@ spec:
- python3
- /usr/src/pyth/p2w_autoattest.py
env:
- name: P2W_INITIALIZE_SOL_CONTRACT
value: "1"
- name: P2W_EXIT_ON_ERROR
value: "true"
- name: P2W_INITIALIZE_SOL_CONTRACT
value: "1"
- name: P2W_EXIT_ON_ERROR
value: "true"
tty: true
readinessProbe:
tcpSocket:

View File

@ -45,7 +45,7 @@ spec:
protocol: TCP
readinessProbe:
httpGet:
path: '/health'
path: "/health"
port: 4200
command:
- node
@ -68,16 +68,16 @@ spec:
- name: EVM_VERIFY_PRICE_FEEDS
value: "yes"
- name: REST_PORT
value: '4200'
value: "4200"
- name: PROM_PORT
value: '8081'
value: "8081"
- name: BAL_QUERY_INTERVAL
value: '60000'
value: "60000"
- name: RETRY_MAX_ATTEMPTS
value: '4'
value: "4"
- name: RETRY_DELAY_IN_MS
value: '250'
value: "250"
- name: MAX_MSGS_PER_BATCH
value: '1'
value: "1"
- name: LOG_LEVEL
value: debug

View File

@ -45,7 +45,7 @@ spec:
protocol: TCP
readinessProbe:
httpGet:
path: '/health'
path: "/health"
port: 4200
command:
- node
@ -71,18 +71,18 @@ spec:
- name: TERRA_COIN
value: uluna
- name: REST_PORT
value: '4200'
value: "4200"
- name: PROM_PORT
value: '8081'
value: "8081"
- name: BAL_QUERY_INTERVAL
value: '60000'
value: "60000"
- name: RETRY_MAX_ATTEMPTS
value: '6'
value: "6"
- name: RETRY_DELAY_IN_MS
value: '1000'
value: "1000"
- name: MAX_MSGS_PER_BATCH
value: '1'
value: "1"
- name: MAX_HEALTHY_NO_RELAY_DURATION_IN_SECONDS
value: '120'
value: "120"
- name: LOG_LEVEL
value: debug

View File

@ -18,6 +18,6 @@ spec:
image: pyth-evm-watcher
env:
- name: WS_ENDPOINT
value: 'ws://eth-devnet:8545'
value: "ws://eth-devnet:8545"
- name: PYTH_CONTRACT
value: '0xe982E462b094850F12AF94d21D470e21bE9D0E9C'
value: "0xe982E462b094850F12AF94d21D470e21bE9D0E9C"

View File

@ -45,14 +45,14 @@ spec:
protocol: TCP
readinessProbe:
httpGet:
path: '/ready'
path: "/ready"
port: 4200
initialDelaySeconds: 10
periodSeconds: 1
failureThreshold: 1
livenessProbe:
httpGet:
path: '/live'
path: "/live"
port: 4200
initialDelaySeconds: 20
periodSeconds: 30
@ -63,12 +63,12 @@ spec:
- name: SPY_SERVICE_FILTERS
value: '[{"chain_id":1,"emitter_address":"71f8dcb863d176e2c420ad6610cf687359612b6fb392e0642b0ca6b1f186aa3b"}]'
- name: REST_PORT
value: '4200'
value: "4200"
- name: PROM_PORT
value: '8081'
value: "8081"
- name: READINESS_SPY_SYNC_TIME_SECONDS
value: '5'
value: "5"
- name: READINESS_NUM_LOADED_SYMBOLS
value: '6'
value: "6"
- name: LOG_LEVEL
value: debug

View File

@ -46,7 +46,7 @@ spec:
# Hardcoded devnet bootstrap (generated from deterministic key in guardiand)
- --bootstrap
- /dns4/guardian-0.guardian/udp/8999/quic/p2p/12D3KooWL3XJ9EMCyZvmmGXL2LMiVBtrVa2BuESsJiXkSj7333Jw
# - --logLevel=debug
# - --logLevel=debug
ports:
- containerPort: 7072
name: spyrpc

View File

@ -2,4 +2,4 @@ node_modules
build
.openzeppelin
networks
.env
.env

View File

@ -2,4 +2,4 @@ MIGRATIONS_DIR=./migrations/prod
MIGRATIONS_NETWORK=ethereum
WORMHOLE_CHAIN_NAME=ethereum
CLUSTER=mainnet
VALID_TIME_PERIOD_SECONDS=120
VALID_TIME_PERIOD_SECONDS=120

View File

@ -192,12 +192,7 @@
},
"t_enum(PriceStatus)2233": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"
@ -435,12 +430,7 @@
},
"t_enum(PriceStatus)2280": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"
@ -676,12 +666,7 @@
},
"t_enum(PriceStatus)2468": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"

View File

@ -230,12 +230,7 @@
},
"t_enum(PriceStatus)2468": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"

View File

@ -192,12 +192,7 @@
},
"t_enum(PriceStatus)2233": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"
@ -435,12 +430,7 @@
},
"t_enum(PriceStatus)2280": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"
@ -676,12 +666,7 @@
},
"t_enum(PriceStatus)2468": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"

View File

@ -230,12 +230,7 @@
},
"t_enum(PriceStatus)2468": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"

View File

@ -230,12 +230,7 @@
},
"t_enum(PriceStatus)2468": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"

View File

@ -192,12 +192,7 @@
},
"t_enum(PriceStatus)2233": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"
@ -435,12 +430,7 @@
},
"t_enum(PriceStatus)2280": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"
@ -676,12 +666,7 @@
},
"t_enum(PriceStatus)2468": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"

View File

@ -230,12 +230,7 @@
},
"t_enum(PriceStatus)2468": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"

View File

@ -230,12 +230,7 @@
},
"t_enum(PriceStatus)2468": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"

View File

@ -192,12 +192,7 @@
},
"t_enum(PriceStatus)2233": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"
@ -435,12 +430,7 @@
},
"t_enum(PriceStatus)2280": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"
@ -676,12 +666,7 @@
},
"t_enum(PriceStatus)2468": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"

View File

@ -230,12 +230,7 @@
},
"t_enum(PriceStatus)2468": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"

View File

@ -192,12 +192,7 @@
},
"t_enum(PriceStatus)2233": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"
@ -435,12 +430,7 @@
},
"t_enum(PriceStatus)2280": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"
@ -676,12 +666,7 @@
},
"t_enum(PriceStatus)2468": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"

View File

@ -230,12 +230,7 @@
},
"t_enum(PriceStatus)2468": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"

View File

@ -230,12 +230,7 @@
},
"t_enum(PriceStatus)2468": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"

View File

@ -192,12 +192,7 @@
},
"t_enum(PriceStatus)2233": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"
@ -435,12 +430,7 @@
},
"t_enum(PriceStatus)2280": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"
@ -676,12 +666,7 @@
},
"t_enum(PriceStatus)2468": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"

View File

@ -192,12 +192,7 @@
},
"t_enum(PriceStatus)2233": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"
@ -435,12 +430,7 @@
},
"t_enum(PriceStatus)2280": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"
@ -676,12 +666,7 @@
},
"t_enum(PriceStatus)2468": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"
@ -925,12 +910,7 @@
},
"t_enum(PriceStatus)2468": {
"label": "enum PythStructs.PriceStatus",
"members": [
"UNKNOWN",
"TRADING",
"HALTED",
"AUCTION"
]
"members": ["UNKNOWN", "TRADING", "HALTED", "AUCTION"]
},
"t_uint32": {
"label": "uint32"

View File

@ -24,7 +24,7 @@ export MNEMONIC=...
npm run receiver-submit-guardian-sets -- --network $MIGRATIONS_NETWORK
```
As a sanity check, it is recommended to deploy the migrations in `migrations/prod` to the Truffle `development` network first. You can do this by using the configuration values in [`.env.prod.development`](.env.prod.development).
As a sanity check, it is recommended to deploy the migrations in `migrations/prod` to the Truffle `development` network first. You can do this by using the configuration values in [`.env.prod.development`](.env.prod.development).
As a result of this process for some files (with the network id in their name) in `networks` and `.openzeppelin` directory might change which need to be committed (if they are result of a production deployment).
@ -33,26 +33,29 @@ If you are deploying to a new network, please add the new contract address to co
To do so, add the contract address to both [Pyth Gitbook EVM Page](https://github.com/pyth-network/pyth-gitbook/blob/main/consumers/evm.md) and [pyth-evm-js package](https://github.com/pyth-network/pyth-js/blob/main/pyth-evm-js/src/index.ts#L13). You also need to add the new network address to [pyth-evm-js relaying example](https://github.com/pyth-network/pyth-js/blob/main/pyth-evm-js/src/examples/EvmRelay.ts#L47).
## `networks` directory
Truffle stores the address of the deployed contracts in the build artifacts, which can make local development difficult. We use [`truffle-deploy-registry`](https://github.com/MedXProtocol/truffle-deploy-registry) to store the addresses separately from the artifacts, in the [`networks`](networks) directory. When we need to perform operations on the deployed contracts, such as performing additional migrations, we can run `npx apply-registry` to populate the artifacts with the correct addresses.
Each file in the network directory is named after the network id and contains address of Migration contract and PythUpgradable contract
(and Wormhole Receiver if we use `prod-receiver`). If you are upgrading the contract it should not change. In case you are deploying to a new network make sure to commit this file.
## `.openzeppelin` directory
In order to handle upgrades safely this directory stores details of the contracts structure, such as implementation addresses
and their respective storage layout in one file per network (the name contains network id). This allows truffle to
check whether the upgrade is causing any memory collision. Please take a look at (this doc)[https://docs.openzeppelin.com/upgrades-plugins/1.x/writing-upgradeable]
and their respective storage layout in one file per network (the name contains network id). This allows truffle to
check whether the upgrade is causing any memory collision. Please take a look at (this doc)[https://docs.openzeppelin.com/upgrades-plugins/1.x/writing-upgradeable]
for more information.
Changes to the files in this directory should be commited as well.
# Upgrading the contract
To upgrade the contract you should add a new migration file in the `migrations/*` directories increasing the migration number.
It looks like so:
```javascript
require('dotenv').config({ path: "../.env" });
require("dotenv").config({ path: "../.env" });
const PythUpgradable = artifacts.require("PythUpgradable");
@ -60,13 +63,13 @@ const { upgradeProxy } = require("@openzeppelin/truffle-upgrades");
/**
* Version <x.y.z>.
*
*
* Briefly describe the changelog here.
*/
module.exports = async function (deployer) {
const proxy = await PythUpgradable.deployed();
await upgradeProxy(proxy.address, PythUpgradable, { deployer });
}
const proxy = await PythUpgradable.deployed();
await upgradeProxy(proxy.address, PythUpgradable, { deployer });
};
```
**When changing the storage, you might need to disable the storage checks because Open Zeppelin is very conservative,
@ -75,9 +78,10 @@ struct that contains all Pyth variables inside it. It is the last variable in th
and is safe to append fields inside it. However, Open Zeppelin only allows appending variables
in the contract surface and does not allow appending in the nested structs.
To disable security checks, you can add
`unsafeSkipStorageCheck: true` option in `upgradeProxy` call. **If you do such a thing,
To disable security checks, you can add
`unsafeSkipStorageCheck: true` option in `upgradeProxy` call. **If you do such a thing,
make sure that your change to the contract won't cause any collision**. For example:
- Renaming a variable is fine.
- Changing a variable type to another type with the same size is ok.
- Appending to the contract variables is ok. If the last variable is a struct, it is also fine
@ -121,9 +125,9 @@ It will create a new file `PythUpgradable_merged.sol` which you can use in the e
# Troubleshooting
- Sometimes the truffle might fail during the dry-run (e.g., in Ethereum). It is because openzeppelin does not have the required metadata for forking. To fix it please
follow the suggestion [here](https://github.com/OpenZeppelin/openzeppelin-upgrades/issues/241#issuecomment-1192657444).
follow the suggestion [here](https://github.com/OpenZeppelin/openzeppelin-upgrades/issues/241#issuecomment-1192657444).
- Sometimes due to rpc problems or insufficient gas the migration is not executed completely. It is better to avoid doing multiple transactions in one
migration. However, if it happens, you can comment out the part that is already ran (you can double check in the explorer), and re-run the migration.
You can avoid gas problems by choosing a much higher gas than what is showed on the network gas tracker. Also, you can find rpc nodes from
[here](https://chainlist.org/)
migration. However, if it happens, you can comment out the part that is already ran (you can double check in the explorer), and re-run the migration.
You can avoid gas problems by choosing a much higher gas than what is showed on the network gas tracker. Also, you can find rpc nodes from
[here](https://chainlist.org/)

View File

@ -9,19 +9,20 @@ Run the following command to install required dependencies for the contract:
```
# xc-governance-sdk-js is a local dependency that should be built
# it is used in deployment (truffle migrations) to generate/sanity check
# the governance VAAs
# the governance VAAs
pushd third_party/pyth/xc-governance-sdk-js && npm ci && popd
npm ci
```
## Deployment
Please refer to [Deploying.md](./Deploying.md) for more information.
## Foundry
Foundry can be installed by the official installer, or by running our helper script which will automatically pull the correct installation script individually for Foundry and the Solidity compiler for your current OS. This may work better if you are running into networking/firewall issues using Foundry's Solidity installer. To use helper script, run the command below from this directory:
``` sh
```sh
pyth-crosschain/ethereum $ bash ../scripts/install-foundry.sh
```

View File

@ -13,7 +13,7 @@ be set in the `ETHERSCAN_KEY` environment variable for all APIs (not just
etherscan, bit of a misnomer).
Our contracts are structured as a separate proxy and an implementation. Both of
these components need to be verified, and truffle handles it.
these components need to be verified, and truffle handles it.
## Verifying the contract
@ -30,12 +30,12 @@ ETHERSCAN_KEY=... npm run verify --module=PythUpgradable --contract_address=0x0e
`truffle-plugin-verify/utils.js` to find the key names.
# Note
The `npm run verify` script uses the `truffle-plugin-verify` plugin under the
hood. The version of `truffle-plugin-verify` pinned in the repo (`^0.5.11` at
hood. The version of `truffle-plugin-verify` pinned in the repo (`^0.5.11` at
the time of writing) doesn't support the avalanche RPC. In later versions of the
plugin, support was added, but other stuff has changed as well in the transitive
dependencies, so it fails to parse the `HDWallet` arguments in our
`truffle-config.json`. As a quick workaround, we backport the patch to `0.5.11`
by applying the `truffle-verify-constants.patch` file, which the `npm run
verify` script does transparently. Once the toolchain has been upgraded and the
by applying the `truffle-verify-constants.patch` file, which the `npm run verify` script does transparently. Once the toolchain has been upgraded and the
errors fixed, this patch can be removed.

View File

@ -8,7 +8,7 @@ import "@openzeppelin/contracts/access/Ownable.sol";
contract Migrations is Ownable {
uint public last_completed_migration;
function setCompleted(uint completed) public onlyOwner {
function setCompleted(uint completed) public onlyOwner {
last_completed_migration = completed;
}
}

View File

@ -8,16 +8,11 @@
*/
pragma solidity >=0.8.0 <0.9.0;
library BytesLib {
function concat(
bytes memory _preBytes,
bytes memory _postBytes
)
internal
pure
returns (bytes memory)
{
) internal pure returns (bytes memory) {
bytes memory tempBytes;
assembly {
@ -79,16 +74,22 @@ library BytesLib {
// next 32 byte block, then round down to the nearest multiple of
// 32. If the sum of the length of the two arrays is zero then add
// one before rounding down to leave a blank 32 bytes (the length block with 0).
mstore(0x40, and(
add(add(end, iszero(add(length, mload(_preBytes)))), 31),
not(31) // Round down to the nearest 32 bytes.
))
mstore(
0x40,
and(
add(add(end, iszero(add(length, mload(_preBytes)))), 31),
not(31) // Round down to the nearest 32 bytes.
)
)
}
return tempBytes;
}
function concatStorage(bytes storage _preBytes, bytes memory _postBytes) internal {
function concatStorage(
bytes storage _preBytes,
bytes memory _postBytes
) internal {
assembly {
// Read the first 32 bytes of _preBytes storage, which is the length
// of the array. (We don't need to use the offset into the slot
@ -101,7 +102,10 @@ library BytesLib {
// If the slot is even, bitwise and the slot with 255 and divide by
// two to get the length. If the slot is odd, bitwise and the slot
// with -1 and divide by two.
let slength := div(and(fslot, sub(mul(0x100, iszero(and(fslot, 1))), 1)), 2)
let slength := div(
and(fslot, sub(mul(0x100, iszero(and(fslot, 1))), 1)),
2
)
let mlength := mload(_postBytes)
let newlength := add(slength, mlength)
// slength can contain both the length and contents of the array
@ -229,11 +233,7 @@ library BytesLib {
bytes memory _bytes,
uint256 _start,
uint256 _length
)
internal
pure
returns (bytes memory)
{
) internal pure returns (bytes memory) {
require(_length + 31 >= _length, "slice_overflow");
require(_bytes.length >= _start + _length, "slice_outOfBounds");
@ -260,13 +260,22 @@ library BytesLib {
// because when slicing multiples of 32 bytes (lengthmod == 0)
// the following copy loop was copying the origin's length
// and then ending prematurely not copying everything it should.
let mc := add(add(tempBytes, lengthmod), mul(0x20, iszero(lengthmod)))
let mc := add(
add(tempBytes, lengthmod),
mul(0x20, iszero(lengthmod))
)
let end := add(mc, _length)
for {
// The multiplication in the next line has the same exact purpose
// as the one above.
let cc := add(add(add(_bytes, lengthmod), mul(0x20, iszero(lengthmod))), _start)
let cc := add(
add(
add(_bytes, lengthmod),
mul(0x20, iszero(lengthmod))
),
_start
)
} lt(mc, end) {
mc := add(mc, 0x20)
cc := add(cc, 0x20)
@ -294,19 +303,28 @@ library BytesLib {
return tempBytes;
}
function toAddress(bytes memory _bytes, uint256 _start) internal pure returns (address) {
function toAddress(
bytes memory _bytes,
uint256 _start
) internal pure returns (address) {
require(_bytes.length >= _start + 20, "toAddress_outOfBounds");
address tempAddress;
assembly {
tempAddress := div(mload(add(add(_bytes, 0x20), _start)), 0x1000000000000000000000000)
tempAddress := div(
mload(add(add(_bytes, 0x20), _start)),
0x1000000000000000000000000
)
}
return tempAddress;
}
function toUint8(bytes memory _bytes, uint256 _start) internal pure returns (uint8) {
require(_bytes.length >= _start + 1 , "toUint8_outOfBounds");
function toUint8(
bytes memory _bytes,
uint256 _start
) internal pure returns (uint8) {
require(_bytes.length >= _start + 1, "toUint8_outOfBounds");
uint8 tempUint;
assembly {
@ -316,7 +334,10 @@ library BytesLib {
return tempUint;
}
function toUint16(bytes memory _bytes, uint256 _start) internal pure returns (uint16) {
function toUint16(
bytes memory _bytes,
uint256 _start
) internal pure returns (uint16) {
require(_bytes.length >= _start + 2, "toUint16_outOfBounds");
uint16 tempUint;
@ -327,7 +348,10 @@ library BytesLib {
return tempUint;
}
function toUint32(bytes memory _bytes, uint256 _start) internal pure returns (uint32) {
function toUint32(
bytes memory _bytes,
uint256 _start
) internal pure returns (uint32) {
require(_bytes.length >= _start + 4, "toUint32_outOfBounds");
uint32 tempUint;
@ -338,7 +362,10 @@ library BytesLib {
return tempUint;
}
function toUint64(bytes memory _bytes, uint256 _start) internal pure returns (uint64) {
function toUint64(
bytes memory _bytes,
uint256 _start
) internal pure returns (uint64) {
require(_bytes.length >= _start + 8, "toUint64_outOfBounds");
uint64 tempUint;
@ -349,7 +376,10 @@ library BytesLib {
return tempUint;
}
function toUint96(bytes memory _bytes, uint256 _start) internal pure returns (uint96) {
function toUint96(
bytes memory _bytes,
uint256 _start
) internal pure returns (uint96) {
require(_bytes.length >= _start + 12, "toUint96_outOfBounds");
uint96 tempUint;
@ -360,7 +390,10 @@ library BytesLib {
return tempUint;
}
function toUint128(bytes memory _bytes, uint256 _start) internal pure returns (uint128) {
function toUint128(
bytes memory _bytes,
uint256 _start
) internal pure returns (uint128) {
require(_bytes.length >= _start + 16, "toUint128_outOfBounds");
uint128 tempUint;
@ -371,7 +404,10 @@ library BytesLib {
return tempUint;
}
function toUint256(bytes memory _bytes, uint256 _start) internal pure returns (uint256) {
function toUint256(
bytes memory _bytes,
uint256 _start
) internal pure returns (uint256) {
require(_bytes.length >= _start + 32, "toUint256_outOfBounds");
uint256 tempUint;
@ -382,7 +418,10 @@ library BytesLib {
return tempUint;
}
function toBytes32(bytes memory _bytes, uint256 _start) internal pure returns (bytes32) {
function toBytes32(
bytes memory _bytes,
uint256 _start
) internal pure returns (bytes32) {
require(_bytes.length >= _start + 32, "toBytes32_outOfBounds");
bytes32 tempBytes32;
@ -393,7 +432,10 @@ library BytesLib {
return tempBytes32;
}
function equal(bytes memory _preBytes, bytes memory _postBytes) internal pure returns (bool) {
function equal(
bytes memory _preBytes,
bytes memory _postBytes
) internal pure returns (bool) {
bool success = true;
assembly {
@ -413,8 +455,8 @@ library BytesLib {
for {
let cc := add(_postBytes, 0x20)
// the next line is the loop condition:
// while(uint256(mc < end) + cb == 2)
// the next line is the loop condition:
// while(uint256(mc < end) + cb == 2)
} eq(add(lt(mc, end), cb), 2) {
mc := add(mc, 0x20)
cc := add(cc, 0x20)
@ -439,18 +481,17 @@ library BytesLib {
function equalStorage(
bytes storage _preBytes,
bytes memory _postBytes
)
internal
view
returns (bool)
{
) internal view returns (bool) {
bool success = true;
assembly {
// we know _preBytes_offset is 0
let fslot := sload(_preBytes.slot)
// Decode the length of the stored array like in concatStorage().
let slength := div(and(fslot, sub(mul(0x100, iszero(and(fslot, 1))), 1)), 2)
let slength := div(
and(fslot, sub(mul(0x100, iszero(and(fslot, 1))), 1)),
2
)
let mlength := mload(_postBytes)
// if lengths don't match the arrays are not equal
@ -486,7 +527,9 @@ library BytesLib {
// the next line is the loop condition:
// while(uint256(mc < end) + cb == 2)
for {} eq(add(lt(mc, end), cb), 2) {
for {
} eq(add(lt(mc, end), cb), 2) {
sc := add(sc, 1)
mc := add(mc, 0x20)
} {

View File

@ -11,16 +11,11 @@
*/
pragma solidity >=0.8.0 <0.9.0;
library UnsafeBytesLib {
function concat(
bytes memory _preBytes,
bytes memory _postBytes
)
internal
pure
returns (bytes memory)
{
) internal pure returns (bytes memory) {
bytes memory tempBytes;
assembly {
@ -82,16 +77,22 @@ library UnsafeBytesLib {
// next 32 byte block, then round down to the nearest multiple of
// 32. If the sum of the length of the two arrays is zero then add
// one before rounding down to leave a blank 32 bytes (the length block with 0).
mstore(0x40, and(
add(add(end, iszero(add(length, mload(_preBytes)))), 31),
not(31) // Round down to the nearest 32 bytes.
))
mstore(
0x40,
and(
add(add(end, iszero(add(length, mload(_preBytes)))), 31),
not(31) // Round down to the nearest 32 bytes.
)
)
}
return tempBytes;
}
function concatStorage(bytes storage _preBytes, bytes memory _postBytes) internal {
function concatStorage(
bytes storage _preBytes,
bytes memory _postBytes
) internal {
assembly {
// Read the first 32 bytes of _preBytes storage, which is the length
// of the array. (We don't need to use the offset into the slot
@ -104,7 +105,10 @@ library UnsafeBytesLib {
// If the slot is even, bitwise and the slot with 255 and divide by
// two to get the length. If the slot is odd, bitwise and the slot
// with -1 and divide by two.
let slength := div(and(fslot, sub(mul(0x100, iszero(and(fslot, 1))), 1)), 2)
let slength := div(
and(fslot, sub(mul(0x100, iszero(and(fslot, 1))), 1)),
2
)
let mlength := mload(_postBytes)
let newlength := add(slength, mlength)
// slength can contain both the length and contents of the array
@ -232,12 +236,7 @@ library UnsafeBytesLib {
bytes memory _bytes,
uint256 _start,
uint256 _length
)
internal
pure
returns (bytes memory)
{
) internal pure returns (bytes memory) {
bytes memory tempBytes;
assembly {
@ -261,13 +260,22 @@ library UnsafeBytesLib {
// because when slicing multiples of 32 bytes (lengthmod == 0)
// the following copy loop was copying the origin's length
// and then ending prematurely not copying everything it should.
let mc := add(add(tempBytes, lengthmod), mul(0x20, iszero(lengthmod)))
let mc := add(
add(tempBytes, lengthmod),
mul(0x20, iszero(lengthmod))
)
let end := add(mc, _length)
for {
// The multiplication in the next line has the same exact purpose
// as the one above.
let cc := add(add(add(_bytes, lengthmod), mul(0x20, iszero(lengthmod))), _start)
let cc := add(
add(
add(_bytes, lengthmod),
mul(0x20, iszero(lengthmod))
),
_start
)
} lt(mc, end) {
mc := add(mc, 0x20)
cc := add(cc, 0x20)
@ -295,17 +303,26 @@ library UnsafeBytesLib {
return tempBytes;
}
function toAddress(bytes memory _bytes, uint256 _start) internal pure returns (address) {
function toAddress(
bytes memory _bytes,
uint256 _start
) internal pure returns (address) {
address tempAddress;
assembly {
tempAddress := div(mload(add(add(_bytes, 0x20), _start)), 0x1000000000000000000000000)
tempAddress := div(
mload(add(add(_bytes, 0x20), _start)),
0x1000000000000000000000000
)
}
return tempAddress;
}
function toUint8(bytes memory _bytes, uint256 _start) internal pure returns (uint8) {
function toUint8(
bytes memory _bytes,
uint256 _start
) internal pure returns (uint8) {
uint8 tempUint;
assembly {
@ -315,7 +332,10 @@ library UnsafeBytesLib {
return tempUint;
}
function toUint16(bytes memory _bytes, uint256 _start) internal pure returns (uint16) {
function toUint16(
bytes memory _bytes,
uint256 _start
) internal pure returns (uint16) {
uint16 tempUint;
assembly {
@ -325,7 +345,10 @@ library UnsafeBytesLib {
return tempUint;
}
function toUint32(bytes memory _bytes, uint256 _start) internal pure returns (uint32) {
function toUint32(
bytes memory _bytes,
uint256 _start
) internal pure returns (uint32) {
uint32 tempUint;
assembly {
@ -335,7 +358,10 @@ library UnsafeBytesLib {
return tempUint;
}
function toUint64(bytes memory _bytes, uint256 _start) internal pure returns (uint64) {
function toUint64(
bytes memory _bytes,
uint256 _start
) internal pure returns (uint64) {
uint64 tempUint;
assembly {
@ -345,7 +371,10 @@ library UnsafeBytesLib {
return tempUint;
}
function toUint96(bytes memory _bytes, uint256 _start) internal pure returns (uint96) {
function toUint96(
bytes memory _bytes,
uint256 _start
) internal pure returns (uint96) {
uint96 tempUint;
assembly {
@ -355,7 +384,10 @@ library UnsafeBytesLib {
return tempUint;
}
function toUint128(bytes memory _bytes, uint256 _start) internal pure returns (uint128) {
function toUint128(
bytes memory _bytes,
uint256 _start
) internal pure returns (uint128) {
uint128 tempUint;
assembly {
@ -365,7 +397,10 @@ library UnsafeBytesLib {
return tempUint;
}
function toUint256(bytes memory _bytes, uint256 _start) internal pure returns (uint256) {
function toUint256(
bytes memory _bytes,
uint256 _start
) internal pure returns (uint256) {
uint256 tempUint;
assembly {
@ -375,7 +410,10 @@ library UnsafeBytesLib {
return tempUint;
}
function toBytes32(bytes memory _bytes, uint256 _start) internal pure returns (bytes32) {
function toBytes32(
bytes memory _bytes,
uint256 _start
) internal pure returns (bytes32) {
bytes32 tempBytes32;
assembly {
@ -385,7 +423,10 @@ library UnsafeBytesLib {
return tempBytes32;
}
function equal(bytes memory _preBytes, bytes memory _postBytes) internal pure returns (bool) {
function equal(
bytes memory _preBytes,
bytes memory _postBytes
) internal pure returns (bool) {
bool success = true;
assembly {
@ -405,8 +446,8 @@ library UnsafeBytesLib {
for {
let cc := add(_postBytes, 0x20)
// the next line is the loop condition:
// while(uint256(mc < end) + cb == 2)
// the next line is the loop condition:
// while(uint256(mc < end) + cb == 2)
} eq(add(lt(mc, end), cb), 2) {
mc := add(mc, 0x20)
cc := add(cc, 0x20)
@ -431,18 +472,17 @@ library UnsafeBytesLib {
function equalStorage(
bytes storage _preBytes,
bytes memory _postBytes
)
internal
view
returns (bool)
{
) internal view returns (bool) {
bool success = true;
assembly {
// we know _preBytes_offset is 0
let fslot := sload(_preBytes.slot)
// Decode the length of the stored array like in concatStorage().
let slength := div(and(fslot, sub(mul(0x100, iszero(and(fslot, 1))), 1)), 2)
let slength := div(
and(fslot, sub(mul(0x100, iszero(and(fslot, 1))), 1)),
2
)
let mlength := mload(_postBytes)
// if lengths don't match the arrays are not equal
@ -478,7 +518,9 @@ library UnsafeBytesLib {
// the next line is the loop condition:
// while(uint256(mc < end) + cb == 2)
for {} eq(add(lt(mc, end), cb), 2) {
for {
} eq(add(lt(mc, end), cb), 2) {
sc := add(sc, 1)
mc := add(mc, 0x20)
} {

View File

@ -23,34 +23,48 @@ abstract contract Pyth is PythGetters, PythSetters, AbstractPyth {
}
function updatePriceBatchFromVm(bytes calldata encodedVm) private {
parseAndProcessBatchPriceAttestation(parseAndVerifyBatchAttestationVM(encodedVm));
parseAndProcessBatchPriceAttestation(
parseAndVerifyBatchAttestationVM(encodedVm)
);
}
function updatePriceFeeds(bytes[] calldata updateData) public override payable {
function updatePriceFeeds(
bytes[] calldata updateData
) public payable override {
uint requiredFee = getUpdateFee(updateData);
require(msg.value >= requiredFee, "insufficient paid fee amount");
for(uint i = 0; i < updateData.length; ) {
for (uint i = 0; i < updateData.length; ) {
updatePriceBatchFromVm(updateData[i]);
unchecked { i++; }
unchecked {
i++;
}
}
}
/// This method is deprecated, please use the `getUpdateFee(bytes[])` instead.
function getUpdateFee(uint updateDataSize) public view returns (uint feeAmount) {
function getUpdateFee(
uint updateDataSize
) public view returns (uint feeAmount) {
return singleUpdateFeeInWei() * updateDataSize;
}
function getUpdateFee(bytes[] calldata updateData) public override view returns (uint feeAmount) {
function getUpdateFee(
bytes[] calldata updateData
) public view override returns (uint feeAmount) {
return singleUpdateFeeInWei() * updateData.length;
}
function verifyPythVM(IWormhole.VM memory vm) private view returns (bool valid) {
return isValidDataSource(vm.emitterChainId, vm.emitterAddress);
function verifyPythVM(
IWormhole.VM memory vm
) private view returns (bool valid) {
return isValidDataSource(vm.emitterChainId, vm.emitterAddress);
}
function parseAndProcessBatchPriceAttestation(IWormhole.VM memory vm) internal {
function parseAndProcessBatchPriceAttestation(
IWormhole.VM memory vm
) internal {
// Most of the math operations below are simple additions.
// In the places that there is more complex operation there is
// a comment explaining why it is safe. Also, byteslib
@ -58,12 +72,22 @@ abstract contract Pyth is PythGetters, PythSetters, AbstractPyth {
unchecked {
bytes memory encoded = vm.payload;
(uint index, uint nAttestations, uint attestationSize) =
parseBatchAttestationHeader(encoded);
(
uint index,
uint nAttestations,
uint attestationSize
) = parseBatchAttestationHeader(encoded);
// Deserialize each attestation
for (uint j=0; j < nAttestations; j++) {
(PythInternalStructs.PriceInfo memory info, bytes32 priceId) = parseSingleAttestationFromBatch(encoded, index, attestationSize);
for (uint j = 0; j < nAttestations; j++) {
(
PythInternalStructs.PriceInfo memory info,
bytes32 priceId
) = parseSingleAttestationFromBatch(
encoded,
index,
attestationSize
);
// Respect specified attestation size for forward-compat
index += attestationSize;
@ -71,9 +95,14 @@ abstract contract Pyth is PythGetters, PythSetters, AbstractPyth {
// Store the attestation
uint64 latestPublishTime = latestPriceInfoPublishTime(priceId);
if(info.publishTime > latestPublishTime) {
if (info.publishTime > latestPublishTime) {
setLatestPriceInfo(priceId, info);
emit PriceFeedUpdate(priceId, info.publishTime, info.price, info.conf);
emit PriceFeedUpdate(
priceId,
info.publishTime,
info.price,
info.conf
);
}
}
@ -85,10 +114,11 @@ abstract contract Pyth is PythGetters, PythSetters, AbstractPyth {
bytes memory encoded,
uint index,
uint attestationSize
) internal pure returns (
PythInternalStructs.PriceInfo memory info,
bytes32 priceId
) {
)
internal
pure
returns (PythInternalStructs.PriceInfo memory info, bytes32 priceId)
{
unchecked {
// NOTE: We don't advance the global index immediately.
// attestationIndex is an attestation-local offset used
@ -98,22 +128,37 @@ abstract contract Pyth is PythGetters, PythSetters, AbstractPyth {
// Unused bytes32 product id
attestationIndex += 32;
priceId = UnsafeBytesLib.toBytes32(encoded, index + attestationIndex);
priceId = UnsafeBytesLib.toBytes32(
encoded,
index + attestationIndex
);
attestationIndex += 32;
info.price = int64(UnsafeBytesLib.toUint64(encoded, index + attestationIndex));
info.price = int64(
UnsafeBytesLib.toUint64(encoded, index + attestationIndex)
);
attestationIndex += 8;
info.conf = UnsafeBytesLib.toUint64(encoded, index + attestationIndex);
info.conf = UnsafeBytesLib.toUint64(
encoded,
index + attestationIndex
);
attestationIndex += 8;
info.expo = int32(UnsafeBytesLib.toUint32(encoded, index + attestationIndex));
info.expo = int32(
UnsafeBytesLib.toUint32(encoded, index + attestationIndex)
);
attestationIndex += 4;
info.emaPrice = int64(UnsafeBytesLib.toUint64(encoded, index + attestationIndex));
info.emaPrice = int64(
UnsafeBytesLib.toUint64(encoded, index + attestationIndex)
);
attestationIndex += 8;
info.emaConf = UnsafeBytesLib.toUint64(encoded, index + attestationIndex);
info.emaConf = UnsafeBytesLib.toUint64(
encoded,
index + attestationIndex
);
attestationIndex += 8;
{
@ -122,7 +167,10 @@ abstract contract Pyth is PythGetters, PythSetters, AbstractPyth {
// 1 = TRADING: The price feed is updating as expected.
// 2 = HALTED: The price feed is not currently updating because trading in the product has been halted.
// 3 = AUCTION: The price feed is not currently updating because an auction is setting the price.
uint8 status = UnsafeBytesLib.toUint8(encoded, index + attestationIndex);
uint8 status = UnsafeBytesLib.toUint8(
encoded,
index + attestationIndex
);
attestationIndex += 1;
// Unused uint32 numPublishers
@ -134,30 +182,48 @@ abstract contract Pyth is PythGetters, PythSetters, AbstractPyth {
// Unused uint64 attestationTime
attestationIndex += 8;
info.publishTime = UnsafeBytesLib.toUint64(encoded, index + attestationIndex);
info.publishTime = UnsafeBytesLib.toUint64(
encoded,
index + attestationIndex
);
attestationIndex += 8;
if (status == 1) { // status == TRADING
if (status == 1) {
// status == TRADING
attestationIndex += 24;
} else {
// If status is not trading then the latest available price is
// the previous price info that are passed here.
// Previous publish time
info.publishTime = UnsafeBytesLib.toUint64(encoded, index + attestationIndex);
info.publishTime = UnsafeBytesLib.toUint64(
encoded,
index + attestationIndex
);
attestationIndex += 8;
// Previous price
info.price = int64(UnsafeBytesLib.toUint64(encoded, index + attestationIndex));
info.price = int64(
UnsafeBytesLib.toUint64(
encoded,
index + attestationIndex
)
);
attestationIndex += 8;
// Previous confidence
info.conf = UnsafeBytesLib.toUint64(encoded, index + attestationIndex);
info.conf = UnsafeBytesLib.toUint64(
encoded,
index + attestationIndex
);
attestationIndex += 8;
}
}
require(attestationIndex <= attestationSize, "INTERNAL: Consumed more than `attestationSize` bytes");
require(
attestationIndex <= attestationSize,
"INTERNAL: Consumed more than `attestationSize` bytes"
);
}
}
@ -173,7 +239,7 @@ abstract contract Pyth is PythGetters, PythSetters, AbstractPyth {
"priceIds and publishTimes arrays should have same length"
);
for (uint i = 0; i < priceIds.length;) {
for (uint i = 0; i < priceIds.length; ) {
// If the price does not exist, then the publish time is zero and
// this condition will work fine.
if (latestPriceInfoPublishTime(priceIds[i]) < publishTimes[i]) {
@ -181,7 +247,9 @@ abstract contract Pyth is PythGetters, PythSetters, AbstractPyth {
return;
}
unchecked { i++; }
unchecked {
i++;
}
}
revert(
@ -202,7 +270,10 @@ abstract contract Pyth is PythGetters, PythSetters, AbstractPyth {
price.price = info.price;
price.conf = info.conf;
require(price.publishTime != 0, "price feed for the given id is not pushed or does not exist");
require(
price.publishTime != 0,
"price feed for the given id is not pushed or does not exist"
);
}
// This is an overwrite of the same method in AbstractPyth.sol
@ -218,16 +289,19 @@ abstract contract Pyth is PythGetters, PythSetters, AbstractPyth {
price.price = info.emaPrice;
price.conf = info.emaConf;
require(price.publishTime != 0, "price feed for the given id is not pushed or does not exist");
require(
price.publishTime != 0,
"price feed for the given id is not pushed or does not exist"
);
}
function parseBatchAttestationHeader(
bytes memory encoded
) internal pure returns (
uint index,
uint nAttestations,
uint attestationSize
) {
)
internal
pure
returns (uint index, uint nAttestations, uint attestationSize)
{
unchecked {
index = 0;
@ -243,7 +317,10 @@ abstract contract Pyth is PythGetters, PythSetters, AbstractPyth {
uint16 versionMinor = UnsafeBytesLib.toUint16(encoded, index);
index += 2;
require(versionMinor >= 0, "invalid version minor, expected 0 or more");
require(
versionMinor >= 0,
"invalid version minor, expected 0 or more"
);
uint16 hdrSize = UnsafeBytesLib.toUint16(encoded, index);
index += 2;
@ -268,7 +345,10 @@ abstract contract Pyth is PythGetters, PythSetters, AbstractPyth {
index += hdrSize;
// Payload ID of 2 required for batch headerBa
require(payloadId == 2, "invalid payload ID, expected 2 for BatchPriceAttestation");
require(
payloadId == 2,
"invalid payload ID, expected 2 for BatchPriceAttestation"
);
}
// Parse the number of attestations
@ -281,15 +361,16 @@ abstract contract Pyth is PythGetters, PythSetters, AbstractPyth {
// Given the message is valid the arithmetic below should not overflow, and
// even if it overflows then the require would fail.
require(encoded.length == (index + (attestationSize * nAttestations)), "invalid BatchPriceAttestation size");
require(
encoded.length == (index + (attestationSize * nAttestations)),
"invalid BatchPriceAttestation size"
);
}
}
function parseAndVerifyBatchAttestationVM(
bytes calldata encodedVm
) internal view returns (
IWormhole.VM memory vm
) {
) internal view returns (IWormhole.VM memory vm) {
{
bool valid;
string memory reason;
@ -305,28 +386,41 @@ abstract contract Pyth is PythGetters, PythSetters, AbstractPyth {
bytes32[] calldata priceIds,
uint64 minPublishTime,
uint64 maxPublishTime
) external payable override returns (PythStructs.PriceFeed[] memory priceFeeds) {
)
external
payable
override
returns (PythStructs.PriceFeed[] memory priceFeeds)
{
unchecked {
{
uint requiredFee = getUpdateFee(updateData);
require(msg.value >= requiredFee, "insufficient paid fee amount");
require(
msg.value >= requiredFee,
"insufficient paid fee amount"
);
}
priceFeeds = new PythStructs.PriceFeed[](priceIds.length);
for(uint i = 0; i < updateData.length; i++) {
for (uint i = 0; i < updateData.length; i++) {
bytes memory encoded;
{
IWormhole.VM memory vm = parseAndVerifyBatchAttestationVM(updateData[i]);
IWormhole.VM memory vm = parseAndVerifyBatchAttestationVM(
updateData[i]
);
encoded = vm.payload;
}
(uint index, uint nAttestations, uint attestationSize) =
parseBatchAttestationHeader(encoded);
(
uint index,
uint nAttestations,
uint attestationSize
) = parseBatchAttestationHeader(encoded);
// Deserialize each attestation
for (uint j=0; j < nAttestations; j++) {
for (uint j = 0; j < nAttestations; j++) {
// NOTE: We don't advance the global index immediately.
// attestationIndex is an attestation-local offset used
// for readability and easier debugging.
@ -335,11 +429,14 @@ abstract contract Pyth is PythGetters, PythSetters, AbstractPyth {
// Unused bytes32 product id
attestationIndex += 32;
bytes32 priceId = UnsafeBytesLib.toBytes32(encoded, index + attestationIndex);
bytes32 priceId = UnsafeBytesLib.toBytes32(
encoded,
index + attestationIndex
);
// Check whether the caller requested for this data.
uint k = 0;
for(; k < priceIds.length; k++) {
for (; k < priceIds.length; k++) {
if (priceIds[k] == priceId) {
break;
}
@ -352,8 +449,18 @@ abstract contract Pyth is PythGetters, PythSetters, AbstractPyth {
continue;
}
(PythInternalStructs.PriceInfo memory info, ) = parseSingleAttestationFromBatch(encoded, index, attestationSize);
require(info.publishTime != 0, "price feed for the given id is not pushed or does not exist");
(
PythInternalStructs.PriceInfo memory info,
) = parseSingleAttestationFromBatch(
encoded,
index,
attestationSize
);
require(
info.publishTime != 0,
"price feed for the given id is not pushed or does not exist"
);
priceFeeds[k].id = priceId;
priceFeeds[k].price.price = info.price;
@ -369,26 +476,35 @@ abstract contract Pyth is PythGetters, PythSetters, AbstractPyth {
// if it is not, then set the id to 0 to indicate that this price id
// still does not have a valid price feed. This will allow other updates
// for this price id to be processed.
if (priceFeeds[k].price.publishTime < minPublishTime ||
priceFeeds[k].price.publishTime > maxPublishTime) {
priceFeeds[k].id = 0;
}
if (
priceFeeds[k].price.publishTime < minPublishTime ||
priceFeeds[k].price.publishTime > maxPublishTime
) {
priceFeeds[k].id = 0;
}
index += attestationSize;
}
}
for (uint k = 0; k < priceIds.length; k++) {
require(priceFeeds[k].id != 0,
"1 or more price feeds are not found in the updateData or they are out of the given time range");
require(
priceFeeds[k].id != 0,
"1 or more price feeds are not found in the updateData or they are out of the given time range"
);
}
}
}
function queryPriceFeed(bytes32 id) public view override returns (PythStructs.PriceFeed memory priceFeed){
function queryPriceFeed(
bytes32 id
) public view override returns (PythStructs.PriceFeed memory priceFeed) {
// Look up the latest price info for the given ID
PythInternalStructs.PriceInfo memory info = latestPriceInfo(id);
require(info.publishTime != 0, "price feed for the given id is not pushed or does not exist");
require(
info.publishTime != 0,
"price feed for the given id is not pushed or does not exist"
);
priceFeed.id = id;
priceFeed.price.price = info.price;
@ -402,11 +518,11 @@ abstract contract Pyth is PythGetters, PythSetters, AbstractPyth {
priceFeed.emaPrice.publishTime = uint(info.publishTime);
}
function priceFeedExists(bytes32 id) public override view returns (bool) {
function priceFeedExists(bytes32 id) public view override returns (bool) {
return (latestPriceInfoPublishTime(id) != 0);
}
function getValidTimePeriod() public override view returns (uint) {
function getValidTimePeriod() public view override returns (uint) {
return validTimePeriodSeconds();
}

View File

@ -1,4 +1,3 @@
// contracts/PythDeprecatedStructs.sol
// SPDX-License-Identifier: Apache 2
@ -20,7 +19,7 @@ contract PythDeprecatedStructs {
}
struct DeprecatedPriceFeedV1 {
// The price ID.
// The price ID.
bytes32 id;
// Product account key.
bytes32 productId;
@ -79,7 +78,6 @@ contract PythDeprecatedStructs {
DeprecatedPriceV2 emaPrice;
}
struct DeprecatedPriceInfoV2 {
uint256 attestationTime;
uint256 arrivalTime;

View File

@ -14,34 +14,56 @@ contract PythGetters is PythState {
}
/// Deprecated, use `validDataSources` instead
function pyth2WormholeChainId() public view returns (uint16){
function pyth2WormholeChainId() public view returns (uint16) {
return _state._deprecatedPyth2WormholeChainId;
}
/// Deprecated, use `validDataSources` instead
function pyth2WormholeEmitter() public view returns (bytes32){
function pyth2WormholeEmitter() public view returns (bytes32) {
return _state._deprecatedPyth2WormholeEmitter;
}
function latestPriceInfo(bytes32 priceId) internal view returns (PythInternalStructs.PriceInfo memory info){
function latestPriceInfo(
bytes32 priceId
) internal view returns (PythInternalStructs.PriceInfo memory info) {
return _state.latestPriceInfo[priceId];
}
function latestPriceInfoPublishTime(bytes32 priceId) public view returns (uint64) {
function latestPriceInfoPublishTime(
bytes32 priceId
) public view returns (uint64) {
return _state.latestPriceInfo[priceId].publishTime;
}
function hashDataSource(PythInternalStructs.DataSource memory ds) public pure returns (bytes32) {
function hashDataSource(
PythInternalStructs.DataSource memory ds
) public pure returns (bytes32) {
return keccak256(abi.encodePacked(ds.chainId, ds.emitterAddress));
}
function isValidDataSource(uint16 dataSourceChainId, bytes32 dataSourceEmitterAddress) public view returns (bool) {
return _state.isValidDataSource[keccak256(abi.encodePacked(dataSourceChainId, dataSourceEmitterAddress))];
function isValidDataSource(
uint16 dataSourceChainId,
bytes32 dataSourceEmitterAddress
) public view returns (bool) {
return
_state.isValidDataSource[
keccak256(
abi.encodePacked(
dataSourceChainId,
dataSourceEmitterAddress
)
)
];
}
function isValidGovernanceDataSource(uint16 governanceChainId, bytes32 governanceEmitterAddress) public view returns (bool) {
return _state.governanceDataSource.chainId == governanceChainId &&
_state.governanceDataSource.emitterAddress == governanceEmitterAddress;
function isValidGovernanceDataSource(
uint16 governanceChainId,
bytes32 governanceEmitterAddress
) public view returns (bool) {
return
_state.governanceDataSource.chainId == governanceChainId &&
_state.governanceDataSource.emitterAddress ==
governanceEmitterAddress;
}
function chainId() public view returns (uint16) {
@ -52,11 +74,19 @@ contract PythGetters is PythState {
return _state.lastExecutedGovernanceSequence;
}
function validDataSources() public view returns (PythInternalStructs.DataSource[] memory) {
function validDataSources()
public
view
returns (PythInternalStructs.DataSource[] memory)
{
return _state.validDataSources;
}
function governanceDataSource() public view returns (PythInternalStructs.DataSource memory) {
function governanceDataSource()
public
view
returns (PythInternalStructs.DataSource memory)
{
return _state.governanceDataSource;
}

View File

@ -8,27 +8,48 @@ import "./PythInternalStructs.sol";
import "./PythGetters.sol";
import "./PythSetters.sol";
import "@openzeppelin/contracts/proxy/ERC1967/ERC1967Upgrade.sol";
/**
* @dev `Governance` defines a means to enacting changes to the Pyth contract.
*/
abstract contract PythGovernance is PythGetters, PythSetters, PythGovernanceInstructions {
event ContractUpgraded(address oldImplementation, address newImplementation);
event GovernanceDataSourceSet(PythInternalStructs.DataSource oldDataSource, PythInternalStructs.DataSource newDataSource,
uint64 initialSequence);
event DataSourcesSet(PythInternalStructs.DataSource[] oldDataSources, PythInternalStructs.DataSource[] newDataSources);
abstract contract PythGovernance is
PythGetters,
PythSetters,
PythGovernanceInstructions
{
event ContractUpgraded(
address oldImplementation,
address newImplementation
);
event GovernanceDataSourceSet(
PythInternalStructs.DataSource oldDataSource,
PythInternalStructs.DataSource newDataSource,
uint64 initialSequence
);
event DataSourcesSet(
PythInternalStructs.DataSource[] oldDataSources,
PythInternalStructs.DataSource[] newDataSources
);
event FeeSet(uint oldFee, uint newFee);
event ValidPeriodSet(uint oldValidPeriod, uint newValidPeriod);
function verifyGovernanceVM(bytes memory encodedVM) internal returns (IWormhole.VM memory parsedVM){
(IWormhole.VM memory vm, bool valid, string memory reason) = wormhole().parseAndVerifyVM(encodedVM);
function verifyGovernanceVM(
bytes memory encodedVM
) internal returns (IWormhole.VM memory parsedVM) {
(IWormhole.VM memory vm, bool valid, string memory reason) = wormhole()
.parseAndVerifyVM(encodedVM);
require(valid, reason);
require(isValidGovernanceDataSource(vm.emitterChainId, vm.emitterAddress), "VAA is not coming from the governance data source");
require(
isValidGovernanceDataSource(vm.emitterChainId, vm.emitterAddress),
"VAA is not coming from the governance data source"
);
require(vm.sequence > lastExecutedGovernanceSequence(), "VAA is older than the last executed governance VAA");
require(
vm.sequence > lastExecutedGovernanceSequence(),
"VAA is older than the last executed governance VAA"
);
setLastExecutedGovernanceSequence(vm.sequence);
@ -38,23 +59,39 @@ abstract contract PythGovernance is PythGetters, PythSetters, PythGovernanceInst
function executeGovernanceInstruction(bytes calldata encodedVM) public {
IWormhole.VM memory vm = verifyGovernanceVM(encodedVM);
GovernanceInstruction memory gi = parseGovernanceInstruction(vm.payload);
GovernanceInstruction memory gi = parseGovernanceInstruction(
vm.payload
);
require(gi.targetChainId == chainId() || gi.targetChainId == 0, "invalid target chain for this governance instruction");
require(
gi.targetChainId == chainId() || gi.targetChainId == 0,
"invalid target chain for this governance instruction"
);
if (gi.action == GovernanceAction.UpgradeContract) {
require(gi.targetChainId != 0, "upgrade with chain id 0 is not possible");
require(
gi.targetChainId != 0,
"upgrade with chain id 0 is not possible"
);
upgradeContract(parseUpgradeContractPayload(gi.payload));
} else if (gi.action == GovernanceAction.AuthorizeGovernanceDataSourceTransfer) {
AuthorizeGovernanceDataSourceTransfer(parseAuthorizeGovernanceDataSourceTransferPayload(gi.payload));
} else if (
gi.action == GovernanceAction.AuthorizeGovernanceDataSourceTransfer
) {
AuthorizeGovernanceDataSourceTransfer(
parseAuthorizeGovernanceDataSourceTransferPayload(gi.payload)
);
} else if (gi.action == GovernanceAction.SetDataSources) {
setDataSources(parseSetDataSourcesPayload(gi.payload));
} else if (gi.action == GovernanceAction.SetFee) {
setFee(parseSetFeePayload(gi.payload));
} else if (gi.action == GovernanceAction.SetValidPeriod) {
setValidPeriod(parseSetValidPeriodPayload(gi.payload));
} else if (gi.action == GovernanceAction.RequestGovernanceDataSourceTransfer) {
revert("RequestGovernanceDataSourceTransfer can be only part of AuthorizeGovernanceDataSourceTransfer message");
} else if (
gi.action == GovernanceAction.RequestGovernanceDataSourceTransfer
) {
revert(
"RequestGovernanceDataSourceTransfer can be only part of AuthorizeGovernanceDataSourceTransfer message"
);
} else {
revert("invalid governance action");
}
@ -66,45 +103,73 @@ abstract contract PythGovernance is PythGetters, PythSetters, PythGovernanceInst
upgradeUpgradableContract(payload);
}
function upgradeUpgradableContract(UpgradeContractPayload memory payload) virtual internal;
function upgradeUpgradableContract(
UpgradeContractPayload memory payload
) internal virtual;
// Transfer the governance data source to a new value with sanity checks
// to ensure the new governance data source can manage the contract.
function AuthorizeGovernanceDataSourceTransfer(AuthorizeGovernanceDataSourceTransferPayload memory payload) internal {
PythInternalStructs.DataSource memory oldGovernanceDatSource = governanceDataSource();
function AuthorizeGovernanceDataSourceTransfer(
AuthorizeGovernanceDataSourceTransferPayload memory payload
) internal {
PythInternalStructs.DataSource
memory oldGovernanceDatSource = governanceDataSource();
// Make sure the claimVaa is a valid VAA with RequestGovernanceDataSourceTransfer governance message
// If it's valid then its emitter can take over the governance from the current emitter.
// The VAA is checked here to ensure that the new governance data source is valid and can send message
// through wormhole.
(IWormhole.VM memory vm, bool valid, string memory reason) = wormhole().parseAndVerifyVM(payload.claimVaa);
(IWormhole.VM memory vm, bool valid, string memory reason) = wormhole()
.parseAndVerifyVM(payload.claimVaa);
require(valid, reason);
GovernanceInstruction memory gi = parseGovernanceInstruction(vm.payload);
require(gi.targetChainId == chainId() || gi.targetChainId == 0, "invalid target chain for this governance instruction");
require(gi.action == GovernanceAction.RequestGovernanceDataSourceTransfer,
"governance data source change inner vaa is not of claim action type");
GovernanceInstruction memory gi = parseGovernanceInstruction(
vm.payload
);
require(
gi.targetChainId == chainId() || gi.targetChainId == 0,
"invalid target chain for this governance instruction"
);
require(
gi.action == GovernanceAction.RequestGovernanceDataSourceTransfer,
"governance data source change inner vaa is not of claim action type"
);
RequestGovernanceDataSourceTransferPayload memory claimPayload = parseRequestGovernanceDataSourceTransferPayload(gi.payload);
RequestGovernanceDataSourceTransferPayload
memory claimPayload = parseRequestGovernanceDataSourceTransferPayload(
gi.payload
);
// Governance data source index is used to prevent replay attacks, so a claimVaa cannot be used twice.
require(governanceDataSourceIndex() < claimPayload.governanceDataSourceIndex,
"cannot upgrade to an older governance data source");
require(
governanceDataSourceIndex() <
claimPayload.governanceDataSourceIndex,
"cannot upgrade to an older governance data source"
);
setGovernanceDataSourceIndex(claimPayload.governanceDataSourceIndex);
PythInternalStructs.DataSource memory newGovernanceDS = PythInternalStructs.DataSource(vm.emitterChainId, vm.emitterAddress);
PythInternalStructs.DataSource
memory newGovernanceDS = PythInternalStructs.DataSource(
vm.emitterChainId,
vm.emitterAddress
);
setGovernanceDataSource(newGovernanceDS);
// Setting the last executed governance to the claimVaa sequence to avoid using older sequences.
setLastExecutedGovernanceSequence(vm.sequence);
emit GovernanceDataSourceSet(oldGovernanceDatSource, governanceDataSource(), lastExecutedGovernanceSequence());
emit GovernanceDataSourceSet(
oldGovernanceDatSource,
governanceDataSource(),
lastExecutedGovernanceSequence()
);
}
function setDataSources(SetDataSourcesPayload memory payload) internal {
PythInternalStructs.DataSource[] memory oldDataSources = validDataSources();
PythInternalStructs.DataSource[]
memory oldDataSources = validDataSources();
for (uint i = 0; i < oldDataSources.length; i += 1) {
_state.isValidDataSource[hashDataSource(oldDataSources[i])] = false;
@ -113,7 +178,9 @@ abstract contract PythGovernance is PythGetters, PythSetters, PythGovernanceInst
delete _state.validDataSources;
for (uint i = 0; i < payload.dataSources.length; i++) {
_state.validDataSources.push(payload.dataSources[i]);
_state.isValidDataSource[hashDataSource(payload.dataSources[i])] = true;
_state.isValidDataSource[
hashDataSource(payload.dataSources[i])
] = true;
}
emit DataSourcesSet(oldDataSources, validDataSources());

View File

@ -11,7 +11,6 @@ import "./PythInternalStructs.sol";
* for Pyth governance instructions.
*/
contract PythGovernanceInstructions {
using BytesLib for bytes;
// Magic is `PTGM` encoded as a 4 byte data: Pyth Governance Message
@ -70,7 +69,9 @@ contract PythGovernanceInstructions {
}
/// @dev Parse a GovernanceInstruction
function parseGovernanceInstruction(bytes memory encodedInstruction) public pure returns (GovernanceInstruction memory gi) {
function parseGovernanceInstruction(
bytes memory encodedInstruction
) public pure returns (GovernanceInstruction memory gi) {
uint index = 0;
uint32 magic = encodedInstruction.toUint32(index);
@ -81,7 +82,10 @@ contract PythGovernanceInstructions {
gi.module = GovernanceModule(modNumber);
index += 1;
require(gi.module == MODULE, "invalid module for GovernanceInstruction");
require(
gi.module == MODULE,
"invalid module for GovernanceInstruction"
);
uint8 actionNumber = encodedInstruction.toUint8(index);
gi.action = GovernanceAction(actionNumber);
@ -93,46 +97,71 @@ contract PythGovernanceInstructions {
// As solidity performs math operations in a checked mode
// if the length of the encoded instruction be smaller than index
// it will revert. So we don't need any extra check.
gi.payload = encodedInstruction.slice(index, encodedInstruction.length - index);
gi.payload = encodedInstruction.slice(
index,
encodedInstruction.length - index
);
}
/// @dev Parse a UpgradeContractPayload (action 1) with minimal validation
function parseUpgradeContractPayload(bytes memory encodedPayload) public pure returns (UpgradeContractPayload memory uc) {
function parseUpgradeContractPayload(
bytes memory encodedPayload
) public pure returns (UpgradeContractPayload memory uc) {
uint index = 0;
uc.newImplementation = address(encodedPayload.toAddress(index));
index += 20;
require(encodedPayload.length == index, "invalid length for UpgradeContractPayload");
require(
encodedPayload.length == index,
"invalid length for UpgradeContractPayload"
);
}
/// @dev Parse a AuthorizeGovernanceDataSourceTransferPayload (action 2) with minimal validation
function parseAuthorizeGovernanceDataSourceTransferPayload(bytes memory encodedPayload) public pure returns (AuthorizeGovernanceDataSourceTransferPayload memory sgds) {
function parseAuthorizeGovernanceDataSourceTransferPayload(
bytes memory encodedPayload
)
public
pure
returns (AuthorizeGovernanceDataSourceTransferPayload memory sgds)
{
sgds.claimVaa = encodedPayload;
}
/// @dev Parse a AuthorizeGovernanceDataSourceTransferPayload (action 2) with minimal validation
function parseRequestGovernanceDataSourceTransferPayload(bytes memory encodedPayload) public pure
returns (RequestGovernanceDataSourceTransferPayload memory sgdsClaim) {
function parseRequestGovernanceDataSourceTransferPayload(
bytes memory encodedPayload
)
public
pure
returns (RequestGovernanceDataSourceTransferPayload memory sgdsClaim)
{
uint index = 0;
sgdsClaim.governanceDataSourceIndex = encodedPayload.toUint32(index);
index += 4;
require(encodedPayload.length == index, "invalid length for RequestGovernanceDataSourceTransferPayload");
require(
encodedPayload.length == index,
"invalid length for RequestGovernanceDataSourceTransferPayload"
);
}
/// @dev Parse a SetDataSourcesPayload (action 3) with minimal validation
function parseSetDataSourcesPayload(bytes memory encodedPayload) public pure returns (SetDataSourcesPayload memory sds) {
function parseSetDataSourcesPayload(
bytes memory encodedPayload
) public pure returns (SetDataSourcesPayload memory sds) {
uint index = 0;
uint8 dataSourcesLength = encodedPayload.toUint8(index);
index += 1;
sds.dataSources = new PythInternalStructs.DataSource[](dataSourcesLength);
sds.dataSources = new PythInternalStructs.DataSource[](
dataSourcesLength
);
for(uint i = 0; i < dataSourcesLength; i++) {
for (uint i = 0; i < dataSourcesLength; i++) {
sds.dataSources[i].chainId = encodedPayload.toUint16(index);
index += 2;
@ -140,11 +169,16 @@ contract PythGovernanceInstructions {
index += 32;
}
require(encodedPayload.length == index, "invalid length for SetDataSourcesPayload");
require(
encodedPayload.length == index,
"invalid length for SetDataSourcesPayload"
);
}
/// @dev Parse a SetFeePayload (action 4) with minimal validation
function parseSetFeePayload(bytes memory encodedPayload) public pure returns (SetFeePayload memory sf) {
function parseSetFeePayload(
bytes memory encodedPayload
) public pure returns (SetFeePayload memory sf) {
uint index = 0;
uint64 val = encodedPayload.toUint64(index);
@ -153,18 +187,26 @@ contract PythGovernanceInstructions {
uint64 expo = encodedPayload.toUint64(index);
index += 8;
sf.newFee = uint256(val) * uint256(10)**uint256(expo);
sf.newFee = uint256(val) * uint256(10) ** uint256(expo);
require(encodedPayload.length == index, "invalid length for SetFeePayload");
require(
encodedPayload.length == index,
"invalid length for SetFeePayload"
);
}
/// @dev Parse a SetValidPeriodPayload (action 5) with minimal validation
function parseSetValidPeriodPayload(bytes memory encodedPayload) public pure returns (SetValidPeriodPayload memory svp) {
function parseSetValidPeriodPayload(
bytes memory encodedPayload
) public pure returns (SetValidPeriodPayload memory svp) {
uint index = 0;
svp.newValidPeriod = uint256(encodedPayload.toUint64(index));
index += 8;
require(encodedPayload.length == index, "invalid length for SetValidPeriodPayload");
require(
encodedPayload.length == index,
"invalid length for SetValidPeriodPayload"
);
}
}

View File

@ -15,7 +15,6 @@ contract PythInternalStructs {
int32 expo;
int64 price;
uint64 conf;
// slot 2
int64 emaPrice;
uint64 emaConf;

View File

@ -18,7 +18,10 @@ contract PythSetters is PythState {
_state.wormhole = payable(wh);
}
function setLatestPriceInfo(bytes32 priceId, PythInternalStructs.PriceInfo memory info) internal {
function setLatestPriceInfo(
bytes32 priceId,
PythInternalStructs.PriceInfo memory info
) internal {
_state.latestPriceInfo[priceId] = info;
}
@ -30,7 +33,9 @@ contract PythSetters is PythState {
_state.validTimePeriodSeconds = validTimePeriodSeconds;
}
function setGovernanceDataSource(PythInternalStructs.DataSource memory newDataSource) internal {
function setGovernanceDataSource(
PythInternalStructs.DataSource memory newDataSource
) internal {
_state.governanceDataSource = newDataSource;
}

View File

@ -11,24 +11,18 @@ contract PythStorage {
address wormhole;
uint16 _deprecatedPyth2WormholeChainId; // Replaced by validDataSources/isValidDataSource
bytes32 _deprecatedPyth2WormholeEmitter; // Ditto
// After a backward-incompatible change in PriceFeed this mapping got deprecated.
mapping(bytes32 => PythDeprecatedStructs.DeprecatedPriceInfoV1) _deprecatedLatestPriceInfoV1;
// For tracking all active emitter/chain ID pairs
PythInternalStructs.DataSource[] validDataSources;
// (chainId, emitterAddress) => isValid; takes advantage of
// constant-time mapping lookup for VM verification
mapping(bytes32 => bool) isValidDataSource;
uint singleUpdateFeeInWei;
/// Maximum acceptable time period before price is considered to be stale.
/// This includes attestation delay, block time, and potential clock drift
/// between the source/target chains.
uint validTimePeriodSeconds;
// Governance data source. VAA messages from this source can change this contract
// state. e.g., upgrade the contract, change the valid data sources, and more.
PythInternalStructs.DataSource governanceDataSource;
@ -36,15 +30,12 @@ contract PythStorage {
// with a lower or equal sequence number will be discarded. This prevents double-execution,
// and also makes sure that messages are executed in the right order.
uint64 lastExecutedGovernanceSequence;
// Mapping of cached price information
// priceId => PriceInfo
mapping(bytes32 => PythDeprecatedStructs.DeprecatedPriceInfoV2) _deprecatedLatestPriceInfoV2;
// Index of the governance data source, increased each time the governance data source
// changes.
// changes.
uint32 governanceDataSourceIndex;
// Mapping of cached price information
// priceId => PriceInfo
mapping(bytes32 => PythInternalStructs.PriceInfo) latestPriceInfo;

View File

@ -12,13 +12,18 @@ import "@openzeppelin/contracts-upgradeable/access/OwnableUpgradeable.sol";
import "./PythGovernance.sol";
import "./Pyth.sol";
contract PythUpgradable is Initializable, OwnableUpgradeable, UUPSUpgradeable, Pyth, PythGovernance {
contract PythUpgradable is
Initializable,
OwnableUpgradeable,
UUPSUpgradeable,
Pyth,
PythGovernance
{
function initialize(
address wormhole,
uint16 pyth2WormholeChainId,
bytes32 pyth2WormholeEmitter
) initializer public {
) public initializer {
__Ownable_init();
__UUPSUpgradeable_init();
@ -26,50 +31,70 @@ contract PythUpgradable is Initializable, OwnableUpgradeable, UUPSUpgradeable, P
}
/// Privileged function to specify additional data sources in the contract
function addDataSource(uint16 chainId, bytes32 emitter) onlyOwner public {
PythInternalStructs.DataSource memory ds = PythInternalStructs.DataSource(chainId, emitter);
require(!PythGetters.isValidDataSource(ds.chainId, ds.emitterAddress), "Data source already added");
function addDataSource(uint16 chainId, bytes32 emitter) public onlyOwner {
PythInternalStructs.DataSource memory ds = PythInternalStructs
.DataSource(chainId, emitter);
require(
!PythGetters.isValidDataSource(ds.chainId, ds.emitterAddress),
"Data source already added"
);
_state.isValidDataSource[hashDataSource(ds)] = true;
_state.validDataSources.push(ds);
}
/// Privileged fucntion to remove the specified data source. Assumes _state.validDataSources has no duplicates.
function removeDataSource(uint16 chainId, bytes32 emitter) onlyOwner public {
PythInternalStructs.DataSource memory ds = PythInternalStructs.DataSource(chainId, emitter);
require(PythGetters.isValidDataSource(ds.chainId, ds.emitterAddress), "Data source not found, not removing");
function removeDataSource(
uint16 chainId,
bytes32 emitter
) public onlyOwner {
PythInternalStructs.DataSource memory ds = PythInternalStructs
.DataSource(chainId, emitter);
require(
PythGetters.isValidDataSource(ds.chainId, ds.emitterAddress),
"Data source not found, not removing"
);
_state.isValidDataSource[hashDataSource(ds)] = false;
for (uint i = 0; i < _state.validDataSources.length; ++i) {
for (uint i = 0; i < _state.validDataSources.length; ++i) {
// Find the source to remove
if (_state.validDataSources[i].chainId == ds.chainId || _state.validDataSources[i].emitterAddress == ds.emitterAddress) {
if (
_state.validDataSources[i].chainId == ds.chainId ||
_state.validDataSources[i].emitterAddress == ds.emitterAddress
) {
// Copy last element to overwrite the target data source
_state.validDataSources[i] = _state.validDataSources[_state.validDataSources.length - 1];
_state.validDataSources[i] = _state.validDataSources[
_state.validDataSources.length - 1
];
// Remove the last element we just preserved
_state.validDataSources.pop();
break;
}
}
}
/// Privileged function to update the price update fee
function updateSingleUpdateFeeInWei(uint newFee) onlyOwner public {
function updateSingleUpdateFeeInWei(uint newFee) public onlyOwner {
PythSetters.setSingleUpdateFeeInWei(newFee);
}
/// Privileged function to update the valid time period for a price.
function updateValidTimePeriodSeconds(uint newValidTimePeriodSeconds) onlyOwner public {
function updateValidTimePeriodSeconds(
uint newValidTimePeriodSeconds
) public onlyOwner {
PythSetters.setValidTimePeriodSeconds(newValidTimePeriodSeconds);
}
// Privileged function to update the governance emitter
function updateGovernanceDataSource(uint16 chainId, bytes32 emitter, uint64 sequence) onlyOwner public {
PythInternalStructs.DataSource memory ds = PythInternalStructs.DataSource(chainId, emitter);
function updateGovernanceDataSource(
uint16 chainId,
bytes32 emitter,
uint64 sequence
) public onlyOwner {
PythInternalStructs.DataSource memory ds = PythInternalStructs
.DataSource(chainId, emitter);
PythSetters.setGovernanceDataSource(ds);
PythSetters.setLastExecutedGovernanceSequence(sequence);
}
@ -86,13 +111,18 @@ contract PythUpgradable is Initializable, OwnableUpgradeable, UUPSUpgradeable, P
}
// Execute a UpgradeContract governance message
function upgradeUpgradableContract(UpgradeContractPayload memory payload) override internal {
function upgradeUpgradableContract(
UpgradeContractPayload memory payload
) internal override {
address oldImplementation = _getImplementation();
_upgradeToAndCallUUPS(payload.newImplementation, new bytes(0), false);
// Calling a method using `this.<method>` will cause a contract call that will use
// the new contract.
require(this.pythUpgradableMagic() == 0x97a6f304, "the new implementation is not a Pyth contract");
require(
this.pythUpgradableMagic() == 0x97a6f304,
"the new implementation is not a Pyth contract"
);
emit ContractUpgraded(oldImplementation, _getImplementation());
}

View File

@ -8,9 +8,11 @@ import "./ReceiverState.sol";
contract ReceiverGetters is ReceiverState {
function owner() public view returns (address) {
return _state.owner;
}
function getGuardianSet(uint32 index) public view returns (ReceiverStructs.GuardianSet memory) {
}
function getGuardianSet(
uint32 index
) public view returns (ReceiverStructs.GuardianSet memory) {
return _state.guardianSets[index];
}
@ -22,7 +24,9 @@ contract ReceiverGetters is ReceiverState {
return _state.guardianSetExpiry;
}
function governanceActionIsConsumed(bytes32 hash) public view returns (bool) {
function governanceActionIsConsumed(
bytes32 hash
) public view returns (bool) {
return _state.consumedGovernanceActions[hash];
}
@ -30,11 +34,11 @@ contract ReceiverGetters is ReceiverState {
return _state.initializedImplementations[impl];
}
function governanceChainId() public view returns (uint16){
function governanceChainId() public view returns (uint16) {
return _state.provider.governanceChainId;
}
function governanceContract() public view returns (bytes32){
function governanceContract() public view returns (bytes32) {
return _state.provider.governanceContract;
}
}

View File

@ -10,12 +10,24 @@ import "./ReceiverSetters.sol";
import "@openzeppelin/contracts/proxy/ERC1967/ERC1967Upgrade.sol";
abstract contract ReceiverGovernance is ReceiverGovernanceStructs, ReceiverMessages, ReceiverSetters, ERC1967Upgrade {
event ContractUpgraded(address indexed oldContract, address indexed newContract);
event OwnershipTransfered(address indexed oldOwner, address indexed newOwner);
abstract contract ReceiverGovernance is
ReceiverGovernanceStructs,
ReceiverMessages,
ReceiverSetters,
ERC1967Upgrade
{
event ContractUpgraded(
address indexed oldContract,
address indexed newContract
);
event OwnershipTransfered(
address indexed oldOwner,
address indexed newOwner
);
// "Core" (left padded)
bytes32 constant module = 0x00000000000000000000000000000000000000000000000000000000436f7265;
bytes32 constant module =
0x00000000000000000000000000000000000000000000000000000000436f7265;
function submitNewGuardianSet(bytes memory _vm) public {
ReceiverStructs.VM memory vm = parseVM(_vm);
@ -23,12 +35,19 @@ abstract contract ReceiverGovernance is ReceiverGovernanceStructs, ReceiverMessa
(bool isValid, string memory reason) = verifyGovernanceVM(vm);
require(isValid, reason);
ReceiverGovernanceStructs.GuardianSetUpgrade memory upgrade = parseGuardianSetUpgrade(vm.payload);
ReceiverGovernanceStructs.GuardianSetUpgrade
memory upgrade = parseGuardianSetUpgrade(vm.payload);
require(upgrade.module == module, "invalid Module");
require(upgrade.newGuardianSet.keys.length > 0, "new guardian set is empty");
require(upgrade.newGuardianSetIndex == getCurrentGuardianSetIndex() + 1, "index must increase in steps of 1");
require(
upgrade.newGuardianSet.keys.length > 0,
"new guardian set is empty"
);
require(
upgrade.newGuardianSetIndex == getCurrentGuardianSetIndex() + 1,
"index must increase in steps of 1"
);
setGovernanceActionConsumed(vm.hash);
@ -43,17 +62,21 @@ abstract contract ReceiverGovernance is ReceiverGovernanceStructs, ReceiverMessa
_upgradeTo(newImplementation);
// Call initialize function of the new implementation
(bool success, bytes memory reason) = newImplementation.delegatecall(abi.encodeWithSignature("initialize()"));
(bool success, bytes memory reason) = newImplementation.delegatecall(
abi.encodeWithSignature("initialize()")
);
require(success, string(reason));
emit ContractUpgraded(currentImplementation, newImplementation);
}
function verifyGovernanceVM(ReceiverStructs.VM memory vm) internal view returns (bool, string memory){
function verifyGovernanceVM(
ReceiverStructs.VM memory vm
) internal view returns (bool, string memory) {
// validate vm
(bool isValid, string memory reason) = verifyVM(vm);
if (!isValid){
if (!isValid) {
return (false, reason);
}
@ -71,7 +94,7 @@ abstract contract ReceiverGovernance is ReceiverGovernanceStructs, ReceiverMessa
}
// prevent re-entry
if (governanceActionIsConsumed(vm.hash)){
if (governanceActionIsConsumed(vm.hash)) {
return (false, "governance action already consumed");
}
@ -82,7 +105,7 @@ abstract contract ReceiverGovernance is ReceiverGovernanceStructs, ReceiverMessa
require(newOwner != address(0), "new owner cannot be the zero address");
address currentOwner = owner();
setOwner(newOwner);
emit OwnershipTransfered(currentOwner, newOwner);

View File

@ -18,12 +18,13 @@ contract ReceiverGovernanceStructs {
bytes32 module;
uint8 action;
uint16 chain;
ReceiverStructs.GuardianSet newGuardianSet;
uint32 newGuardianSetIndex;
}
function parseGuardianSetUpgrade(bytes memory encodedUpgrade) public pure returns (GuardianSetUpgrade memory gsu) {
function parseGuardianSetUpgrade(
bytes memory encodedUpgrade
) public pure returns (GuardianSetUpgrade memory gsu) {
uint index = 0;
gsu.module = encodedUpgrade.toBytes32(index);
@ -44,16 +45,15 @@ contract ReceiverGovernanceStructs {
index += 1;
gsu.newGuardianSet = ReceiverStructs.GuardianSet({
keys : new address[](guardianLength),
expirationTime : 0
keys: new address[](guardianLength),
expirationTime: 0
});
for(uint i = 0; i < guardianLength; i++) {
for (uint i = 0; i < guardianLength; i++) {
gsu.newGuardianSet.keys[i] = encodedUpgrade.toAddress(index);
index += 20;
}
require(encodedUpgrade.length == index, "invalid GuardianSetUpgrade");
}
}

View File

@ -9,21 +9,21 @@ import "./ReceiverGovernance.sol";
import "@openzeppelin/contracts/proxy/ERC1967/ERC1967Upgrade.sol";
contract ReceiverImplementation is ReceiverGovernance {
modifier initializer() {
address implementation = ERC1967Upgrade._getImplementation();
require(
!isInitialized(implementation),
"already initialized"
);
require(!isInitialized(implementation), "already initialized");
setInitialized(implementation);
_;
}
fallback() external payable {revert("unsupported");}
fallback() external payable {
revert("unsupported");
}
receive() external payable {revert("the Wormhole Receiver contract does not accept assets");}
receive() external payable {
revert("the Wormhole Receiver contract does not accept assets");
}
}

View File

@ -8,56 +8,75 @@ import "./ReceiverGetters.sol";
import "./ReceiverStructs.sol";
import "../libraries/external/BytesLib.sol";
contract ReceiverMessages is ReceiverGetters {
using BytesLib for bytes;
/// @dev parseAndVerifyVM serves to parse an encodedVM and wholy validate it for consumption
function parseAndVerifyVM(bytes calldata encodedVM) public view returns (ReceiverStructs.VM memory vm, bool valid, string memory reason) {
function parseAndVerifyVM(
bytes calldata encodedVM
)
public
view
returns (ReceiverStructs.VM memory vm, bool valid, string memory reason)
{
vm = parseVM(encodedVM);
(valid, reason) = verifyVM(vm);
}
/**
* @dev `verifyVM` serves to validate an arbitrary vm against a valid Guardian set
* - it aims to make sure the VM is for a known guardianSet
* - it aims to ensure the guardianSet is not expired
* - it aims to ensure the VM has reached quorum
* - it aims to verify the signatures provided against the guardianSet
*/
function verifyVM(ReceiverStructs.VM memory vm) public view returns (bool valid, string memory reason) {
/**
* @dev `verifyVM` serves to validate an arbitrary vm against a valid Guardian set
* - it aims to make sure the VM is for a known guardianSet
* - it aims to ensure the guardianSet is not expired
* - it aims to ensure the VM has reached quorum
* - it aims to verify the signatures provided against the guardianSet
*/
function verifyVM(
ReceiverStructs.VM memory vm
) public view returns (bool valid, string memory reason) {
/// @dev Obtain the current guardianSet for the guardianSetIndex provided
ReceiverStructs.GuardianSet memory guardianSet = getGuardianSet(vm.guardianSetIndex);
ReceiverStructs.GuardianSet memory guardianSet = getGuardianSet(
vm.guardianSetIndex
);
/**
* @dev Checks whether the guardianSet has zero keys
* WARNING: This keys check is critical to ensure the guardianSet has keys present AND to ensure
* that guardianSet key size doesn't fall to zero and negatively impact quorum assessment. If guardianSet
* key length is 0 and vm.signatures length is 0, this could compromise the integrity of both vm and
* signature verification.
*/
if(guardianSet.keys.length == 0){
/**
* @dev Checks whether the guardianSet has zero keys
* WARNING: This keys check is critical to ensure the guardianSet has keys present AND to ensure
* that guardianSet key size doesn't fall to zero and negatively impact quorum assessment. If guardianSet
* key length is 0 and vm.signatures length is 0, this could compromise the integrity of both vm and
* signature verification.
*/
if (guardianSet.keys.length == 0) {
return (false, "invalid guardian set");
}
/// @dev Checks if VM guardian set index matches the current index (unless the current set is expired).
if(vm.guardianSetIndex != getCurrentGuardianSetIndex() && guardianSet.expirationTime < block.timestamp){
if (
vm.guardianSetIndex != getCurrentGuardianSetIndex() &&
guardianSet.expirationTime < block.timestamp
) {
return (false, "guardian set has expired");
}
/**
* @dev We're using a fixed point number transformation with 1 decimal to deal with rounding.
* WARNING: This quorum check is critical to assessing whether we have enough Guardian signatures to validate a VM
* if making any changes to this, obtain additional peer review. If guardianSet key length is 0 and
* vm.signatures length is 0, this could compromise the integrity of both vm and signature verification.
*/
if(((guardianSet.keys.length * 10 / 3) * 2) / 10 + 1 > vm.signatures.length){
/**
* @dev We're using a fixed point number transformation with 1 decimal to deal with rounding.
* WARNING: This quorum check is critical to assessing whether we have enough Guardian signatures to validate a VM
* if making any changes to this, obtain additional peer review. If guardianSet key length is 0 and
* vm.signatures length is 0, this could compromise the integrity of both vm and signature verification.
*/
if (
(((guardianSet.keys.length * 10) / 3) * 2) / 10 + 1 >
vm.signatures.length
) {
return (false, "no quorum");
}
/// @dev Verify the proposed vm.signatures against the guardianSet
(bool signaturesValid, string memory invalidReason) = verifySignatures(vm.hash, vm.signatures, guardianSet);
if(!signaturesValid){
(bool signaturesValid, string memory invalidReason) = verifySignatures(
vm.hash,
vm.signatures,
guardianSet
);
if (!signaturesValid) {
return (false, invalidReason);
}
@ -71,17 +90,27 @@ contract ReceiverMessages is ReceiverGetters {
* - it intentioanlly does not solve for quorum (you should use verifyVM if you need these protections)
* - it intentionally returns true when signatures is an empty set (you should use verifyVM if you need these protections)
*/
function verifySignatures(bytes32 hash, ReceiverStructs.Signature[] memory signatures, ReceiverStructs.GuardianSet memory guardianSet) public pure returns (bool valid, string memory reason) {
function verifySignatures(
bytes32 hash,
ReceiverStructs.Signature[] memory signatures,
ReceiverStructs.GuardianSet memory guardianSet
) public pure returns (bool valid, string memory reason) {
uint8 lastIndex = 0;
for (uint i = 0; i < signatures.length; i++) {
ReceiverStructs.Signature memory sig = signatures[i];
/// Ensure that provided signature indices are ascending only
require(i == 0 || sig.guardianIndex > lastIndex, "signature indices must be ascending");
require(
i == 0 || sig.guardianIndex > lastIndex,
"signature indices must be ascending"
);
lastIndex = sig.guardianIndex;
/// Check to see if the signer of the signature does not match a specific Guardian key at the provided index
if(ecrecover(hash, sig.v, sig.r, sig.s) != guardianSet.keys[sig.guardianIndex]){
if (
ecrecover(hash, sig.v, sig.r, sig.s) !=
guardianSet.keys[sig.guardianIndex]
) {
return (false, "VM signature invalid");
}
}
@ -94,7 +123,9 @@ contract ReceiverMessages is ReceiverGetters {
* @dev parseVM serves to parse an encodedVM into a vm struct
* - it intentionally performs no validation functions, it simply parses raw into a struct
*/
function parseVM(bytes memory encodedVM) public pure virtual returns (ReceiverStructs.VM memory vm) {
function parseVM(
bytes memory encodedVM
) public pure virtual returns (ReceiverStructs.VM memory vm) {
uint index = 0;
vm.version = encodedVM.toUint8(index);

View File

@ -9,16 +9,21 @@ contract ReceiverSetters is ReceiverState {
function setOwner(address owner_) internal {
_state.owner = owner_;
}
function updateGuardianSetIndex(uint32 newIndex) internal {
_state.guardianSetIndex = newIndex;
}
function expireGuardianSet(uint32 index) internal {
_state.guardianSets[index].expirationTime = uint32(block.timestamp) + 86400;
_state.guardianSets[index].expirationTime =
uint32(block.timestamp) +
86400;
}
function storeGuardianSet(ReceiverStructs.GuardianSet memory set, uint32 index) internal {
function storeGuardianSet(
ReceiverStructs.GuardianSet memory set,
uint32 index
) internal {
_state.guardianSets[index] = set;
}
@ -37,5 +42,4 @@ contract ReceiverSetters is ReceiverState {
function setGovernanceContract(bytes32 governanceContract) internal {
_state.provider.governanceContract = governanceContract;
}
}

View File

@ -19,10 +19,8 @@ contract ReceiverSetup is ReceiverSetters, ERC1967Upgrade {
setOwner(msg.sender);
ReceiverStructs.GuardianSet memory initialGuardianSet = ReceiverStructs.GuardianSet({
keys : initialGuardians,
expirationTime : 0
});
ReceiverStructs.GuardianSet memory initialGuardianSet = ReceiverStructs
.GuardianSet({keys: initialGuardians, expirationTime: 0});
storeGuardianSet(initialGuardianSet, 0);
// initial guardian set index is 0, which is the default value of the storage slot anyways

View File

@ -21,22 +21,16 @@ contract ReceiverEvents {
contract ReceiverStorage {
struct WormholeState {
ReceiverStructs.Provider provider;
// contract deployer
address owner;
// Mapping of guardian_set_index => guardian set
mapping(uint32 => ReceiverStructs.GuardianSet) guardianSets;
// Current active guardian set index
uint32 guardianSetIndex;
// Period for which a guardian set stays active after it has been replaced
uint32 guardianSetExpiry;
// Mapping of consumed governance actions
mapping(bytes32 => bool) consumedGovernanceActions;
// Mapping of initialized implementations
mapping(address => bool) initializedImplementations;
}

View File

@ -4,36 +4,34 @@
pragma solidity ^0.8.0;
interface ReceiverStructs {
struct Provider {
uint16 governanceChainId;
bytes32 governanceContract;
}
struct Provider {
uint16 governanceChainId;
bytes32 governanceContract;
}
struct GuardianSet {
address[] keys;
uint32 expirationTime;
}
struct GuardianSet {
address[] keys;
uint32 expirationTime;
}
struct Signature {
bytes32 r;
bytes32 s;
uint8 v;
uint8 guardianIndex;
}
struct Signature {
bytes32 r;
bytes32 s;
uint8 v;
uint8 guardianIndex;
}
struct VM {
uint8 version;
uint32 timestamp;
uint32 nonce;
uint16 emitterChainId;
bytes32 emitterAddress;
uint64 sequence;
uint8 consistencyLevel;
bytes payload;
uint32 guardianSetIndex;
Signature[] signatures;
bytes32 hash;
}
struct VM {
uint8 version;
uint32 timestamp;
uint32 nonce;
uint16 emitterChainId;
bytes32 emitterAddress;
uint64 sequence;
uint8 consistencyLevel;
bytes payload;
uint32 guardianSetIndex;
Signature[] signatures;
bytes32 hash;
}
}

View File

@ -6,8 +6,8 @@ pragma solidity ^0.8.0;
import "@openzeppelin/contracts/proxy/ERC1967/ERC1967Proxy.sol";
contract WormholeReceiver is ERC1967Proxy {
constructor (address setup, bytes memory initData) ERC1967Proxy(
setup,
initData
) { }
constructor(
address setup,
bytes memory initData
) ERC1967Proxy(setup, initData) {}
}

View File

@ -6,7 +6,9 @@ pragma solidity ^0.8.0;
import "./State.sol";
contract Getters is State {
function getGuardianSet(uint32 index) public view returns (Structs.GuardianSet memory) {
function getGuardianSet(
uint32 index
) public view returns (Structs.GuardianSet memory) {
return _state.guardianSets[index];
}
@ -18,7 +20,9 @@ contract Getters is State {
return _state.guardianSetExpiry;
}
function governanceActionIsConsumed(bytes32 hash) public view returns (bool) {
function governanceActionIsConsumed(
bytes32 hash
) public view returns (bool) {
return _state.consumedGovernanceActions[hash];
}
@ -30,11 +34,11 @@ contract Getters is State {
return _state.provider.chainId;
}
function governanceChainId() public view returns (uint16){
function governanceChainId() public view returns (uint16) {
return _state.provider.governanceChainId;
}
function governanceContract() public view returns (bytes32){
function governanceContract() public view returns (bytes32) {
return _state.provider.governanceContract;
}
@ -45,4 +49,4 @@ contract Getters is State {
function nextSequence(address emitter) public view returns (uint64) {
return _state.sequences[emitter];
}
}
}

View File

@ -10,12 +10,21 @@ import "./Setters.sol";
import "@openzeppelin/contracts/proxy/ERC1967/ERC1967Upgrade.sol";
abstract contract Governance is GovernanceStructs, Messages, Setters, ERC1967Upgrade {
event ContractUpgraded(address indexed oldContract, address indexed newContract);
abstract contract Governance is
GovernanceStructs,
Messages,
Setters,
ERC1967Upgrade
{
event ContractUpgraded(
address indexed oldContract,
address indexed newContract
);
event GuardianSetAdded(uint32 indexed index);
// "Core" (left padded)
bytes32 constant module = 0x00000000000000000000000000000000000000000000000000000000436f7265;
bytes32 constant module =
0x00000000000000000000000000000000000000000000000000000000436f7265;
function submitContractUpgrade(bytes memory _vm) public {
Structs.VM memory vm = parseVM(_vm);
@ -23,7 +32,9 @@ abstract contract Governance is GovernanceStructs, Messages, Setters, ERC1967Upg
(bool isValid, string memory reason) = verifyGovernanceVM(vm);
require(isValid, reason);
GovernanceStructs.ContractUpgrade memory upgrade = parseContractUpgrade(vm.payload);
GovernanceStructs.ContractUpgrade memory upgrade = parseContractUpgrade(
vm.payload
);
require(upgrade.module == module, "Invalid Module");
require(upgrade.chain == chainId(), "Invalid Chain");
@ -39,7 +50,9 @@ abstract contract Governance is GovernanceStructs, Messages, Setters, ERC1967Upg
(bool isValid, string memory reason) = verifyGovernanceVM(vm);
require(isValid, reason);
GovernanceStructs.SetMessageFee memory upgrade = parseSetMessageFee(vm.payload);
GovernanceStructs.SetMessageFee memory upgrade = parseSetMessageFee(
vm.payload
);
require(upgrade.module == module, "Invalid Module");
require(upgrade.chain == chainId(), "Invalid Chain");
@ -55,13 +68,23 @@ abstract contract Governance is GovernanceStructs, Messages, Setters, ERC1967Upg
(bool isValid, string memory reason) = verifyGovernanceVM(vm);
require(isValid, reason);
GovernanceStructs.GuardianSetUpgrade memory upgrade = parseGuardianSetUpgrade(vm.payload);
GovernanceStructs.GuardianSetUpgrade
memory upgrade = parseGuardianSetUpgrade(vm.payload);
require(upgrade.module == module, "invalid Module");
require(upgrade.chain == chainId() || upgrade.chain == 0, "invalid Chain");
require(
upgrade.chain == chainId() || upgrade.chain == 0,
"invalid Chain"
);
require(upgrade.newGuardianSet.keys.length > 0, "new guardian set is empty");
require(upgrade.newGuardianSetIndex == getCurrentGuardianSetIndex() + 1, "index must increase in steps of 1");
require(
upgrade.newGuardianSet.keys.length > 0,
"new guardian set is empty"
);
require(
upgrade.newGuardianSetIndex == getCurrentGuardianSetIndex() + 1,
"index must increase in steps of 1"
);
setGovernanceActionConsumed(vm.hash);
@ -76,14 +99,21 @@ abstract contract Governance is GovernanceStructs, Messages, Setters, ERC1967Upg
(bool isValid, string memory reason) = verifyGovernanceVM(vm);
require(isValid, reason);
GovernanceStructs.TransferFees memory transfer = parseTransferFees(vm.payload);
GovernanceStructs.TransferFees memory transfer = parseTransferFees(
vm.payload
);
require(transfer.module == module, "invalid Module");
require(transfer.chain == chainId() || transfer.chain == 0, "invalid Chain");
require(
transfer.chain == chainId() || transfer.chain == 0,
"invalid Chain"
);
setGovernanceActionConsumed(vm.hash);
address payable recipient = payable(address(uint160(uint256(transfer.recipient))));
address payable recipient = payable(
address(uint160(uint256(transfer.recipient)))
);
recipient.transfer(transfer.amount);
}
@ -94,17 +124,21 @@ abstract contract Governance is GovernanceStructs, Messages, Setters, ERC1967Upg
_upgradeTo(newImplementation);
// Call initialize function of the new implementation
(bool success, bytes memory reason) = newImplementation.delegatecall(abi.encodeWithSignature("initialize()"));
(bool success, bytes memory reason) = newImplementation.delegatecall(
abi.encodeWithSignature("initialize()")
);
require(success, string(reason));
emit ContractUpgraded(currentImplementation, newImplementation);
}
function verifyGovernanceVM(Structs.VM memory vm) internal view returns (bool, string memory){
function verifyGovernanceVM(
Structs.VM memory vm
) internal view returns (bool, string memory) {
// validate vm
(bool isValid, string memory reason) = verifyVM(vm);
if (!isValid){
if (!isValid) {
return (false, reason);
}
@ -122,10 +156,10 @@ abstract contract Governance is GovernanceStructs, Messages, Setters, ERC1967Upg
}
// prevent re-entry
if (governanceActionIsConsumed(vm.hash)){
if (governanceActionIsConsumed(vm.hash)) {
return (false, "governance action already consumed");
}
return (true, "");
}
}
}

View File

@ -18,7 +18,6 @@ contract GovernanceStructs {
bytes32 module;
uint8 action;
uint16 chain;
address newContract;
}
@ -26,7 +25,6 @@ contract GovernanceStructs {
bytes32 module;
uint8 action;
uint16 chain;
Structs.GuardianSet newGuardianSet;
uint32 newGuardianSetIndex;
}
@ -35,7 +33,6 @@ contract GovernanceStructs {
bytes32 module;
uint8 action;
uint16 chain;
uint256 messageFee;
}
@ -43,12 +40,13 @@ contract GovernanceStructs {
bytes32 module;
uint8 action;
uint16 chain;
uint256 amount;
bytes32 recipient;
}
function parseContractUpgrade(bytes memory encodedUpgrade) public pure returns (ContractUpgrade memory cu) {
function parseContractUpgrade(
bytes memory encodedUpgrade
) public pure returns (ContractUpgrade memory cu) {
uint index = 0;
cu.module = encodedUpgrade.toBytes32(index);
@ -62,13 +60,17 @@ contract GovernanceStructs {
cu.chain = encodedUpgrade.toUint16(index);
index += 2;
cu.newContract = address(uint160(uint256(encodedUpgrade.toBytes32(index))));
cu.newContract = address(
uint160(uint256(encodedUpgrade.toBytes32(index)))
);
index += 32;
require(encodedUpgrade.length == index, "invalid ContractUpgrade");
}
function parseGuardianSetUpgrade(bytes memory encodedUpgrade) public pure returns (GuardianSetUpgrade memory gsu) {
function parseGuardianSetUpgrade(
bytes memory encodedUpgrade
) public pure returns (GuardianSetUpgrade memory gsu) {
uint index = 0;
gsu.module = encodedUpgrade.toBytes32(index);
@ -89,11 +91,11 @@ contract GovernanceStructs {
index += 1;
gsu.newGuardianSet = Structs.GuardianSet({
keys : new address[](guardianLength),
expirationTime : 0
keys: new address[](guardianLength),
expirationTime: 0
});
for(uint i = 0; i < guardianLength; i++) {
for (uint i = 0; i < guardianLength; i++) {
gsu.newGuardianSet.keys[i] = encodedUpgrade.toAddress(index);
index += 20;
}
@ -101,7 +103,9 @@ contract GovernanceStructs {
require(encodedUpgrade.length == index, "invalid GuardianSetUpgrade");
}
function parseSetMessageFee(bytes memory encodedSetMessageFee) public pure returns (SetMessageFee memory smf) {
function parseSetMessageFee(
bytes memory encodedSetMessageFee
) public pure returns (SetMessageFee memory smf) {
uint index = 0;
smf.module = encodedSetMessageFee.toBytes32(index);
@ -121,7 +125,9 @@ contract GovernanceStructs {
require(encodedSetMessageFee.length == index, "invalid SetMessageFee");
}
function parseTransferFees(bytes memory encodedTransferFees) public pure returns (TransferFees memory tf) {
function parseTransferFees(
bytes memory encodedTransferFees
) public pure returns (TransferFees memory tf) {
uint index = 0;
tf.module = encodedTransferFees.toBytes32(index);
@ -143,4 +149,4 @@ contract GovernanceStructs {
require(encodedTransferFees.length == index, "invalid TransferFees");
}
}
}

Some files were not shown because too many files have changed in this diff Show More