add fuel contract by Fuel Labs (#1513)

Daniel Chew 2024-04-25 22:51:41 +09:00 committed by GitHub
parent 567b4a6597
commit c2da454637
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
56 changed files with 8911 additions and 0 deletions


@@ -0,0 +1 @@
out/

target_chains/fuel/contracts/Cargo.lock (generated, 4229 lines): file diff suppressed because it is too large


@@ -0,0 +1,27 @@
[package]
name = "pyth_sdk"
description = "A cargo-generate template for Rust + Sway integration testing."
version = "0.1.0"
edition = "2021"
authors = ["Fuel Labs <contact@fuel.sh>"]
license = "Apache-2.0"
[dependencies]
rand = "0.8.5"
base64 = "0.22"
fuels = { version = "0.55", features = ["fuel-core-lib"] }
tokio = { version = "1.12", features = ["rt", "macros"] }
hex = "0.4.3"
reqwest = "0.11.27"
serde_json = "1.0.114"
serde = "1.0.197"
dotenv = "0.15.0"
[[bin]]
name = "deploy_pyth"
path = "scripts/deploy_pyth.rs"
[[test]]
harness = true
name = "integration_tests"
path = "tests/harness.rs"


@@ -0,0 +1,44 @@
[[package]]
name = "core"
source = "path+from-root-C3992B43B72ADB8C"
[[package]]
name = "ownership"
source = "git+https://github.com/FuelLabs/sway-libs?tag=v0.18.0#8d196e9379463d4596ac582a20a84ed52ff58c69"
dependencies = [
"src_5",
"std",
]
[[package]]
name = "pyth-contract"
source = "member"
dependencies = [
"ownership",
"pyth_interface",
"src5",
"std",
]
[[package]]
name = "pyth_interface"
source = "path+from-root-555D3D27A908977B"
dependencies = [
"src5",
"std",
]
[[package]]
name = "src5"
source = "git+https://github.com/FuelLabs/sway-standards?tag=v0.3.3#4198b4b07449ad16104cc8a0501f3013670fdcfd"
dependencies = ["std"]
[[package]]
name = "src_5"
source = "git+https://github.com/FuelLabs/sway-standards?tag=v0.2.2#6989cf8224b0d8aabea62f3d3c648fc754948705"
dependencies = ["std"]
[[package]]
name = "std"
source = "git+https://github.com/fuellabs/sway?tag=v0.49.1#2ac7030570f22510b0ac2a7b5ddf7baa20bdc0e1"
dependencies = ["core"]


@@ -0,0 +1,2 @@
[workspace]
members = ["./pyth-contract"]


@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.


@@ -0,0 +1,44 @@
# Pyth-integration
> **_NOTE:_** The project is a WIP.
An implementation of a [Pyth Network](https://pyth.network/) oracle contract in Sway, utilising minimal internal [Wormhole](https://docs.wormhole.com/wormhole/) functionality and state.
## Interfaces
The project provides four interfaces for interaction with the oracle contract:
- [PythCore](./pyth-interface/src/interface.sw#L20) - provides the core functionality required to utilise the oracle: getting fees, updating prices, and fetching prices.
- [PythInit](./pyth-interface/src/interface.sw#L250) - provides the functionality to set up the oracle's state.
- [PythInfo](./pyth-interface/src/interface.sw#L255) - provides additional information about the oracle's state.
- [WormholeGuardians](./pyth-interface/src/interface.sw#L283) - provides functionality to maintain and query the Wormhole state elements used by the oracle.
## Running the project
### Project
Run the following commands from the root of the repository.
#### Program compilation
```bash
forc build
```
#### Running the tests
Before running the tests, the programs must be compiled with the command above.
```bash
cargo test
```
#### Before deploying
Before deploying the oracle contract, the `DEPLOYER` must be set to the address of the deploying wallet in the `configurable` block, so that the deployer can set up the contract with the `constructor()` method.
Parameters for the `constructor()` method can be seen in the [tests of the method](./pyth-contract/tests/functions/pyth_init/constuctor.rs#L28), which at the time of writing use the real, up-to-date values from Pyth's documentation and EVM integrations. Care should be taken to ensure that the most up-to-date values are used for the `constructor()` parameters.
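A minimal sketch of the deployment flow, assuming the `deploy_pyth` binary declared in the Rust package's `Cargo.toml` is used to deploy and configure the contract, and that it reads wallet/network settings from a `.env` file (suggested by the `dotenv` dependency; the exact variables are not shown here):

```bash
# compile the Sway programs first
forc build

# run the deploy script declared as [[bin]] deploy_pyth (scripts/deploy_pyth.rs);
# wallet and network configuration are assumed to come from a .env file
cargo run --bin deploy_pyth
```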
#### Fuel Beta-5 network deployment:
The Pyth oracle contract has been deployed to Beta-5 at the `ContractId`: `0xe69daeb9fcf4c536c0fe402403b4b9e9822cc8b1f296e5d754be12cc384554c5`.


@@ -0,0 +1,6 @@
[toolchain]
channel = "nightly-2024-01-24"
[components]
forc = "0.49.1"
fuel-core = "0.22.0"


@@ -0,0 +1,10 @@
[project]
authors = ["Fuel Labs <contact@fuel.sh>"]
entry = "main.sw"
license = "Apache-2.0"
name = "pyth-contract"
[dependencies]
ownership = { git = "https://github.com/FuelLabs/sway-libs", tag = "v0.18.0" }
pyth_interface = { path = "../pyth-interface" }
src5 = { git = "https://github.com/FuelLabs/sway-standards", tag = "v0.3.3" }


@@ -0,0 +1,8 @@
library;
pub mod data_source;
pub mod wormhole_light;
pub mod price;
pub mod accumulator_update;
pub mod batch_attestation_update;
pub mod update_type;


@@ -0,0 +1,133 @@
library;
use ::errors::PythError;
use ::data_structures::{data_source::*, price::*, wormhole_light::{StorageGuardianSet, WormholeVM}};
use pyth_interface::data_structures::{data_source::DataSource, price::{PriceFeed, PriceFeedId}};
use std::{bytes::Bytes, hash::Hash};
pub struct AccumulatorUpdate {
data: Bytes,
}
const MINIMUM_ALLOWED_MINOR_VERSION = 0;
const MAJOR_VERSION = 1;
impl AccumulatorUpdate {
pub fn new(data: Bytes) -> Self {
Self { data }
}
pub fn total_updates(self, ref mut offset: u64) -> u64 {
let proof_size = u16::from_be_bytes([self.data.get(offset).unwrap(), self.data.get(offset + 1).unwrap()]).as_u64();
offset += proof_size + 2;
self.data.get(offset).unwrap().as_u64()
}
pub fn verify(self) -> u64 {
// skip magic as already checked when this is called
let major_version = self.data.get(4);
require(
major_version
.is_some() && major_version
.unwrap() == MAJOR_VERSION,
PythError::InvalidMajorVersion,
);
let minor_version = self.data.get(5);
require(
minor_version
.is_some() && minor_version
.unwrap() >= MINIMUM_ALLOWED_MINOR_VERSION,
PythError::InvalidMinorVersion,
);
let trailing_header_size = self.data.get(6);
require(trailing_header_size.is_some(), PythError::InvalidHeaderSize);
// skip trailing headers and update type
let offset = 8 + trailing_header_size.unwrap().as_u64();
require(self.data.len >= offset, PythError::InvalidUpdateDataLength);
offset
}
}
impl AccumulatorUpdate {
#[storage(read)]
pub fn verify_and_parse(
self,
current_guardian_set_index: u32,
wormhole_guardian_sets: StorageKey<StorageMap<u32, StorageGuardianSet>>,
is_valid_data_source: StorageKey<StorageMap<DataSource, bool>>,
) -> (u64, Bytes, u64, Bytes) {
let encoded_offset = self.verify();
let (_, slice) = self.data.split_at(encoded_offset);
let (encoded_slice, _) = slice.split_at(self.data.len - encoded_offset);
let mut offset = 0;
let wormhole_proof_size = u16::from_be_bytes([encoded_slice.get(offset).unwrap(), encoded_slice.get(offset + 1).unwrap()]).as_u64();
offset += 2;
let (_, slice) = encoded_slice.split_at(offset);
let (encoded_vm, _) = slice.split_at(wormhole_proof_size);
let vm = WormholeVM::parse_and_verify_pyth_vm(
current_guardian_set_index,
encoded_vm,
wormhole_guardian_sets,
is_valid_data_source,
);
offset += wormhole_proof_size;
let encoded_payload = vm.payload;
/*
Payload offset:
skip magic (4 bytes) as already checked when this is called
skip update_type (1 byte) as it can only be WormholeMerkle
skip slot (8 bytes) as unused
skip ring_size (4 bytes) as unused
*/
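// 4 (magic) + 1 (update_type) + 8 (slot) + 4 (ring_size) = 17 bytes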
let mut payload_offset = 17;
let (_, slice) = encoded_payload.split_at(payload_offset);
let (digest, _) = slice.split_at(20);
payload_offset += 20;
require(
payload_offset <= encoded_payload
.len,
PythError::InvalidPayloadLength,
);
let number_of_updates = encoded_slice.get(offset);
require(
number_of_updates
.is_some(),
PythError::NumberOfUpdatesIrretrievable,
);
offset += 1;
(offset, digest, number_of_updates.unwrap().as_u64(), encoded_slice)
}
}
impl AccumulatorUpdate {
#[storage(read, write)]
pub fn update_price_feeds(
self,
current_guardian_set_index: u32,
wormhole_guardian_sets: StorageKey<StorageMap<u32, StorageGuardianSet>>,
latest_price_feed: StorageKey<StorageMap<PriceFeedId, PriceFeed>>,
is_valid_data_source: StorageKey<StorageMap<DataSource, bool>>,
) -> (u64, Vec<PriceFeedId>) {
let (mut offset, digest, number_of_updates, encoded_data) = self.verify_and_parse(
current_guardian_set_index,
wormhole_guardian_sets,
is_valid_data_source,
);
let mut updated_ids = Vec::new();
let mut i = 0;
while i < number_of_updates {
let (new_offset, price_feed) = PriceFeed::extract_from_merkle_proof(digest, encoded_data, offset);
offset = new_offset;
let latest_publish_time = match latest_price_feed.get(price_feed.id).try_read() {
Some(price_feed) => price_feed.price.publish_time,
None => 0,
};
if price_feed.price.publish_time > latest_publish_time {
latest_price_feed.insert(price_feed.id, price_feed);
updated_ids.push(price_feed.id);
}
i += 1;
}
require(
offset == encoded_data
.len,
PythError::InvalidUpdateDataLength,
);
(number_of_updates, updated_ids)
}
}


@@ -0,0 +1,95 @@
library;
use ::errors::PythError;
use ::data_structures::{data_source::*, price::*, wormhole_light::{StorageGuardianSet, WormholeVM}};
use pyth_interface::data_structures::{data_source::DataSource, price::{PriceFeed, PriceFeedId}};
use std::{bytes::Bytes, hash::Hash};
const BATCH_MAGIC: u32 = 0x50325748;
pub struct BatchAttestationUpdate {
data: Bytes,
}
impl BatchAttestationUpdate {
pub fn new(data: Bytes) -> Self {
Self { data }
}
#[storage(read, write)]
pub fn update_price_feeds(
self,
current_guardian_set_index: u32,
wormhole_guardian_sets: StorageKey<StorageMap<u32, StorageGuardianSet>>,
latest_price_feed: StorageKey<StorageMap<PriceFeedId, PriceFeed>>,
is_valid_data_source: StorageKey<StorageMap<DataSource, bool>>,
) -> Vec<PriceFeedId> {
let vm = WormholeVM::parse_and_verify_pyth_vm(
current_guardian_set_index,
self.data,
wormhole_guardian_sets,
is_valid_data_source,
);
let (mut attestation_index, number_of_attestations, attestation_size) = parse_and_verify_batch_attestation_header(vm.payload);
let mut updated_ids = Vec::new();
let mut i: u16 = 0;
while i < number_of_attestations {
let price_feed = PriceFeed::parse_attestation(attestation_size, vm.payload, attestation_index);
// Respect specified attestation size for forward-compatibility
attestation_index += attestation_size.as_u64();
let latest_publish_time = match latest_price_feed.get(price_feed.id).try_read() {
Some(price_feed) => price_feed.price.publish_time,
None => 0,
};
if price_feed.price.publish_time > latest_publish_time {
latest_price_feed.insert(price_feed.id, price_feed);
updated_ids.push(price_feed.id);
}
i += 1;
}
updated_ids
}
}
pub fn parse_and_verify_batch_attestation_header(encoded_payload: Bytes) -> (u64, u16, u16) {
let mut index = 0;
//Check header
let magic = u32::from_be_bytes([
encoded_payload.get(index).unwrap(),
encoded_payload.get(index + 1).unwrap(),
encoded_payload.get(index + 2).unwrap(),
encoded_payload.get(index + 3).unwrap(),
]);
require(magic == BATCH_MAGIC, PythError::InvalidMagic);
index += 4;
let major_version = u16::from_be_bytes([encoded_payload.get(index).unwrap(), encoded_payload.get(index + 1).unwrap()]);
require(major_version == 3, PythError::InvalidMajorVersion);
// additionally skip minor_version (2 bytes) as unused
index += 4;
let header_size = u16::from_be_bytes([encoded_payload.get(index).unwrap(), encoded_payload.get(index + 1).unwrap()]);
index += 2;
// From solidity impl:
// NOTE(2022-04-19): Currently, only payloadId comes after
// hdrSize. Future extra header fields must be read using a
// separate offset to respect hdrSize, i.e.:
// uint hdrIndex = 0;
// bpa.header.payloadId = UnsafeBytesLib.toUint8(encoded, index + hdrIndex);
// hdrIndex += 1;
// bpa.header.someNewField = UnsafeBytesLib.toUint32(encoded, index + hdrIndex);
// hdrIndex += 4;
// Skip remaining unknown header bytes
// index += bpa.header.hdrSize;
let payload_id = encoded_payload.get(index).unwrap();
// Payload ID of 2 required for batch header
require(payload_id == 2, PythError::InvalidPayloadId);
// Skip remaining unknown header bytes
index += header_size.as_u64();
let number_of_attestations = u16::from_be_bytes([encoded_payload.get(index).unwrap(), encoded_payload.get(index + 1).unwrap()]);
index += 2;
let attestation_size = u16::from_be_bytes([encoded_payload.get(index).unwrap(), encoded_payload.get(index + 1).unwrap()]);
index += 2;
require(
encoded_payload
.len == index + (attestation_size * number_of_attestations)
.as_u64(),
PythError::InvalidPayloadLength,
);
return (index, number_of_attestations, attestation_size);
}


@@ -0,0 +1,31 @@
library;
use pyth_interface::data_structures::data_source::DataSource;
use std::hash::{Hash, Hasher};
impl Hash for DataSource {
fn hash(self, ref mut state: Hasher) {
self.chain_id.hash(state);
self.emitter_address.hash(state);
}
}
impl DataSource {
pub fn new(chain_id: u16, emitter_address: b256) -> Self {
Self {
chain_id,
emitter_address,
}
}
#[storage(read)]
pub fn is_valid(
self,
is_valid_data_source: StorageKey<StorageMap<DataSource, bool>>,
) -> bool {
match is_valid_data_source.get(self).try_read() {
Some(bool) => bool,
None => false,
}
}
}


@@ -0,0 +1,311 @@
library;
use pyth_interface::data_structures::price::{Price, PriceFeed, PriceFeedId};
use std::bytes::Bytes;
use ::errors::PythError;
use ::utils::absolute_of_exponent;
use ::pyth_merkle_proof::validate_proof;
use ::data_structures::wormhole_light::WormholeVM;
const TAI64_DIFFERENCE = 4611686018427387904;
impl Price {
pub fn new(
confidence: u64,
exponent: u32,
price: u64,
publish_time: u64,
) -> Self {
Self {
confidence,
exponent,
price,
publish_time,
}
}
}
impl PriceFeedId {
pub fn is_target(self, target_price_feed_ids: Vec<PriceFeedId>) -> bool {
let mut i = 0;
while i < target_price_feed_ids.len {
if target_price_feed_ids.get(i).unwrap() == self {
return true;
}
i += 1;
}
false
}
pub fn is_contained_within(self, output_price_feeds: Vec<PriceFeed>) -> bool {
let mut i = 0;
while i < output_price_feeds.len {
if output_price_feeds.get(i).unwrap().id == self {
return true;
}
i += 1;
}
false
}
}
impl PriceFeed {
pub fn new(ema_price: Price, id: PriceFeedId, price: Price) -> Self {
Self {
ema_price,
id,
price,
}
}
}
impl PriceFeed {
pub fn parse_message(encoded_price_feed: Bytes) -> Self {
let mut offset = 1u64;
let (_, slice) = encoded_price_feed.split_at(offset);
let (price_feed_id, _) = slice.split_at(32);
let price_feed_id: PriceFeedId = price_feed_id.into();
offset += 32;
let price = u64::from_be_bytes([
encoded_price_feed.get(offset).unwrap(),
encoded_price_feed.get(offset + 1).unwrap(),
encoded_price_feed.get(offset + 2).unwrap(),
encoded_price_feed.get(offset + 3).unwrap(),
encoded_price_feed.get(offset + 4).unwrap(),
encoded_price_feed.get(offset + 5).unwrap(),
encoded_price_feed.get(offset + 6).unwrap(),
encoded_price_feed.get(offset + 7).unwrap(),
]);
offset += 8;
let confidence = u64::from_be_bytes([
encoded_price_feed.get(offset).unwrap(),
encoded_price_feed.get(offset + 1).unwrap(),
encoded_price_feed.get(offset + 2).unwrap(),
encoded_price_feed.get(offset + 3).unwrap(),
encoded_price_feed.get(offset + 4).unwrap(),
encoded_price_feed.get(offset + 5).unwrap(),
encoded_price_feed.get(offset + 6).unwrap(),
encoded_price_feed.get(offset + 7).unwrap(),
]);
offset += 8;
// exponent is an i32, expected to be in the range -255 to 0
let exponent = u32::from_be_bytes([
encoded_price_feed.get(offset).unwrap(),
encoded_price_feed.get(offset + 1).unwrap(),
encoded_price_feed.get(offset + 2).unwrap(),
encoded_price_feed.get(offset + 3).unwrap(),
]);
let exponent = absolute_of_exponent(exponent);
require(exponent < 256u32, PythError::InvalidExponent);
offset += 4;
let mut publish_time = u64::from_be_bytes([
encoded_price_feed.get(offset).unwrap(),
encoded_price_feed.get(offset + 1).unwrap(),
encoded_price_feed.get(offset + 2).unwrap(),
encoded_price_feed.get(offset + 3).unwrap(),
encoded_price_feed.get(offset + 4).unwrap(),
encoded_price_feed.get(offset + 5).unwrap(),
encoded_price_feed.get(offset + 6).unwrap(),
encoded_price_feed.get(offset + 7).unwrap(),
]);
// advance past publish_time (8 bytes) and skip the unused prev_publish_time (8 bytes)
offset += 16;
let ema_price = u64::from_be_bytes([
encoded_price_feed.get(offset).unwrap(),
encoded_price_feed.get(offset + 1).unwrap(),
encoded_price_feed.get(offset + 2).unwrap(),
encoded_price_feed.get(offset + 3).unwrap(),
encoded_price_feed.get(offset + 4).unwrap(),
encoded_price_feed.get(offset + 5).unwrap(),
encoded_price_feed.get(offset + 6).unwrap(),
encoded_price_feed.get(offset + 7).unwrap(),
]);
offset += 8;
let ema_confidence = u64::from_be_bytes([
encoded_price_feed.get(offset).unwrap(),
encoded_price_feed.get(offset + 1).unwrap(),
encoded_price_feed.get(offset + 2).unwrap(),
encoded_price_feed.get(offset + 3).unwrap(),
encoded_price_feed.get(offset + 4).unwrap(),
encoded_price_feed.get(offset + 5).unwrap(),
encoded_price_feed.get(offset + 6).unwrap(),
encoded_price_feed.get(offset + 7).unwrap(),
]);
offset += 8;
require(
offset <= encoded_price_feed
.len,
PythError::InvalidPriceFeedDataLength,
);
//convert publish_time from UNIX to TAI64
publish_time += TAI64_DIFFERENCE;
PriceFeed::new(
Price::new(ema_confidence, exponent, ema_price, publish_time),
price_feed_id,
Price::new(confidence, exponent, price, publish_time),
)
}
pub fn parse_attestation(attestation_size: u16, encoded_payload: Bytes, index: u64) -> Self {
// Skip product id (32 bytes) as unused
let mut attestation_index = index + 32;
let (_, slice) = encoded_payload.split_at(attestation_index);
let (price_feed_id, _) = slice.split_at(32);
let price_feed_id: PriceFeedId = price_feed_id.into();
attestation_index += 32;
let mut price = u64::from_be_bytes([
encoded_payload.get(attestation_index).unwrap(),
encoded_payload.get(attestation_index + 1).unwrap(),
encoded_payload.get(attestation_index + 2).unwrap(),
encoded_payload.get(attestation_index + 3).unwrap(),
encoded_payload.get(attestation_index + 4).unwrap(),
encoded_payload.get(attestation_index + 5).unwrap(),
encoded_payload.get(attestation_index + 6).unwrap(),
encoded_payload.get(attestation_index + 7).unwrap(),
]);
attestation_index += 8;
let mut confidence = u64::from_be_bytes([
encoded_payload.get(attestation_index).unwrap(),
encoded_payload.get(attestation_index + 1).unwrap(),
encoded_payload.get(attestation_index + 2).unwrap(),
encoded_payload.get(attestation_index + 3).unwrap(),
encoded_payload.get(attestation_index + 4).unwrap(),
encoded_payload.get(attestation_index + 5).unwrap(),
encoded_payload.get(attestation_index + 6).unwrap(),
encoded_payload.get(attestation_index + 7).unwrap(),
]);
attestation_index += 8;
// exponent is an i32, expected to be in the range -255 to 0
let exponent = u32::from_be_bytes([
encoded_payload.get(attestation_index).unwrap(),
encoded_payload.get(attestation_index + 1).unwrap(),
encoded_payload.get(attestation_index + 2).unwrap(),
encoded_payload.get(attestation_index + 3).unwrap(),
]);
let exponent = absolute_of_exponent(exponent);
require(exponent < 256u32, PythError::InvalidExponent);
attestation_index += 4;
let ema_price = u64::from_be_bytes([
encoded_payload.get(attestation_index).unwrap(),
encoded_payload.get(attestation_index + 1).unwrap(),
encoded_payload.get(attestation_index + 2).unwrap(),
encoded_payload.get(attestation_index + 3).unwrap(),
encoded_payload.get(attestation_index + 4).unwrap(),
encoded_payload.get(attestation_index + 5).unwrap(),
encoded_payload.get(attestation_index + 6).unwrap(),
encoded_payload.get(attestation_index + 7).unwrap(),
]);
attestation_index += 8;
let ema_confidence = u64::from_be_bytes([
encoded_payload.get(attestation_index).unwrap(),
encoded_payload.get(attestation_index + 1).unwrap(),
encoded_payload.get(attestation_index + 2).unwrap(),
encoded_payload.get(attestation_index + 3).unwrap(),
encoded_payload.get(attestation_index + 4).unwrap(),
encoded_payload.get(attestation_index + 5).unwrap(),
encoded_payload.get(attestation_index + 6).unwrap(),
encoded_payload.get(attestation_index + 7).unwrap(),
]);
attestation_index += 8;
// Status is an enum (encoded as u8) with the following values:
// 0 = UNKNOWN: The price feed is not currently updating for an unknown reason.
// 1 = TRADING: The price feed is updating as expected.
// 2 = HALTED: The price feed is not currently updating because trading in the product has been halted.
// 3 = AUCTION: The price feed is not currently updating because an auction is setting the price.
let status = encoded_payload.get(attestation_index).unwrap();
// Additionally skip number_of_publishers (8 bytes) and attestation_time (8 bytes) as unused; together with the status byte this advances 1 + 8 + 8 = 17 bytes
attestation_index += 17;
let mut publish_time = u64::from_be_bytes([
encoded_payload.get(attestation_index).unwrap(),
encoded_payload.get(attestation_index + 1).unwrap(),
encoded_payload.get(attestation_index + 2).unwrap(),
encoded_payload.get(attestation_index + 3).unwrap(),
encoded_payload.get(attestation_index + 4).unwrap(),
encoded_payload.get(attestation_index + 5).unwrap(),
encoded_payload.get(attestation_index + 6).unwrap(),
encoded_payload.get(attestation_index + 7).unwrap(),
]);
attestation_index += 8;
if status == 1u8 {
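// TRADING: skip prev_publish_time (8 bytes), prev_price (8 bytes) and prev_conf (8 bytes); the current values parsed above are kept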
attestation_index += 24;
} else {
// If status is not trading then the latest available price is
// the previous price that is parsed here.
// previous publish time
publish_time = u64::from_be_bytes([
encoded_payload.get(attestation_index).unwrap(),
encoded_payload.get(attestation_index + 1).unwrap(),
encoded_payload.get(attestation_index + 2).unwrap(),
encoded_payload.get(attestation_index + 3).unwrap(),
encoded_payload.get(attestation_index + 4).unwrap(),
encoded_payload.get(attestation_index + 5).unwrap(),
encoded_payload.get(attestation_index + 6).unwrap(),
encoded_payload.get(attestation_index + 7).unwrap(),
]);
attestation_index += 8;
// previous price
price = u64::from_be_bytes([
encoded_payload.get(attestation_index).unwrap(),
encoded_payload.get(attestation_index + 1).unwrap(),
encoded_payload.get(attestation_index + 2).unwrap(),
encoded_payload.get(attestation_index + 3).unwrap(),
encoded_payload.get(attestation_index + 4).unwrap(),
encoded_payload.get(attestation_index + 5).unwrap(),
encoded_payload.get(attestation_index + 6).unwrap(),
encoded_payload.get(attestation_index + 7).unwrap(),
]);
attestation_index += 8;
// previous confidence
confidence = u64::from_be_bytes([
encoded_payload.get(attestation_index).unwrap(),
encoded_payload.get(attestation_index + 1).unwrap(),
encoded_payload.get(attestation_index + 2).unwrap(),
encoded_payload.get(attestation_index + 3).unwrap(),
encoded_payload.get(attestation_index + 4).unwrap(),
encoded_payload.get(attestation_index + 5).unwrap(),
encoded_payload.get(attestation_index + 6).unwrap(),
encoded_payload.get(attestation_index + 7).unwrap(),
]);
attestation_index += 8;
}
require(
(attestation_index - index) <= attestation_size
.as_u64(),
PythError::InvalidAttestationSize,
);
//convert publish_time from UNIX to TAI64
publish_time += TAI64_DIFFERENCE;
PriceFeed::new(
Price::new(ema_confidence, exponent, ema_price, publish_time),
price_feed_id,
Price::new(confidence, exponent, price, publish_time),
)
}
}
impl PriceFeed {
pub fn extract_from_merkle_proof(digest: Bytes, encoded_proof: Bytes, offset: u64) -> (u64, self) {
// In order to avoid `ref mut` param related MemoryWriteOverlap error
let mut current_offset = offset;
let message_size = u16::from_be_bytes([
encoded_proof.get(current_offset).unwrap(),
encoded_proof.get(current_offset + 1).unwrap(),
]).as_u64();
current_offset += 2;
let (_, slice) = encoded_proof.split_at(current_offset);
let (encoded_message, _) = slice.split_at(message_size);
current_offset += message_size;
let end_offset = validate_proof(encoded_proof, encoded_message, current_offset, digest);
// Message type of 0 is a Price Feed
require(
encoded_message
.get(0)
.unwrap() == 0,
PythError::IncorrectMessageType,
);
let price_feed = PriceFeed::parse_message(encoded_message);
(end_offset, price_feed)
}
}


@@ -0,0 +1,37 @@
library;
use std::{array_conversions::u32::*, bytes::Bytes};
use ::data_structures::{
accumulator_update::AccumulatorUpdate,
batch_attestation_update::BatchAttestationUpdate,
};
const ACCUMULATOR_MAGIC: u32 = 0x504e4155;
pub enum UpdateType {
Accumulator: AccumulatorUpdate,
BatchAttestation: BatchAttestationUpdate,
}
impl UpdateType {
pub fn determine_type(data: Bytes) -> Self {
let (magic, _) = data.split_at(4); //TODO: Convert to u32 for comparison with const ACCUMULATOR_MAGIC. Use raw_ptr.read::<u32>()? Remove accumulator_magic_bytes()
if data.len > 4 && magic == accumulator_magic_bytes() {
UpdateType::Accumulator(AccumulatorUpdate::new(data))
} else {
UpdateType::BatchAttestation(BatchAttestationUpdate::new(data))
}
}
}
pub fn accumulator_magic_bytes() -> Bytes {
let accumulator_magic_array = ACCUMULATOR_MAGIC.to_be_bytes();
let mut accumulator_magic_bytes = Bytes::with_capacity(4);
accumulator_magic_bytes.push(accumulator_magic_array[0]);
accumulator_magic_bytes.push(accumulator_magic_array[1]);
accumulator_magic_bytes.push(accumulator_magic_array[2]);
accumulator_magic_bytes.push(accumulator_magic_array[3]);
accumulator_magic_bytes
}


@@ -0,0 +1,590 @@
library;
use ::data_structures::data_source::*;
use ::errors::WormholeError;
use pyth_interface::data_structures::{
data_source::DataSource,
wormhole_light::{
GuardianSet,
WormholeProvider,
},
};
use std::{
array_conversions::{
b256::*,
u16::*,
u32::*,
},
b512::B512,
block::timestamp,
bytes::Bytes,
constants::ZERO_B256,
hash::{
Hash,
keccak256,
sha256,
},
storage::storage_vec::*,
vm::evm::ecr::ec_recover_evm_address,
};
pub const UPGRADE_MODULE: b256 = 0x00000000000000000000000000000000000000000000000000000000436f7265;
impl GuardianSet {
#[storage(read)]
pub fn from_stored(stored: StorageGuardianSet) -> Self {
Self {
expiration_time: stored.expiration_time,
keys: stored.keys.load_vec(),
}
}
}
pub struct StorageGuardianSet {
expiration_time: u64,
keys: StorageKey<StorageVec<b256>>,
}
impl StorageGuardianSet {
pub fn new(expiration_time: u64, keys: StorageKey<StorageVec<b256>>) -> Self {
StorageGuardianSet {
expiration_time,
keys,
}
}
}
pub struct GuardianSetUpgrade {
action: u8,
chain: u16,
module: b256,
new_guardian_set: StorageGuardianSet,
new_guardian_set_index: u32,
}
impl GuardianSetUpgrade {
pub fn new(
action: u8,
chain: u16,
module: b256,
new_guardian_set: StorageGuardianSet,
new_guardian_set_index: u32,
) -> Self {
GuardianSetUpgrade {
action,
chain,
module,
new_guardian_set,
new_guardian_set_index,
}
}
}
impl GuardianSetUpgrade {
#[storage(read, write)]
pub fn parse_encoded_upgrade(current_guardian_set_index: u32, encoded_upgrade: Bytes) -> Self {
let mut index = 0;
let (_, slice) = encoded_upgrade.split_at(index);
let (module, _) = slice.split_at(32);
let module: b256 = module.into();
require(module == UPGRADE_MODULE, WormholeError::InvalidModule);
index += 32;
let action = encoded_upgrade.get(index).unwrap();
require(action == 2, WormholeError::InvalidGovernanceAction);
index += 1;
let chain = u16::from_be_bytes([encoded_upgrade.get(index).unwrap(), encoded_upgrade.get(index + 1).unwrap()]);
index += 2;
let new_guardian_set_index = u32::from_be_bytes([
encoded_upgrade.get(index).unwrap(),
encoded_upgrade.get(index + 1).unwrap(),
encoded_upgrade.get(index + 2).unwrap(),
encoded_upgrade.get(index + 3).unwrap(),
]);
require(
new_guardian_set_index > current_guardian_set_index,
WormholeError::NewGuardianSetIndexIsInvalid,
);
index += 4;
let guardian_length = encoded_upgrade.get(index).unwrap();
index += 1;
let mut new_guardian_set = StorageGuardianSet::new(
0,
StorageKey {
slot: sha256(("guardian_set_keys", new_guardian_set_index)),
offset: 0,
field_id: ZERO_B256,
},
);
let mut i: u8 = 0;
while i < guardian_length {
let (_, slice) = encoded_upgrade.split_at(index);
let (key, _) = slice.split_at(20);
let key: b256 = key.into();
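// the 20-byte guardian address occupies the high-order bytes of `key`; shift right by 96 bits (12 bytes) to right-align it in the b256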
new_guardian_set.keys.push(key.rsh(96));
index += 20;
i += 1;
}
require(
new_guardian_set
.keys
.len() > 0,
WormholeError::NewGuardianSetIsEmpty,
);
require(
encoded_upgrade
.len == index,
WormholeError::InvalidGuardianSetUpgradeLength,
);
GuardianSetUpgrade::new(
action,
chain,
module,
new_guardian_set,
new_guardian_set_index,
)
}
}
impl WormholeProvider {
pub fn new(governance_chain_id: u16, governance_contract: b256) -> Self {
WormholeProvider {
governance_chain_id,
governance_contract,
}
}
}
pub struct GuardianSignature {
guardian_index: u8,
r: b256,
s: b256,
v: u8,
}
impl GuardianSignature {
pub fn new(guardian_index: u8, r: b256, s: b256, v: u8) -> Self {
GuardianSignature {
guardian_index,
r,
s,
v,
}
}
// eip-2098: Compact Signature Representation
pub fn compact(self) -> B512 {
let y_parity = b256::from_be_bytes([
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
0u8,
self.v - 27u8,
]);
let shifted_y_parity = y_parity.lsh(255);
let y_parity_and_s = b256::binary_or(shifted_y_parity, self.s);
B512::from((self.r, y_parity_and_s))
}
}
impl GuardianSignature {
pub fn verify(
self,
guardian_set_key: b256,
hash: b256,
index: u64,
last_index: u64,
) {
// Ensure that provided signature indices are ascending only
if index > 0 {
require(
self.guardian_index
.as_u64() > last_index,
WormholeError::SignatureIndicesNotAscending,
);
}
let recovered_signer = ec_recover_evm_address(self.compact(), hash);
require(
recovered_signer
.is_ok() && recovered_signer
.unwrap()
.value == guardian_set_key,
WormholeError::SignatureInvalid,
);
}
}
pub struct WormholeVM {
version: u8,
guardian_set_index: u32,
governance_action_hash: b256,
// signatures: Vec<GuardianSignature>, // Shown here to represent data layout of VM, but not needed
timestamp: u32,
nonce: u32,
emitter_chain_id: u16,
emitter_address: b256,
sequence: u64,
consistency_level: u8,
payload: Bytes,
}
impl WormholeVM {
pub fn default() -> Self {
WormholeVM {
version: 0u8,
guardian_set_index: 0u32,
governance_action_hash: ZERO_B256,
timestamp: 0u32,
nonce: 0u32,
emitter_chain_id: 0u16,
emitter_address: ZERO_B256,
sequence: 0u64,
consistency_level: 0u8,
payload: Bytes::new(),
}
}
pub fn new(
version: u8,
guardian_set_index: u32,
governance_action_hash: b256,
timestamp_: u32,
nonce: u32,
emitter_chain_id: u16,
emitter_address: b256,
sequence: u64,
consistency_level: u8,
payload: Bytes,
) -> Self {
WormholeVM {
version,
guardian_set_index,
governance_action_hash,
timestamp: timestamp_,
nonce,
emitter_chain_id,
emitter_address,
sequence,
consistency_level,
payload,
}
}
}
impl WormholeVM {
#[storage(read)]
pub fn parse_and_verify_wormhole_vm(
current_guardian_set_index: u32,
encoded_vm: Bytes,
wormhole_guardian_sets: StorageKey<StorageMap<u32, StorageGuardianSet>>,
) -> Self {
let mut index = 0;
let version = encoded_vm.get(index);
require(
version
.is_some() && version
.unwrap() == 1,
WormholeError::VMVersionIncompatible,
);
index += 1;
let (_, slice) = encoded_vm.split_at(index);
let (slice, _) = slice.split_at(4); //replace with slice()
let guardian_set_index = u32::from_be_bytes([
//replace with func
slice.get(0).unwrap(),
slice.get(1).unwrap(),
slice.get(2).unwrap(),
slice.get(3).unwrap(),
]);
index += 4;
let guardian_set = wormhole_guardian_sets.get(guardian_set_index).try_read();
require(guardian_set.is_some(), WormholeError::GuardianSetNotFound);
let guardian_set = guardian_set.unwrap();
require(
guardian_set
.keys
.len() > 0,
WormholeError::InvalidGuardianSetKeysLength,
);
require(
guardian_set_index == current_guardian_set_index && (guardian_set
.expiration_time == 0 || guardian_set
.expiration_time > timestamp()),
WormholeError::InvalidGuardianSet,
);
let signers_length = encoded_vm.get(index);
require(
signers_length
.is_some(),
WormholeError::SignersLengthIrretrievable,
);
let signers_length = signers_length.unwrap().as_u64();
index += 1;
// 66 is the length of each guardian signature
// 1 (guardianIndex) + 32 (r) + 32 (s) + 1 (v)
let hash_index = index + (signers_length * 66);
require(
hash_index < encoded_vm
.len,
WormholeError::InvalidSignatureLength,
);
let (_, slice) = encoded_vm.split_at(hash_index);
let hash = keccak256(keccak256(slice));
let mut last_index = 0;
let mut i = 0;
while i < signers_length {
let guardian_index = encoded_vm.get(index);
require(
guardian_index
.is_some(),
WormholeError::GuardianIndexIrretrievable,
);
let guardian_index = guardian_index.unwrap();
index += 1;
let (_, slice) = encoded_vm.split_at(index);
let (slice, remainder) = slice.split_at(32);
let r: b256 = slice.into();
index += 32;
let (slice, remainder) = remainder.split_at(32);
let s: b256 = slice.into();
index += 32;
let v = remainder.get(0);
require(v.is_some(), WormholeError::SignatureVIrretrievable);
let v = v.unwrap() + 27;
index += 1;
let guardian_set_key = guardian_set.keys.get(guardian_index.as_u64());
require(
guardian_set_key
.is_some(),
WormholeError::GuardianSetKeyIrretrievable,
);
GuardianSignature::new(guardian_index, r, s, v)
.verify(guardian_set_key.unwrap().read(), hash, i, last_index);
last_index = guardian_index.as_u64();
i += 1;
}
/*
We're using a fixed point number transformation with 1 decimal to deal with rounding.
This quorum check is critical to assessing whether we have enough Guardian signatures to validate a VM.
If guardian set key length is 0 and signatures length is 0, this could compromise the integrity of both VM and signature verification.
*/
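// e.g. with 19 guardian keys: ((((19 * 10) / 3) * 2) / 10) + 1 = 13, so at least 13 signatures (more than 2/3) are required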
require(
((((guardian_set
.keys
.len() * 10) / 3) * 2) / 10 + 1) <= signers_length,
WormholeError::NoQuorum,
);
//ignore VM.signatures
let (_, slice) = encoded_vm.split_at(index);
let (slice, _) = slice.split_at(4);
let _timestamp = u32::from_be_bytes([
slice.get(0).unwrap(),
slice.get(1).unwrap(),
slice.get(2).unwrap(),
slice.get(3).unwrap(),
]);
index += 4;
let (_, slice) = encoded_vm.split_at(index);
let (slice, _) = slice.split_at(4);
let nonce = u32::from_be_bytes([
slice.get(0).unwrap(),
slice.get(1).unwrap(),
slice.get(2).unwrap(),
slice.get(3).unwrap(),
]);
index += 4;
let (_, slice) = encoded_vm.split_at(index);
let (slice, _) = slice.split_at(2);
let emitter_chain_id = u16::from_be_bytes([slice.get(0).unwrap(), slice.get(1).unwrap()]);
index += 2;
let (_, slice) = encoded_vm.split_at(index);
let (slice, _) = slice.split_at(32);
let emitter_address: b256 = slice.into();
index += 32;
let (_, slice) = encoded_vm.split_at(index);
let (slice, _) = slice.split_at(8);
let sequence = u64::from_be_bytes([
slice.get(0).unwrap(),
slice.get(1).unwrap(),
slice.get(2).unwrap(),
slice.get(3).unwrap(),
slice.get(4).unwrap(),
slice.get(5).unwrap(),
slice.get(6).unwrap(),
slice.get(7).unwrap(),
]);
index += 8;
let consistency_level = encoded_vm.get(index);
require(
consistency_level
.is_some(),
WormholeError::ConsistencyLevelIrretrievable,
);
index += 1;
require(index <= encoded_vm.len, WormholeError::InvalidPayloadLength);
let (_, payload) = encoded_vm.split_at(index);
WormholeVM::new(
version
.unwrap(),
guardian_set_index,
hash,
_timestamp,
nonce,
emitter_chain_id,
emitter_address,
sequence,
consistency_level
.unwrap(),
payload,
)
}
pub fn parse_initial_wormhole_vm(encoded_vm: Bytes) -> Self {
let mut index = 0;
let version = encoded_vm.get(index);
require(
version
.is_some() && version
.unwrap() == 1,
WormholeError::VMVersionIncompatible,
);
index += 1;
let (_, slice) = encoded_vm.split_at(index);
let (slice, _) = slice.split_at(4); //replace with slice()
let guardian_set_index = u32::from_be_bytes([
//replace with func
slice.get(0).unwrap(),
slice.get(1).unwrap(),
slice.get(2).unwrap(),
slice.get(3).unwrap(),
]);
index += 4;
let signers_length = encoded_vm.get(index);
require(
signers_length
.is_some(),
WormholeError::SignersLengthIrretrievable,
);
let signers_length = signers_length.unwrap().as_u64();
index += 1;
// 66 is the length of each guardian signature
// 1 (guardianIndex) + 32 (r) + 32 (s) + 1 (v)
let hash_index = index + (signers_length * 66);
require(
hash_index < encoded_vm
.len,
WormholeError::InvalidSignatureLength,
);
let (_, slice) = encoded_vm.split_at(hash_index);
let hash = keccak256(keccak256(slice));
// account for signatures
index += 66 * signers_length;
let (_, slice) = encoded_vm.split_at(index);
let (slice, _) = slice.split_at(4);
let timestamp_ = u32::from_be_bytes([
slice.get(0).unwrap(),
slice.get(1).unwrap(),
slice.get(2).unwrap(),
slice.get(3).unwrap(),
]);
index += 4;
let (_, slice) = encoded_vm.split_at(index);
let (slice, _) = slice.split_at(4);
let nonce = u32::from_be_bytes([
slice.get(0).unwrap(),
slice.get(1).unwrap(),
slice.get(2).unwrap(),
slice.get(3).unwrap(),
]);
index += 4;
let (_, slice) = encoded_vm.split_at(index);
let (slice, _) = slice.split_at(2);
let emitter_chain_id = u16::from_be_bytes([slice.get(0).unwrap(), slice.get(1).unwrap()]);
index += 2;
let (_, slice) = encoded_vm.split_at(index);
let (slice, _) = slice.split_at(32);
let emitter_address: b256 = slice.into();
index += 32;
let (_, slice) = encoded_vm.split_at(index);
let (slice, _) = slice.split_at(8);
let sequence = u64::from_be_bytes([
slice.get(0).unwrap(),
slice.get(1).unwrap(),
slice.get(2).unwrap(),
slice.get(3).unwrap(),
slice.get(4).unwrap(),
slice.get(5).unwrap(),
slice.get(6).unwrap(),
slice.get(7).unwrap(),
]);
index += 8;
let consistency_level = encoded_vm.get(index);
require(
consistency_level
.is_some(),
WormholeError::ConsistencyLevelIrretrievable,
);
index += 1;
require(index <= encoded_vm.len, WormholeError::InvalidPayloadLength);
let (_, payload) = encoded_vm.split_at(index);
WormholeVM::new(
version
.unwrap(),
guardian_set_index,
hash,
timestamp_,
nonce,
emitter_chain_id,
emitter_address,
sequence,
consistency_level
.unwrap(),
payload,
)
}
}
impl WormholeVM {
#[storage(read)]
pub fn parse_and_verify_pyth_vm(
current_guardian_set_index: u32,
encoded_vm: Bytes,
wormhole_guardian_sets: StorageKey<StorageMap<u32, StorageGuardianSet>>,
is_valid_data_source: StorageKey<StorageMap<DataSource, bool>>,
) -> Self {
let vm = WormholeVM::parse_and_verify_wormhole_vm(
current_guardian_set_index,
encoded_vm,
wormhole_guardian_sets,
);
require(
DataSource::new(vm.emitter_chain_id, vm.emitter_address)
.is_valid(is_valid_data_source),
WormholeError::InvalidUpdateDataSource,
);
vm
}
}


@@ -0,0 +1,63 @@
library;
pub enum PythError {
FeesCanOnlyBePaidInTheBaseAsset: (),
GuardianSetNotFound: (),
IncorrectMessageType: (),
InsufficientFee: (),
InvalidArgument: (),
InvalidAttestationSize: (),
InvalidDataSourcesLength: (),
InvalidExponent: (),
InvalidHeaderSize: (),
InvalidMagic: (),
InvalidMajorVersion: (),
InvalidMinorVersion: (),
InvalidPayloadId: (),
InvalidPayloadLength: (),
InvalidPriceFeedDataLength: (),
InvalidProof: (),
InvalidUpdateData: (),
InvalidUpdateDataLength: (),
InvalidUpdateDataSource: (),
InvalidUpgradeModule: (),
LengthOfPriceFeedIdsAndPublishTimesMustMatch: (),
NewGuardianSetIsEmpty: (),
NumberOfUpdatesIrretrievable: (),
/// Emitted when a Price's `publish_time` is stale.
OutdatedPrice: (),
/// Emitted when a PriceFeed could not be retrieved.
PriceFeedNotFound: (),
PriceFeedNotFoundWithinRange: (),
WormholeGovernanceActionNotFound: (),
}
pub enum WormholeError {
ConsistencyLevelIrretrievable: (),
GovernanceActionAlreadyConsumed: (),
GuardianIndexIrretrievable: (),
GuardianSetHasExpired: (),
GuardianSetKeyIrretrievable: (),
GuardianSetNotFound: (),
InvalidGovernanceAction: (),
InvalidGovernanceChain: (),
InvalidGovernanceContract: (),
InvalidGuardianSet: (),
InvalidGuardianSetKeysLength: (),
InvalidGuardianSetUpgrade: (),
InvalidGuardianSetUpgradeLength: (),
InvalidModule: (),
InvalidPayloadLength: (),
InvalidSignatureLength: (),
InvalidUpdateDataSource: (),
NewGuardianSetIsEmpty: (),
NewGuardianSetIndexIsInvalid: (),
NoQuorum: (),
NotSignedByCurrentGuardianSet: (),
SignatureInvalid: (),
SignatureIndicesNotAscending: (),
SignatureVIrretrievable: (),
SignersLengthIrretrievable: (),
VMSignatureInvalid: (),
VMVersionIncompatible: (),
}


@@ -0,0 +1,21 @@
library;
use pyth_interface::data_structures::{
data_source::DataSource,
price::PriceFeedId,
wormhole_light::WormholeProvider,
};
pub struct ConstructedEvent {
guardian_set_index: u32,
}
pub struct NewGuardianSetEvent {
governance_action_hash: b256,
// new_guardian_set: GuardianSet, // TODO: Uncomment when SDK supports logs with nested Vecs https://github.com/FuelLabs/fuels-rs/issues/1046
new_guardian_set_index: u32,
}
pub struct UpdatedPriceFeedsEvent {
updated_price_feeds: Vec<PriceFeedId>,
}


@@ -0,0 +1,617 @@
contract;
mod errors;
mod utils;
mod pyth_merkle_proof;
mod data_structures;
mod events;
use std::{
block::timestamp,
bytes::Bytes,
call_frames::msg_asset_id,
constants::{
BASE_ASSET_ID,
ZERO_B256,
},
context::msg_amount,
hash::Hash,
storage::{
storage_map::StorageMap,
storage_vec::*,
},
u256::U256,
};
use ::errors::{PythError, WormholeError};
use ::utils::{difference, total_fee};
use ::data_structures::{
batch_attestation_update::parse_and_verify_batch_attestation_header,
data_source::*,
price::*,
update_type::UpdateType,
wormhole_light::*,
};
use ::events::{ConstructedEvent, NewGuardianSetEvent, UpdatedPriceFeedsEvent};
use pyth_interface::{
data_structures::{
data_source::DataSource,
price::{
Price,
PriceFeed,
PriceFeedId,
},
wormhole_light::{
GuardianSet,
WormholeProvider,
},
},
PythCore,
PythInfo,
PythInit,
WormholeGuardians,
};
use ownership::*;
use src5::{SRC5, State};
configurable {
DEPLOYER: Identity = Identity::Address(Address::from(ZERO_B256)),
}
storage {
// | |
// --+-- PYTH STATE --+--
// | |
// (chainId, emitterAddress) => isValid; takes advantage of
// constant-time mapping lookup for VM verification
is_valid_data_source: StorageMap<DataSource, bool> = StorageMap {},
// Mapping of cached price information
// priceId => PriceInfo
latest_price_feed: StorageMap<PriceFeedId, PriceFeed> = StorageMap {},
single_update_fee: u64 = 0,
// For tracking all active emitter/chain ID pairs
valid_data_sources: StorageVec<DataSource> = StorageVec {},
/// Maximum acceptable time period before price is considered to be stale.
/// This includes attestation delay, block time, and potential clock drift
/// between the source/target chains.
valid_time_period_seconds: u64 = 0,
// | |
// --+-- WORMHOLE STATE --+--
// | |
// Mapping of consumed governance actions
wormhole_consumed_governance_actions: StorageMap<b256, bool> = StorageMap {},
// Mapping of guardian_set_index => guardian set
wormhole_guardian_sets: StorageMap<u32, StorageGuardianSet> = StorageMap {},
// Current active guardian set index
wormhole_guardian_set_index: u32 = 0,
// Using Ethereum's Wormhole governance
wormhole_provider: WormholeProvider = WormholeProvider {
governance_chain_id: 0u16,
governance_contract: ZERO_B256,
},
}
impl SRC5 for Contract {
#[storage(read)]
fn owner() -> State {
_owner()
}
}
impl PythCore for Contract {
#[storage(read)]
fn ema_price(price_feed_id: PriceFeedId) -> Price {
ema_price_no_older_than(valid_time_period(), price_feed_id)
}
#[storage(read)]
fn ema_price_no_older_than(time_period: u64, price_feed_id: PriceFeedId) -> Price {
ema_price_no_older_than(time_period, price_feed_id)
}
#[storage(read)]
fn ema_price_unsafe(price_feed_id: PriceFeedId) -> Price {
ema_price_unsafe(price_feed_id)
}
#[storage(read), payable]
fn parse_price_feed_updates(
max_publish_time: u64,
min_publish_time: u64,
target_price_feed_ids: Vec<PriceFeedId>,
update_data: Vec<Bytes>,
) -> Vec<PriceFeed> {
require(
msg_asset_id() == BASE_ASSET_ID,
PythError::FeesCanOnlyBePaidInTheBaseAsset,
);
let required_fee = update_fee(update_data);
require(msg_amount() >= required_fee, PythError::InsufficientFee);
let mut output_price_feeds: Vec<PriceFeed> = Vec::with_capacity(target_price_feed_ids.len);
let mut i = 0;
while i < update_data.len {
let data = update_data.get(i).unwrap();
match UpdateType::determine_type(data) {
UpdateType::Accumulator(accumulator_update) => {
let (mut offset, digest, number_of_updates, encoded) = accumulator_update.verify_and_parse(
current_guardian_set_index(),
storage
.wormhole_guardian_sets,
storage
.is_valid_data_source,
);
let mut i_2 = 0;
while i_2 < number_of_updates {
let (new_offset, price_feed) = PriceFeed::extract_from_merkle_proof(digest, encoded, offset);
offset = new_offset;
if price_feed.id.is_target(target_price_feed_ids) == false {
i_2 += 1;
continue;
}
if price_feed.price.publish_time >= min_publish_time && price_feed.price.publish_time <= max_publish_time {
// check if output_price_feeds already contains a PriceFeed with price_feed.id, if so continue as we only want 1
// output PriceFeed per target ID
if price_feed.id.is_contained_within(output_price_feeds) {
i_2 += 1;
continue;
}
output_price_feeds.push(price_feed)
}
i_2 += 1;
}
require(offset == encoded.len, PythError::InvalidUpdateDataLength);
},
UpdateType::BatchAttestation(batch_attestation_update) => {
let vm = WormholeVM::parse_and_verify_pyth_vm(
current_guardian_set_index(),
batch_attestation_update
.data,
storage
.wormhole_guardian_sets,
storage
.is_valid_data_source,
);
let (mut attestation_index, number_of_attestations, attestation_size) = parse_and_verify_batch_attestation_header(vm.payload);
let attestation_size_u16 = attestation_size.as_u64();
let mut i_2: u16 = 0;
while i_2 < number_of_attestations {
let (_, slice) = vm.payload.split_at(attestation_index + 32);
let (price_feed_id, _) = slice.split_at(32);
let price_feed_id: PriceFeedId = price_feed_id.into();
if price_feed_id.is_target(target_price_feed_ids) == false {
attestation_index += attestation_size_u16;
i_2 += 1;
continue;
}
let price_feed = PriceFeed::parse_attestation(attestation_size, vm.payload, attestation_index);
if price_feed.price.publish_time >= min_publish_time && price_feed.price.publish_time <= max_publish_time {
// check if output_price_feeds already contains a PriceFeed with price_feed.id, if so continue;
// as we only want 1 output PriceFeed per target ID
if price_feed.id.is_contained_within(output_price_feeds) {
attestation_index += attestation_size_u16;
i_2 += 1;
continue;
}
output_price_feeds.push(price_feed)
}
attestation_index += attestation_size_u16;
i_2 += 1;
}
}
}
i += 1;
}
require(
target_price_feed_ids
.len == output_price_feeds
.len,
PythError::PriceFeedNotFoundWithinRange,
);
output_price_feeds
}
#[storage(read)]
fn price(price_feed_id: PriceFeedId) -> Price {
price_no_older_than(valid_time_period(), price_feed_id)
}
#[storage(read)]
fn price_no_older_than(time_period: u64, price_feed_id: PriceFeedId) -> Price {
price_no_older_than(time_period, price_feed_id)
}
#[storage(read)]
fn price_unsafe(price_feed_id: PriceFeedId) -> Price {
price_unsafe(price_feed_id)
}
#[storage(read)]
fn update_fee(update_data: Vec<Bytes>) -> u64 {
update_fee(update_data)
}
#[storage(read, write), payable]
fn update_price_feeds(update_data: Vec<Bytes>) {
update_price_feeds(update_data)
}
#[storage(read, write), payable]
fn update_price_feeds_if_necessary(
price_feed_ids: Vec<PriceFeedId>,
publish_times: Vec<u64>,
update_data: Vec<Bytes>,
) {
require(
price_feed_ids
.len == publish_times
.len,
PythError::LengthOfPriceFeedIdsAndPublishTimesMustMatch,
);
let mut i = 0;
while i < price_feed_ids.len {
if latest_publish_time(price_feed_ids.get(i).unwrap()) < publish_times.get(i).unwrap()
{
update_price_feeds(update_data);
return;
}
i += 1;
}
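// If every feed already has a publish time at least as new as the one supplied,
// the loop falls through and the call returns without performing an update.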
}
#[storage(read)]
fn valid_time_period() -> u64 {
valid_time_period()
}
}
/// PythCore Private Functions ///
#[storage(read)]
fn ema_price_no_older_than(time_period: u64, price_feed_id: PriceFeedId) -> Price {
let price = ema_price_unsafe(price_feed_id);
require(
difference(timestamp(), price.publish_time) <= time_period,
PythError::OutdatedPrice,
);
price
}
#[storage(read)]
fn ema_price_unsafe(price_feed_id: PriceFeedId) -> Price {
let price_feed = storage.latest_price_feed.get(price_feed_id).try_read();
require(price_feed.is_some(), PythError::PriceFeedNotFound);
price_feed.unwrap().ema_price
}
#[storage(read)]
fn price_no_older_than(time_period: u64, price_feed_id: PriceFeedId) -> Price {
let price = price_unsafe(price_feed_id);
require(
difference(timestamp(), price.publish_time) <= time_period,
PythError::OutdatedPrice,
);
price
}
#[storage(read)]
fn price_unsafe(price_feed_id: PriceFeedId) -> Price {
let price_feed = storage.latest_price_feed.get(price_feed_id).try_read();
require(price_feed.is_some(), PythError::PriceFeedNotFound);
price_feed.unwrap().price
}
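// The fee scales linearly with the number of updates in `update_data`: each price update
// inside an accumulator message counts once, and each batch attestation VAA counts as one.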
#[storage(read)]
fn update_fee(update_data: Vec<Bytes>) -> u64 {
let mut total_number_of_updates = 0;
let mut i = 0;
while i < update_data.len {
let data = update_data.get(i).unwrap();
match UpdateType::determine_type(data) {
UpdateType::Accumulator(accumulator_update) => {
let proof_size_offset = accumulator_update.verify();
total_number_of_updates += accumulator_update.total_updates(proof_size_offset);
},
UpdateType::BatchAttestation(_) => {
total_number_of_updates += 1;
},
}
i += 1;
}
total_fee(total_number_of_updates, storage.single_update_fee)
}
#[storage(read, write), payable]
fn update_price_feeds(update_data: Vec<Bytes>) {
require(
msg_asset_id() == BASE_ASSET_ID,
PythError::FeesCanOnlyBePaidInTheBaseAsset,
);
let mut total_number_of_updates = 0;
// let mut updated_price_feeds: Vec<PriceFeedId> = Vec::new(); // TODO: requires append for Vec
let mut i = 0;
while i < update_data.len {
let data = update_data.get(i).unwrap();
match UpdateType::determine_type(data) {
UpdateType::Accumulator(accumulator_update) => {
let (number_of_updates, _updated_ids) = accumulator_update.update_price_feeds(
current_guardian_set_index(),
storage
.wormhole_guardian_sets,
storage
.latest_price_feed,
storage
.is_valid_data_source,
);
// updated_price_feeds.append(updated_ids); // TODO: requires append for Vec
total_number_of_updates += number_of_updates;
},
UpdateType::BatchAttestation(batch_attestation_update) => {
let _updated_ids = batch_attestation_update.update_price_feeds(
current_guardian_set_index(),
storage
.wormhole_guardian_sets,
storage
.latest_price_feed,
storage
.is_valid_data_source,
);
// updated_price_feeds.append(updated_ids); // TODO: requires append for Vec
total_number_of_updates += 1;
},
}
i += 1;
}
let required_fee = total_fee(total_number_of_updates, storage.single_update_fee);
require(msg_amount() >= required_fee, PythError::InsufficientFee);
// log(UpdatedPriceFeedsEvent { // TODO: requires append for Vec
// updated_price_feeds,
// })
}
#[storage(read)]
fn valid_time_period() -> u64 {
storage.valid_time_period_seconds.read()
}
impl PythInit for Contract {
#[storage(read, write)]
fn constructor(
data_sources: Vec<DataSource>,
single_update_fee: u64,
valid_time_period_seconds: u64,
wormhole_guardian_set_upgrade: Bytes,
) {
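// One-shot initialiser: ownership is assigned to DEPLOYER, used to gate this call,
// and renounced at the end so the constructor cannot be called a second time.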
initialize_ownership(DEPLOYER);
only_owner();
require(data_sources.len > 0, PythError::InvalidDataSourcesLength);
let mut i = 0;
while i < data_sources.len {
let data_source = data_sources.get(i).unwrap();
storage.is_valid_data_source.insert(data_source, true);
storage.valid_data_sources.push(data_source);
i += 1;
}
storage
.valid_time_period_seconds
.write(valid_time_period_seconds);
storage.single_update_fee.write(single_update_fee);
let vm = WormholeVM::parse_initial_wormhole_vm(wormhole_guardian_set_upgrade);
let upgrade = GuardianSetUpgrade::parse_encoded_upgrade(0, vm.payload);
storage
.wormhole_consumed_governance_actions
.insert(vm.governance_action_hash, true);
storage
.wormhole_guardian_sets
.insert(upgrade.new_guardian_set_index, upgrade.new_guardian_set);
storage
.wormhole_guardian_set_index
.write(upgrade.new_guardian_set_index);
storage
.wormhole_provider
.write(WormholeProvider::new(vm.emitter_chain_id, vm.emitter_address));
renounce_ownership();
log(ConstructedEvent {
guardian_set_index: upgrade.new_guardian_set_index,
})
}
}
impl PythInfo for Contract {
#[storage(read)]
fn valid_data_sources() -> Vec<DataSource> {
storage.valid_data_sources.load_vec()
}
#[storage(read)]
fn latest_publish_time(price_feed_id: PriceFeedId) -> u64 {
latest_publish_time(price_feed_id)
}
#[storage(read)]
fn price_feed_exists(price_feed_id: PriceFeedId) -> bool {
match storage.latest_price_feed.get(price_feed_id).try_read() {
Some(_) => true,
None => false,
}
}
#[storage(read)]
fn price_feed_unsafe(price_feed_id: PriceFeedId) -> PriceFeed {
let price_feed = storage.latest_price_feed.get(price_feed_id).try_read();
require(price_feed.is_some(), PythError::PriceFeedNotFound);
price_feed.unwrap()
}
#[storage(read)]
fn single_update_fee() -> u64 {
storage.single_update_fee.read()
}
#[storage(read)]
fn valid_data_source(data_source: DataSource) -> bool {
data_source.is_valid(storage.is_valid_data_source)
}
}
/// PythInfo Private Functions ///
#[storage(read)]
fn latest_publish_time(price_feed_id: PriceFeedId) -> u64 {
match storage.latest_price_feed.get(price_feed_id).try_read() {
Some(price_feed) => price_feed.price.publish_time,
None => 0,
}
}
impl WormholeGuardians for Contract {
#[storage(read)]
fn current_guardian_set_index() -> u32 {
current_guardian_set_index()
}
#[storage(read)]
fn current_wormhole_provider() -> WormholeProvider {
current_wormhole_provider()
}
#[storage(read)]
fn guardian_set(index: u32) -> GuardianSet {
let stored_guardian_set = storage.wormhole_guardian_sets.get(index).try_read();
require(
stored_guardian_set
.is_some(),
PythError::GuardianSetNotFound,
);
GuardianSet::from_stored(stored_guardian_set.unwrap())
}
#[storage(read)]
fn governance_action_is_consumed(governance_action_hash: b256) -> bool {
governance_action_is_consumed(governance_action_hash)
}
#[storage(read, write)]
fn submit_new_guardian_set(encoded_vm: Bytes) {
submit_new_guardian_set(encoded_vm)
}
}
/// WormholeGuardians Private Functions ///
#[storage(read)]
fn current_guardian_set_index() -> u32 {
storage.wormhole_guardian_set_index.read()
}
#[storage(read)]
fn current_wormhole_provider() -> WormholeProvider {
storage.wormhole_provider.read()
}
#[storage(read)]
fn governance_action_is_consumed(governance_action_hash: b256) -> bool {
match storage.wormhole_consumed_governance_actions.get(governance_action_hash).try_read() {
Some(bool_) => bool_,
None => false,
}
}
#[storage(read, write)]
fn submit_new_guardian_set(encoded_vm: Bytes) {
let vm = WormholeVM::parse_and_verify_wormhole_vm(
current_guardian_set_index(),
encoded_vm,
storage
.wormhole_guardian_sets,
);
require(
vm.guardian_set_index == current_guardian_set_index(),
WormholeError::NotSignedByCurrentGuardianSet,
);
let current_wormhole_provider = current_wormhole_provider();
require(
vm.emitter_chain_id == current_wormhole_provider
.governance_chain_id,
WormholeError::InvalidGovernanceChain,
);
require(
vm.emitter_address == current_wormhole_provider
.governance_contract,
WormholeError::InvalidGovernanceContract,
);
require(
governance_action_is_consumed(vm.governance_action_hash) == false,
WormholeError::GovernanceActionAlreadyConsumed,
);
let current_guardian_set_index = current_guardian_set_index();
let upgrade = GuardianSetUpgrade::parse_encoded_upgrade(current_guardian_set_index, vm.payload);
storage
.wormhole_consumed_governance_actions
.insert(vm.governance_action_hash, true);
// Set expiry if current GuardianSet exists
let current_guardian_set = storage.wormhole_guardian_sets.get(current_guardian_set_index).try_read();
if current_guardian_set.is_some() {
let mut current_guardian_set = current_guardian_set.unwrap();
current_guardian_set.expiration_time = timestamp() + 86400;
storage
.wormhole_guardian_sets
.insert(current_guardian_set_index, current_guardian_set);
}
storage
.wormhole_guardian_sets
.insert(upgrade.new_guardian_set_index, upgrade.new_guardian_set);
storage
.wormhole_guardian_set_index
.write(upgrade.new_guardian_set_index);
log(NewGuardianSetEvent {
governance_action_hash: vm.governance_action_hash,
new_guardian_set_index: upgrade.new_guardian_set_index,
})
}

View File

@ -0,0 +1,64 @@
library;
use std::{bytes::Bytes, hash::{Hash, keccak256}};
use ::errors::PythError;
pub const MERKLE_LEAF_PREFIX = 0u8;
pub const MERKLE_NODE_PREFIX = 1u8;
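// Pyth accumulator Merkle hashing: a leaf digest is the first 20 bytes of
// keccak256(MERKLE_LEAF_PREFIX ++ data); an inner node digest is the first 20 bytes of
// keccak256(MERKLE_NODE_PREFIX ++ min(a, b) ++ max(a, b)), i.e. children are ordered by value.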
fn leaf_hash(data: Bytes) -> Bytes {
let mut bytes = Bytes::new();
bytes.push(MERKLE_LEAF_PREFIX);
bytes.append(data);
let (slice, _) = Bytes::from(keccak256(bytes)).split_at(20);
slice
}
fn node_hash(child_a: Bytes, child_b: Bytes) -> Bytes {
let mut bytes = Bytes::with_capacity(41);
bytes.push(MERKLE_NODE_PREFIX);
let a: b256 = child_a.into();
let b: b256 = child_b.into();
if a > b {
bytes.append(child_b);
bytes.append(child_a);
} else {
bytes.append(child_a);
bytes.append(child_b);
}
let (slice, _) = Bytes::from(keccak256(bytes)).split_at(20);
slice
}
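// Walks a Merkle inclusion proof: the byte at `proof_offset` gives the number of sibling
// digests, each 20 bytes long, which are folded into the running digest via `node_hash`.
// Returns the offset just past the end of the proof.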
pub fn validate_proof(
encoded_proof: Bytes,
leaf_data: Bytes,
ref mut proof_offset: u64,
root: Bytes,
) -> u64 {
let mut current_digest = leaf_hash(leaf_data);
let proof_size = encoded_proof.get(proof_offset).unwrap().as_u64();
proof_offset += 1;
let mut i = 0;
while i < proof_size {
let (_, slice) = encoded_proof.split_at(proof_offset);
let (sibling_digest, _) = slice.split_at(20);
proof_offset += 20;
current_digest = node_hash(current_digest, sibling_digest);
i += 1;
}
// TODO: investigate failing require statement on the accumulator update path.
// require(current_digest == root, PythError::InvalidProof);
proof_offset
}

View File

@ -0,0 +1,21 @@
library;
pub fn difference(x: u64, y: u64) -> u64 {
if x > y { x - y } else { y - x }
}
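// Interprets `exponent` as the two's-complement bit pattern of a non-positive i32 and
// returns its absolute value; for example 0xFFFFFFF8 (-8) maps to 8.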
pub fn absolute_of_exponent(exponent: u32) -> u32 {
if exponent == 0u32 {
exponent
} else {
u32::max() - exponent + 1
}
}
#[storage(read)]
pub fn total_fee(
total_number_of_updates: u64,
single_update_fee: StorageKey<u64>,
) -> u64 {
total_number_of_updates * single_update_fee.read()
}

View File

@ -0,0 +1,8 @@
[project]
authors = ["Fuel Labs <contact@fuel.sh>"]
entry = "interface.sw"
license = "Apache-2.0"
name = "pyth_interface"
[dependencies]
src5 = { git = "https://github.com/FuelLabs/sway-standards", tag = "v0.3.3" }

View File

@ -0,0 +1,5 @@
library;
pub mod data_source;
pub mod price;
pub mod wormhole_light;

View File

@ -0,0 +1,6 @@
library;
pub struct DataSource {
chain_id: u16,
emitter_address: b256,
}

View File

@ -0,0 +1,35 @@
library;
// A price with a degree of uncertainty, represented as a price +- a confidence interval.
//
// The confidence interval roughly corresponds to the standard error of a normal distribution.
// Both the price and confidence are stored in a fixed-point numeric representation,
// `x * (10^expo)`, where `expo` is the exponent.
//
// Please refer to the documentation at https://docs.pyth.network/documentation/pythnet-price-feeds/best-practices for how
// to use this price safely.
pub struct Price {
// Confidence interval around the price
confidence: u64,
// Price exponent
// This value represents the absolute value of an i32 in the range -255 to 0. Values other than 0 should be considered negative:
// exponent of 5 means the Pyth Price exponent was -5
exponent: u32,
// Price
price: u64,
// The TAI64 timestamp describing when the price was published
publish_time: u64,
}
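// For example, a stored `price` of 164_086_958_840 with an `exponent` of 8 represents
// 164_086_958_840 * 10^-8, i.e. roughly 1640.87.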
// The `PriceFeedId` type is an alias for `b256` that represents the id for a specific Pyth price feed.
pub type PriceFeedId = b256;
// PriceFeed represents a current aggregate price from Pyth publisher feeds.
pub struct PriceFeed {
// Latest available exponentially-weighted moving average price
ema_price: Price,
// The price ID.
id: PriceFeedId,
// Latest available price
price: Price,
}

View File

@ -0,0 +1,11 @@
library;
pub struct GuardianSet {
expiration_time: u64,
keys: Vec<b256>,
}
pub struct WormholeProvider {
governance_chain_id: u16,
governance_contract: b256,
}

View File

@ -0,0 +1,308 @@
library;
pub mod data_structures;
use ::data_structures::{
data_source::DataSource,
price::{
Price,
PriceFeed,
PriceFeedId,
},
wormhole_light::{
GuardianSet,
WormholeProvider,
},
};
use std::{bytes::Bytes, storage::storage_vec::*};
abi PythCore {
/// This function returns the exponentially-weighted moving average price and confidence interval.
///
/// # Arguments
///
/// * `price_feed_id`: [PriceFeedId] - The Pyth Price Feed ID of which to fetch the EMA price and confidence interval.
///
/// # Returns
///
/// * [Price] - Please read the documentation of data_structures::price to understand how to use this safely.
///
/// # Reverts
///
/// * When the EMA price is not available.
#[storage(read)]
fn ema_price(price_feed_id: PriceFeedId) -> Price;
/// This function returns the exponentially-weighted moving average price that is no older than `time_period` seconds
/// from the current time.
///
/// # Additional Information
///
/// This function is a sanity-checked version of `ema_price_unsafe` which is useful in
/// applications that require a sufficiently-recent price.
///
/// # Arguments
///
/// * `time_period`: [u64] - The period (in seconds) that a price feed is considered valid since its publish time.
/// * `price_feed_id`: [PriceFeedId] - The Pyth Price Feed ID of which to fetch the EMA price and confidence interval.
///
/// # Returns
///
/// * [Price] - Please read the documentation of data_structures::price to understand how to use this safely.
///
/// # Reverts
///
/// * When the EMA price is not available.
/// * When the EMA price wasn't updated recently enough.
#[storage(read)]
fn ema_price_no_older_than(time_period: u64, price_feed_id: PriceFeedId) -> Price;
/// This function returns the exponentially-weighted moving average price of a price feed without any sanity checks.
///
/// # Additional Information
///
/// This function returns the same price as `ema_price` in the case where the price is available.
/// However, if the price is not recent this function returns the latest available price.
///
/// The returned price can be from arbitrarily far in the past; this function makes no guarantees that
/// the returned price is recent or useful for any particular application.
///
/// Users of this function should check the `publish_time` in the `Price` to ensure that the returned price is
/// sufficiently recent for their application. If you are considering using this function, it may be
/// safer / easier to use either `ema_price` or `ema_price_no_older_than`.
///
/// # Arguments
///
/// * `price_feed_id`: [PriceFeedId] - The Pyth Price Feed ID of which to fetch the EMA price and confidence interval.
///
/// # Returns
///
/// * [Price] - Please read the documentation of data_structures::price to understand how to use this safely.
#[storage(read)]
fn ema_price_unsafe(price_feed_id: PriceFeedId) -> Price;
/// This function parses `update_data` and returns price feeds of the given `price_feed_ids` if they are all published
/// within `min_publish_time` and `max_publish_time`.
///
/// # Additional Information
///
/// You can use this method if you want to use a Pyth price at a fixed time and not the most recent price;
/// otherwise, please consider using `update_price_feeds`. This method does not store the price updates on-chain.
///
/// This method requires the caller to pay a fee in the base asset; the required fee can be computed by calling
/// `update_fee`.
///
/// # Arguments
///
/// * `max_publish_time`: [u64] - The maximum acceptable `publish_time` for the given `price_feed_ids`.
/// * `min_publish_time`: [u64] - The minimum acceptable `publish_time` for the given `price_feed_ids`.
/// * `price_feed_ids`: [Vec<PriceFeedId>] - The ids of the price feeds to return PriceFeed data for.
/// * `update_data`: [Vec<Bytes>] - The price update data.
///
/// # Returns
///
/// * [Vec<PriceFeed>] - The price feeds corresponding to the given `price_feed_ids`, published within the given time range.
///
/// # Reverts
///
/// * When the transferred fee is not sufficient
/// * When the update_data is invalid
/// * When there is no update for any of the given `price_feed_ids` within the given time range.
#[storage(read), payable]
fn parse_price_feed_updates(
max_publish_time: u64,
min_publish_time: u64,
price_feed_ids: Vec<PriceFeedId>,
update_data: Vec<Bytes>,
) -> Vec<PriceFeed>;
/// This function returns the price and confidence interval.
///
/// # Additional Information
///
/// This function is equivalent to calling `price_no_older_than` with the contract's configured `valid_time_period`.
///
/// # Arguments
///
/// * `price_feed_id`: [PriceFeedId] - The Pyth Price Feed ID of which to fetch the price and confidence interval.
///
/// # Returns
///
/// * [Price] - Please read the documentation of data_structures::price to understand how to use this safely.
///
/// # Reverts
///
/// * When the price has not been updated within the last valid time period.
#[storage(read)]
fn price(price_feed_id: PriceFeedId) -> Price;
/// This function returns the price that is no older than `time_period` seconds from the current time.
///
/// # Additional Information
///
/// This function is a sanity-checked version of `price_unsafe` which is useful in applications that require a
/// sufficiently-recent price. Reverts if the price wasn't updated sufficiently recently.
///
/// # Arguments
///
/// * `time_period`: [u64] - The period (in seconds) that a price feed is considered valid since its publish time.
/// * `price_feed_id`: [PriceFeedId] - The Pyth Price Feed ID of which to fetch the price and confidence interval.
///
/// # Returns
///
/// * [Price] - Please read the documentation of data_structures::price to understand how to use this safely.
///
/// # Reverts
///
/// * When the price is not available.
/// * When the price wasn't updated recently enough.
#[storage(read)]
fn price_no_older_than(time_period: u64, price_feed_id: PriceFeedId) -> Price;
/// This function returns the price of a price feed without any sanity checks.
///
/// # Additional Information
///
/// This function returns the most recent price update in this contract without any recency checks.
/// This function is unsafe as the returned price update may be arbitrarily far in the past.
///
/// Users of this function should check the `publish_time` in the price to ensure that the returned price is
/// sufficiently recent for their application. If you are considering using this function, it may be
/// safer / easier to use either `price` or `price_no_older_than`.
///
/// # Arguments
///
/// * `price_feed_id`: [PriceFeedId] - The Pyth Price Feed ID of which to fetch the price and confidence interval.
///
/// # Returns
///
/// * [Price] - Please read the documentation of data_structures::price to understand how to use this safely.
#[storage(read)]
fn price_unsafe(price_feed_id: PriceFeedId) -> Price;
/// This function returns the required fee in the base asset to apply the given price updates.
///
/// # Arguments
///
/// * `update_data`: [Vec<Bytes>] - The price update data.
///
/// # Returns
///
/// * [u64] - The required fee in the base asset.
#[storage(read)]
fn update_fee(update_data: Vec<Bytes>) -> u64;
/// This function updates price feeds with the given update messages.
///
/// # Additional Information
///
/// This function requires the caller to pay a fee in the base asset; the required fee can be computed by calling
/// `update_fee`.
/// Prices will be updated if they are more recent than the current stored prices.
/// The call will succeed even if the update is not the most recent.
///
/// # Arguments
///
/// * `update_data`: [Vec<Bytes>] - The price update data.
///
/// # Reverts
///
/// * When the transferred fee is not sufficient.
/// * When the `update_data` is invalid.
#[storage(read, write), payable]
fn update_price_feeds(update_data: Vec<Bytes>);
/// This function is a wrapper around `update_price_feeds` that reverts fast if a price update is not necessary.
///
/// # Additional Information
///
/// A price update is necessary if the current on-chain `publish_time` is older than the given `publish_time`. It relies solely on the
/// given `publish_time` for the price feeds and does not read the actual price update publish time within `update_data`.
///
/// This method requires the caller to pay a fee in the base asset; the required fee can be computed by calling
/// `update_fee`.
///
/// `price_feed_ids` and `publish_times` are two arrays of the same size that correspond to the sender's known `publish_time`
/// of each PriceFeedId when calling this method. If all of the price feeds within `price_feed_ids` have been updated and have
/// a publish time newer than or equal to the given one, the call is rejected to save gas.
/// Otherwise, it calls `update_price_feeds` to update the prices.
///
/// # Arguments
///
/// * `price_feed_ids`: [Vec<PriceFeedId>] - Vector of price feed ids; `price_feed_ids[i]` corresponds to known price feed id of `publish_times[i]`.
/// * `publish_times`: [Vec<u64>] - Vector of publish times; `publish_times[i]` corresponds to known publish time of `price_feed_ids[i]`.
/// * `update_data`: [Vec<Bytes>] - The price update data.
///
/// # Reverts
///
/// * When update is not necessary.
/// * When the transferred fee is not sufficient.
/// * When the `update_data` is invalid.
#[storage(read, write), payable]
fn update_price_feeds_if_necessary(
price_feed_ids: Vec<PriceFeedId>,
publish_times: Vec<u64>,
update_data: Vec<Bytes>,
);
/// This function returns the period (in seconds) that a price feed is considered valid since its publish time.
///
/// # Returns
///
/// * [u64] - The period (in seconds) that a price feed is considered valid since its publish time.
#[storage(read)]
fn valid_time_period() -> u64;
}
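// A minimal caller flow (illustrative only): a consumer would typically query `update_fee`
// for the given update data, forward at least that amount of the base asset to
// `update_price_feeds`, and then read a sufficiently recent price via `price` or
// `price_no_older_than`.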
abi PythInit {
#[storage(read, write)]
fn constructor(
data_sources: Vec<DataSource>,
single_update_fee: u64,
valid_time_period_seconds: u64,
wormhole_guardian_set_upgrade: Bytes,
);
}
abi PythInfo {
#[storage(read)]
fn latest_publish_time(price_feed_id: PriceFeedId) -> u64;
/// @notice Returns true if a price feed with the given id exists.
/// @param price_feed_id The Pyth Price Feed ID of which to check its existence.
#[storage(read)]
fn price_feed_exists(price_feed_id: PriceFeedId) -> bool;
/// @notice Returns the price feed with given id.
/// @dev Reverts if the price does not exist.
/// @param price_feed_id The Pyth Price Feed ID of which to fetch the PriceFeed.
#[storage(read)]
fn price_feed_unsafe(price_feed_id: PriceFeedId) -> PriceFeed;
#[storage(read)]
fn single_update_fee() -> u64;
#[storage(read)]
fn valid_data_source(data_source: DataSource) -> bool;
#[storage(read)]
fn valid_data_sources() -> Vec<DataSource>;
}
abi WormholeGuardians {
#[storage(read)]
fn current_guardian_set_index() -> u32;
#[storage(read)]
fn current_wormhole_provider() -> WormholeProvider;
#[storage(read)]
fn governance_action_is_consumed(hash: b256) -> bool;
#[storage(read)]
fn guardian_set(index: u32) -> GuardianSet;
#[storage(read, write)]
fn submit_new_guardian_set(vm: Bytes);
}

View File

@ -0,0 +1,63 @@
use fuels::{
prelude::{Address, Provider, WalletUnlocked},
types::Bits256,
};
use pyth_sdk::{constants::BETA_5_URL, pyth_utils::guardian_set_upgrade_4_vaa};
use pyth_sdk::{
constants::{
BTC_USD_PRICE_FEED_ID, DEFAULT_VALID_TIME_PERIOD, ETH_USD_PRICE_FEED_ID,
USDC_USD_PRICE_FEED_ID,
},
pyth_utils::{update_data_bytes, Pyth},
};
#[tokio::main]
async fn main() {
dotenv::dotenv().ok();
println!("🔮 Testnet Pyth deploy action");
let provider = Provider::connect(BETA_5_URL).await.unwrap();
let admin_pk = std::env::var("ADMIN").expect("ADMIN environment variable missing");
let admin =
WalletUnlocked::new_from_private_key(admin_pk.parse().unwrap(), Some(provider.clone()));
println!("Admin address = 0x{}\n", Address::from(admin.address()));
let pyth = Pyth::deploy(admin).await.unwrap();
let _ = pyth
.constructor(DEFAULT_VALID_TIME_PERIOD, guardian_set_upgrade_4_vaa())
.await
.unwrap();
// check the guardian set index
let gsi = pyth.current_guardian_set_index().await.unwrap().value;
println!("gsi: {:?}", gsi);
let update_data = update_data_bytes(None).await.unwrap();
let fee = pyth.update_fee(&update_data).await.unwrap().value;
//print fee
println!("fee: {:?}", fee);
let btc_price_feed = Bits256::from_hex_str(BTC_USD_PRICE_FEED_ID).unwrap();
let eth_price_feed = Bits256::from_hex_str(ETH_USD_PRICE_FEED_ID).unwrap();
let usdc_price_feed = Bits256::from_hex_str(USDC_USD_PRICE_FEED_ID).unwrap();
let _ = pyth.update_price_feeds(fee, &update_data).await.unwrap();
println!("Pyth address = 0x{:?}\n", pyth.instance.contract_id().hash);
println!(
"BTC price {:?}",
pyth.price(btc_price_feed).await.unwrap().value
);
println!(
"ETH price {:?}",
pyth.price(eth_price_feed).await.unwrap().value
);
println!(
"USDC price {:?}",
pyth.price(usdc_price_feed).await.unwrap().value
);
}

View File

@ -0,0 +1,115 @@
use crate::pyth_utils::{Price, PriceFeed};
use fuels::types::Bits256;
pub const BETA_5_URL: &str = "beta-5.fuel.network";
pub const BETA_5_PYTH_CONTRACT_ID: &str =
"0xe69daeb9fcf4c536c0fe402403b4b9e9822cc8b1f296e5d754be12cc384554c5";
pub const PYTH_CONTRACT_BINARY_PATH: &str = "./pyth-contract/out/debug/pyth-contract.bin";
pub const DEFAULT_SINGLE_UPDATE_FEE: u64 = 1;
pub const DEFAULT_VALID_TIME_PERIOD: u64 = 60;
pub const GUARDIAN_SET_UPGRADE_3_VAA: &str =
"01000000020d00ce45474d9e1b1e7790a2d210871e195db53a70ffd6f237cfe70e2686a32859ac43c84a332267a8ef66f59719cf91cc8df0101fd7c36aa1878d5139241660edc0010375cc906156ae530786661c0cd9aef444747bc3d8d5aa84cac6a6d2933d4e1a031cffa30383d4af8131e929d9f203f460b07309a647d6cd32ab1cc7724089392c000452305156cfc90343128f97e499311b5cae174f488ff22fbc09591991a0a73d8e6af3afb8a5968441d3ab8437836407481739e9850ad5c95e6acfcc871e951bc30105a7956eefc23e7c945a1966d5ddbe9e4be376c2f54e45e3d5da88c2f8692510c7429b1ea860ae94d929bd97e84923a18187e777aa3db419813a80deb84cc8d22b00061b2a4f3d2666608e0aa96737689e3ba5793810ff3a52ff28ad57d8efb20967735dc5537a2e43ef10f583d144c12a1606542c207f5b79af08c38656d3ac40713301086b62c8e130af3411b3c0d91b5b50dcb01ed5f293963f901fc36e7b0e50114dce203373b32eb45971cef8288e5d928d0ed51cd86e2a3006b0af6a65c396c009080009e93ab4d2c8228901a5f4525934000b2c26d1dc679a05e47fdf0ff3231d98fbc207103159ff4116df2832eea69b38275283434e6cd4a4af04d25fa7a82990b707010aa643f4cf615dfff06ffd65830f7f6cf6512dabc3690d5d9e210fdc712842dc2708b8b2c22e224c99280cd25e5e8bfb40e3d1c55b8c41774e287c1e2c352aecfc010b89c1e85faa20a30601964ccc6a79c0ae53cfd26fb10863db37783428cd91390a163346558239db3cd9d420cfe423a0df84c84399790e2e308011b4b63e6b8015010ca31dcb564ac81a053a268d8090e72097f94f366711d0c5d13815af1ec7d47e662e2d1bde22678113d15963da100b668ba26c0c325970d07114b83c5698f46097010dc9fda39c0d592d9ed92cd22b5425cc6b37430e236f02d0d1f8a2ef45a00bde26223c0a6eb363c8b25fd3bf57234a1d9364976cefb8360e755a267cbbb674b39501108db01e444ab1003dd8b6c96f8eb77958b40ba7a85fefecf32ad00b7a47c0ae7524216262495977e09c0989dd50f280c21453d3756843608eacd17f4fdfe47600001261025228ef5af837cb060bcd986fcfa84ccef75b3fa100468cfd24e7fadf99163938f3b841a33496c2706d0208faab088bd155b2e20fd74c625bb1cc8c43677a0163c53c409e0c5dfa000100000000000000000000000000000000000000000000000000000000000000046c5a054d7833d1e42000000000000000000000000000000000000000000000000000000000436f7265020000000000031358cc3ae5c097b213ce3c81979e1b9f9570746aa5ff6cb952589bde862c25ef4392132fb9d4a42157114de8460193bdf3a2fcf81f86a09765f4762fd1107a0086b32d7a0977926a205131d8731d39cbeb8c82b2fd82faed2711d59af0f2499d16e726f6b211b39756c042441be6d8650b69b54ebe715e234354ce5b4d348fb74b958e8966e2ec3dbd4958a7cd15e7caf07c4e3dc8e7c469f92c8cd88fb8005a2074a3bf913953d695260d88bc1aa25a4eee363ef0000ac0076727b35fbea2dac28fee5ccb0fea768eaf45ced136b9d9e24903464ae889f5c8a723fc14f93124b7c738843cbb89e864c862c38cddcccf95d2cc37a4dc036a8d232b48f62cdd4731412f4890da798f6896a3331f64b48c12d1d57fd9cbe7081171aa1be1d36cafe3867910f99c09e347899c19c38192b6e7387ccd768277c17dab1b7a5027c0b3cf178e21ad2e77ae06711549cfbb1f9c7a9d8096e85e1487f35515d02a92753504a8d75471b9f49edb6fbebc898f403e4773e95feb15e80c9a99c8348d";
pub const UPGRADE_3_VAA_GOVERNANCE_ACTION_HASH: Bits256 = Bits256([
217, 239, 119, 23, 11, 244, 8, 47, 149, 67, 246, 0, 76, 60, 57, 207, 198, 14, 21, 100, 172,
111, 192, 147, 192, 75, 95, 51, 126, 151, 234, 51,
]);
pub const GUARDIAN_SET_UPGRADE_4_VAA: &str =
"01000000030d03d4a37a6ff4361d91714730831e9d49785f61624c8f348a9c6c1d82bc1d98cadc5e936338204445c6250bb4928f3f3e165ad47ca03a5d63111168a2de4576856301049a5df10464ea4e1961589fd30fc18d1970a7a2ffaad617e56a0f7777f25275253af7d10a0f0f2494dc6e99fc80e444ab9ebbbee252ded2d5dcb50cbf7a54bb5a01055f4603b553b9ba9e224f9c55c7bca3da00abb10abd19e0081aecd3b352be061a70f79f5f388ebe5190838ef3cd13a2f22459c9a94206883b739c90b40d5d74640006a8fade3997f650a36e46bceb1f609edff201ab32362266f166c5c7da713f6a19590c20b68ed3f0119cb24813c727560ede086b3d610c2d7a1efa66f655bad90900080f5e495a75ea52241c59d145c616bfac01e57182ad8d784cbcc9862ed3afb60c0983ccbc690553961ffcf115a0c917367daada8e60be2cbb8b8008bac6341a8c010935ab11e0eea28b87a1edc5ccce3f1fac25f75b5f640fe6b0673a7cd74513c9dc01c544216cf364cc9993b09fda612e0cd1ced9c00fb668b872a16a64ebb55d27010ab2bc39617a2396e7defa24cd7c22f42dc31f3c42ffcd9d1472b02df8468a4d0563911e8fb6a4b5b0ce0bd505daa53779b08ff660967b31f246126ed7f6f29a7e000bdb6d3fd7b33bdc9ac3992916eb4aacb97e7e21d19649e7fa28d2dd6e337937e4274516a96c13ac7a8895da9f91948ea3a09c25f44b982c62ce8842b58e20c8a9000d3d1b19c8bb000856b6610b9d28abde6c35cb7705c6ca5db711f7be96d60eed9d72cfa402a6bfe8bf0496dbc7af35796fc768da51a067b95941b3712dce8ae1e7010ec80085033157fd1a5628fc0c56267469a86f0e5a66d7dede1ad4ce74ecc3dff95b60307a39c3bfbeedc915075070da30d0395def9635130584f709b3885e1bdc0010fc480eb9ee715a2d151b23722b48b42581d7f4001fc1696c75425040bfc1ffc5394fe418adb2b64bd3dc692efda4cc408163677dbe233b16bcdabb853a20843301118ee9e115e1a0c981f19d0772b850e666591322da742a9a12cce9f52a5665bd474abdd59c580016bee8aae67fdf39b315be2528d12eec3a652910e03cc4c6fa3801129d0d1e2e429e969918ec163d16a7a5b2c6729aa44af5dccad07d25d19891556a79b574f42d9adbd9e2a9ae5a6b8750331d2fccb328dd94c3bf8791ee1bfe85aa00661e99781981faea00010000000000000000000000000000000000000000000000000000000000000004fd4c6c55ec8dfd342000000000000000000000000000000000000000000000000000000000436f726502000000000004135893b5a76c3f739645648885bdccc06cd70a3cd3ff6cb952589bde862c25ef4392132fb9d4a42157114de8460193bdf3a2fcf81f86a09765f4762fd1107a0086b32d7a0977926a205131d8731d39cbeb8c82b2fd82faed2711d59af0f2499d16e726f6b211b39756c042441be6d8650b69b54ebe715e234354ce5b4d348fb74b958e8966e2ec3dbd4958a7cd15e7caf07c4e3dc8e7c469f92c8cd88fb8005a2074a3bf913953d695260d88bc1aa25a4eee363ef0000ac0076727b35fbea2dac28fee5ccb0fea768eaf45ced136b9d9e24903464ae889f5c8a723fc14f93124b7c738843cbb89e864c862c38cddcccf95d2cc37a4dc036a8d232b48f62cdd4731412f4890da798f6896a3331f64b48c12d1d57fd9cbe7081171aa1be1d36cafe3867910f99c09e347899c19c38192b6e7387ccd768277c17dab1b7a5027c0b3cf178e21ad2e77ae06711549cfbb1f9c7a9d8096e85e1487f35515d02a92753504a8d75471b9f49edb6fbebc898f403e4773e95feb15e80c9a99c8348d";
/*
You can find price feed ids at https://pyth.network/developers/price-feed-ids#pyth-evm-mainnet
TEST_BATCH_UPDATE_DATA is the corresponding update data for an update of the following price feeds prior to Pyth's Hermes service upgrade.
TEST_ACCUMULATOR_UPDATE_DATA is the corresponding update data for an update of the following price feeds after Pyth's Hermes service upgrade.
*/
pub const ETH_USD_PRICE_FEED_ID: &str =
"ff61491a931112ddf1bd8147cd1b641375f79f5825126d665480874634fd0ace";
pub const USDC_USD_PRICE_FEED_ID: &str =
"eaa020c61cc479712813461ce153894a96a6c00b21ed0cfc2798d1f9a9e9c94a";
pub const BTC_USD_PRICE_FEED_ID: &str =
"e62df6c8b4a85fe1a67db44dc12de5db330f7ac66b72dc658afedf0f4a415b43";
pub const UNI_USD_PRICE_FEED_ID: &str =
"0x78d185a741d07edb3412b09008b7c5cfb9bbbd7d568bf00ba737b456ba171501";
pub const TEST_EXTENDED_TIME_PERIOD: u64 = 3_156_000_000;
pub const TEST_BATCH_UPDATE_DATA: [&str; 2] = [
"01000000030d001b51e4d946d35c7e77f340611d0047b11a46366c21743aa1ab3485c5dc8d5ba50ddbb50072711d6e324357d0b46147e836c46d7127b1bdcea00e63bd6864d0b40002591ec14b695200a82e701088db6d24f9704094e9bf60751d96d21d5cd858c16c5371555bf86fa59055ca718cd27be45a6a0db16b471429442e5b7353aa0755120003b6d17e1c2131a136252c0cfeb846db6f997bf7da30fb27dc285de7406dfeaed97bdd63bef8042fa3455668d1f080ad2b244965754b06b097b13eccd000e46b6601047fd025f1e5e44fcef994c0c029c6c60a9ea2cec9c022e13aca5386d0fb7e084f48a03d1b85435860bf7dde210c0b2a2005707d4969307a05c42ef6c8c1fd113701081e65c223a9384526322fe3bcbddf03c35957ad448b31d7ab0575e164ba7d0e6d2394730d94ca133b1a297ee6a3592239a5fc26217c959a8e1167328a364664360109b76655ee19d1910c10acbde342be029012c5e5e133fd0dbdd3b19202c44131e1611fd8c58b7843e724ef18120bada8526a71587e5833c99d0c1ee5f8580b3c80010aba9cce6c143e232a2014d580d1d3ab6a679981cfd2c840e28a6e51b01f6d64f73e25bdfc9c63cab59e952519edda2c93ffd601ee89da8dd209409ce9bd4adea3010b939cb7c58868161e2822b6f8baef4b78c309c2e2cbec4dd7386fa2631843b5a231c394ba40ac2acece38469a2c668ac3c17d8c0f7cf67e03d2ea6694fe41f924010ce831c72700cb7eac40fbbe26dcec71b825fa12ed5235f629c075153f53fbcf3b58dcfabd84630cd53db705f101f2b74053b2a99f2ea6e40d243c99cb058424b9010d5855a6d1545239ed62b2cb4f1d3eb05b5da87b19b0ccc42a8a00429823a6527558d3378eab625d8300f5dbd4a82efe6aa01cc5f30c92a48c62881a87c1c970cc010e961968172f030819b64f45b7291b10b7463ef35c2aafc5fe09cfa1ea1657c354789e7f58292873058b47698b97335727da139513a1d1648a2a97fd4992da3b950110876b85ed65413cd49769b5d89fd17006fa2bd6e587de87002304c292b720862f4ee734acbd764e32737875df7904d582ba8a36675aad9b5a03559208e7801e300012689def292c14780b6472950c9108ab89e8f96e59a678bad94299636f02942ab87a63d2a25ed7bf4cd734ff0b6c151addbafc717c1eacd63d777b154b2d4a451d016509ac2200000000001af8cd23c2ab91237730770bbea08d61005cdda0984348f3f6eecb559638c0bba00000000027b2eea40150325748000300010001020005009d04028fba493a357ecde648d51375a445ce1cb9681da1ea11e562b53522a5d3877f981f906d7cfe93f618804f1de89e0199ead306edc022d3230b3e8305f391b0000000262d381a2d000000000d27fd0ffffffff800000026402959a80000000008e750f8010000000c0000000f000000006509ac22000000006509ac22000000006509ac21000000262d381a2d000000000d27fd0f000000006509ac20e6c020c1a15366b779a8c870e065023657c88c82b82d58a9fe856896a4034b0415ecddd26d49e1a8f1de9376ebebc03916ede873447c1255d2d5891b92ce57170000002813f5221f000000000eb8d8e0fffffff8000000282adca8a0000000000d0922e6010000000a0000000d000000006509ac22000000006509ac22000000006509ac210000002813f3b3e9000000000eb76aaa000000006509ac20c67940be40e0cc7ffaa1acb08ee3fab30955a197da1ec297ab133d4d43d86ee6ff61491a931112ddf1bd8147cd1b641375f79f5825126d665480874634fd0ace0000002634584af800000000042d0d26fffffff80000002646b141780000000003885c6e010000001c00000020000000006509ac22000000006509ac22000000006509ac2100000026344f4b2c0000000003f7ac6e000000006509ac218d7c0971128e8a4764e757dedb32243ed799571706af3a68ab6a75479ea524ff846ae1bdb6300b817cee5fdee2a6da192775030db5615b94a465f53bd40850b50000002630681286000000000db7d7d4fffffff80000002642d956a8000000000a25111c010000000d0000000e000000006509ac22000000006509ac21000000006509ac210000002630681286000000000bd82220000000006509ac20543b71a4c292744d3fcf814a2ccda6f7c00f283d457f83aa73c41e9defae034ba0255134973f4fdf2f8f7808354274a3b1ebc6ee438be898d045e8b56ba1fe1300000000000000000000000000000000fffffff800000000000000000000000000000000000000000400000008000000006509ac22000000006509ac210000000000000000000000000000000000000000000000000000000000000000",
"01000000030d0022aaaa4bd962a5d265c95bf56be1940d78d10c4b06a8e6ca96cf45dae7f9ddc03790489a5d223fa491cfd90b2d8e4be190be02beefa0387593205c8c8e3f238f0102c509f97882083f6a960a01ff11e55c729c7c1e2c74dba40d5a3cd4cc7f2afcc4491032211d48dba44f7ce4fbd91c1eb0845083f961519bb50af3913159c55e910003da8bdfb35391a072450e03fa019f3b9fdc2971ebf55b24d334956097b863686e4be02b244570a91088433b8af9d774ea22659079cc62bc97bd15486c488bca55010455bbc59a4463198852702cc92fb40af92b440bd63ec119d96663dffe13f4204a5e41b7711a996852ea0a8ff6ddb5679078de0fd257dbd9246effaf921da951f400083363c6ba4a683820a8246485daa82cdb76571e01cc52d31c8198b921102da4850da23eda6b080b8b9b50981e8a042037a2eb081ab2cd0560b1419843c7b722740109ead0fe09278b8f9a64647323734fc72c061565145a137d31eb988a4847bcbdca2168f245506581438c08fad54a9b4e3b82441a8784286d9d388cb20acc84956a010a64ff6a9138bbaa73290ee69676b6f0c29ce546185eee4e241c290af271a7c9690b6320a16b33a5e11a8906f69ac3c81b9fe564ad9bc98168407a1e4a521b8547010beb43c832acae9374910109d00c6bb163ef67ac252f13486472874ff0415346ae6cfbb6899cfba138f0d932574de9d44f8e03d9e07907b5d16c75f915e1d9be96000c123460082f0aad0f5afb570d88e9931f08f484ef307642a6ff4a7d21a628778a180db29ea06e36acd94a8a2272b9ccc05af96258a9c3cb3cb78fbe3e73d7b3a1000d9c90528b9a87ea7f095076be9290b526ee2794b44032f6f86935d335707a0b0c1566953d31e317e558766123c3904c5dbf9a0a193598c34c8852be20bf595a38000e01b5aef758c7d4124439888bed8a560dbd78e94cdc0a5e4fd39a95bab39aa64977c6b7dab96253420dd473a87c37d951f96131112e9bbc9f81b4fe56e040f51c0010cae1df6dad0a236786aae2f22668d921b9b1d09ba2ef74f0214f0a3055ec1d913fac9e1c7c33338443aeb92e16f59bbabda44da77bbbaa5766ccac2b93da119c011265e001640ed56620f638102e240716bfd66435131b370b92fb0cbb39493c4ffe417dacc48982831040ce5cfe8f3c278bd16c3bbdc8b300e53e8ddb020bc2d6fd006509ac2200000000001af8cd23c2ab91237730770bbea08d61005cdda0984348f3f6eecb559638c0bba00000000027b2ee9d0150325748000300010001020005009db0e13ce3260d884b0417c6b4d152d45b2f13991a8592522fad0068a4bce3dfbdf0d57deca57b3da2fe63a493f4c25925fdfd8edf834b20f93e1f84dbd1504d4a0000000000011fbc000000000000005ffffffff6000000000001209e0000000000000068010000001100000016000000006509ac22000000006509ac22000000006509ac210000000000011fbc000000000000005f000000006509ac218ab03cff1844ab975dcdd1683020c0599fc5392b6f2e12d5dd615bcc2c2e6d08ef0d8b6fda2ceba41da15d4095d1da392a0d2f8ed0c6c7bc0f4cfac8c280b56d0000000076e1a7a00000000000189196fffffff800000000774edb860000000000167db6010000001a00000020000000006509ac22000000006509ac22000000006509ac210000000076e18bf400000000001875ea000000006509ac21127ab385f079cf02de5a6c0bc8414267acd086fd268730caf319e86b88d2342923d7315113f5b1d3ba7a83604c44b94d79f4fd69af77f804fc7f920a6dc657440000000002ac7a6d000000000000b6cafffffff80000000002b09148000000000000840b010000001200000015000000006509ac22000000006509ac22000000006509ac210000000002ac8bb30000000000009068000000006509ac21c12e5d198c9c673e9ce03265e7d9be69cd6a0c674aabd3d2c41ff5764023e22878d185a741d07edb3412b09008b7c5cfb9bbbd7d568bf00ba737b456ba171501000000001a54d4420000000000045e4dfffffff8000000001a699208000000000004825201000000180000001d000000006509ac22000000006509ac22000000006509ac21000000001a54d44200000000000459ad000000006509ac216bfad3ab2ad6ed59591a5a77cc9b162f8e228e89ef56151b24e15426a2bb4d48eaa020c61cc479712813461ce153894a96a6c00b21ed0cfc2798d1f9a9e9c94a0000000005f5e54c0000000000005463fffffff80000000005f5e09000000000000053af01000000160000001b000000006509ac22000000006509ac22000000006509ac210000000005f5e54c0000000000005463000000006509ac21"
];
pub const TEST_ACCUMULATOR_UPDATE_DATA: &str = "UE5BVQEAAAADuAEAAAADDQHZQAkib294P0wA9HaAI75z6ADW9Deb8xoQhCQPldh3SW1cpUtnWXAPoseiiFU+GD93EUWFbiwjj8x2i4u9dVDGAAJzve5v2O//gbEkRkblLOSgNyCcgi/zMjgJCfF/XBR9igeBr9QqRn8AiJyYsVKQj2FEKjFvJB1M4qKf2AkmtEC0AQO3K1z4+7M6aSxlZOu8v37HLizCShleOXY8/R3AgwWEsD1cy6cS4lzAByc5O4MD4SXS70AlPAyLy+fyzgPjsg2sAAS8g7z6C/VZ/oRFA10H5XmIN6AtS5vtTmv5GRadDvzf43Z23XjXjCygg27qzAH2/p6pe7PsXGgEB7LZtb097W7SAAbV56MzblHOQK1wWaTnNK7YOeEbzY5/qRt2WRIWoETqRV+fiYbsAaiKvScBiWMZoG6cF1WUpqMIa6OWPHXDUoGbAAiAAb/uLRAJ0FQm9petvB5T0tA+ijPYPz1RrgymnVdw8UerbCEOO5f8oUfIbvva2piyZbDENn67wXShZnI8pm4aAQmVdkk5kCj2UGq5GbH+j9md0HjZAs3mxu0SwK6XQ/gFiFdB3ksY1epYjHFowD/cTOU22B9jem3XHGNN64LuHU+DAQrhtog708Yt+mOhnBHq5VbH0DNxUi3rrC2WDndm6fr9EHt5kvovIXGdEAKEJIy8V71cRM9JnhqEfAQgqJGBDc8NAAsMt3qBcV5O3dTfPGbQ4B7YrfYP0qBrTkCorWRHHcdzAkDUvJg8/rVe5BSgvsv85b7Wc5LRLl9D91tKn1/5Nr8lAA30nswq/fUgnM9YGgLRk9Q3VjiQM05zi1o4nHSQam+naHTUTLjUdG9muhL5D9Sm87h4KeHsSLGcm10Z3ALyTuR6AA5/3Fxn3OATaNnX9Q5zaodPWACqTjeqDdhX9nsDCCg/kVzDTwCLV8rMN/5zNIuW+6B5IxCefoHWr3wTaBDOH4pRARBQwSVI79R1oqOD9Q0I6aEw9YiZzndgICAUgm1s0XQmrnIPXILg1S6U5SBmbmasFrmuQjzyT2bfQEFepdSkBtL4ARIJBhwqP7cCcN54bRO8BFMf9uggBwefvAOItjHlmeD6cyCd/AdNazo+yzxQ8OySX1fSV/KWR3HgZAa9sFiqQkioAGX3kJ8AAAAAABrhAfrtrFhR4yubI7X5QRqMK6xKrj7U3XuBHdGnLqSqcQAAAAACt7PmAUFVV1YAAAAAAAfA4lsAACcQFs+MLyt9WTDbgrvDKemEA77GspsCAFUA/2FJGpMREt3xvYFHzRtkE3X3n1glEm1mVICHRjT9Cs4AAABTdXi3FQAAAAAbBdyL////+AAAAABl95CfAAAAAGX3kJ4AAABUeMdMYAAAAAASjyFCCks8cVh/PJ9xfgCM9x3FsIMJFnVjEojlpG4imZK2XyO5bhjP67ysp68K0jfHqXjoX7/R/9iprYZp9MU1kFh3kyVHUfL13SuiDfJ7x488z0p+mmzfGV82Ulra0F+wmHjnxOjmL3nvCVi1kpNBqIyxTT4C2LO6A2bzQDLc8lu//y0pCITlbnUPUX8kEURLGMKLvl4AZCouKhEf96WVLn4J0afYAg4rr0e2N5cE8Nrc9eFIRbfhPtf+Tq3bZD2Cz195wosYubVUAhrqAFUA6qAgxhzEeXEoE0Yc4VOJSpamwAsh7Qz8J5jR+anpyUoAAAAABfZfVQAAAAAAASks////+AAAAABl95CfAAAAAGX3kJ4AAAAABfYthQAAAAAAAQapCsekl7cD3r9Brv2cv2oRm84XAtQ1lrUyNMk8O0VT1BZIvTc7XvsLBV7yjN9+xITbLFqrASPD1onPE2frSTSVfSAWUEUBg/PldOce8OVmbQYhbysX8dzRdX9qDVlMc5wNYrzmKVINKXl+6z85upK8FFj9yD+civPT6E3qQUn0Zun7naDPhIRVDsnlP8vSGMKLvl4AZCouKhEf96WVLn4J0afYAg4rr0e2N5cE8Nrc9eFIRbfhPtf+Tq3bZD2Cz195wosYubVUAhrq";
// price feeds from Pyth network at the time of the TEST_BATCH_UPDATE_DATA
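// Note: the `publish_time` values below are TAI64 timestamps (seconds offset by 2^62);
// for example 4611686020122520610 corresponds to roughly unix time 1695132706.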
pub const TEST_BATCH_ETH_USD_PRICE_FEED: PriceFeed = PriceFeed {
ema_price: Price {
confidence: 59268206,
exponent: 8,
price: 164394779000,
publish_time: 4611686020122520610,
},
id: Bits256([
255, 97, 73, 26, 147, 17, 18, 221, 241, 189, 129, 71, 205, 27, 100, 19, 117, 247, 159, 88,
37, 18, 109, 102, 84, 128, 135, 70, 52, 253, 10, 206,
]),
price: Price {
confidence: 70061350,
exponent: 8,
price: 164086958840,
publish_time: 4611686020122520610,
},
};
pub const TEST_BATCH_USDC_USD_PRICE_FEED: PriceFeed = PriceFeed {
ema_price: Price {
confidence: 21423,
exponent: 8,
price: 99999888,
publish_time: 4611686020122520610,
},
id: Bits256([
234, 160, 32, 198, 28, 196, 121, 113, 40, 19, 70, 28, 225, 83, 137, 74, 150, 166, 192, 11,
33, 237, 12, 252, 39, 152, 209, 249, 169, 233, 201, 74,
]),
price: Price {
confidence: 21603,
exponent: 8,
price: 100001100,
publish_time: 4611686020122520610,
},
};
// price feeds from Pyth network at the time of the TEST_ACCUMULATOR_UPDATE_DATA
pub const TEST_ACCUMULATOR_ETH_USD_PRICE_FEED: PriceFeed = PriceFeed {
ema_price: Price {
confidence: 311370050,
exponent: 8,
price: 362803580000,
publish_time: 4611686020138111135,
},
id: Bits256([
255, 97, 73, 26, 147, 17, 18, 221, 241, 189, 129, 71, 205, 27, 100, 19, 117, 247, 159, 88,
37, 18, 109, 102, 84, 128, 135, 70, 52, 253, 10, 206,
]),
price: Price {
confidence: 453368971,
exponent: 8,
price: 358453131029,
publish_time: 4611686020138111135,
},
};
pub const TEST_ACCUMULATOR_USDC_USD_PRICE_FEED: PriceFeed = PriceFeed {
ema_price: Price {
confidence: 67241,
exponent: 8,
price: 100019589,
publish_time: 4611686020138111135,
},
id: Bits256([
234, 160, 32, 198, 28, 196, 121, 113, 40, 19, 70, 28, 225, 83, 137, 74, 150, 166, 192, 11,
33, 237, 12, 252, 39, 152, 209, 249, 169, 233, 201, 74,
]),
price: Price {
confidence: 76076,
exponent: 8,
price: 100032341,
publish_time: 4611686020138111135,
},
};

View File

@ -0,0 +1,2 @@
pub mod constants;
pub mod pyth_utils;

View File

@ -0,0 +1,233 @@
use crate::constants::{
BTC_USD_PRICE_FEED_ID, DEFAULT_SINGLE_UPDATE_FEE, ETH_USD_PRICE_FEED_ID,
GUARDIAN_SET_UPGRADE_3_VAA, GUARDIAN_SET_UPGRADE_4_VAA, PYTH_CONTRACT_BINARY_PATH,
TEST_ACCUMULATOR_UPDATE_DATA, TEST_BATCH_UPDATE_DATA, UNI_USD_PRICE_FEED_ID,
USDC_USD_PRICE_FEED_ID,
};
use base64::{
engine::general_purpose,
prelude::{Engine, BASE64_STANDARD},
};
use fuels::{
prelude::{abigen, CallParameters, Contract, LoadConfiguration, TxPolicies, WalletUnlocked},
programs::call_response::FuelCallResponse,
types::{errors::Error, Address, Bits256, Bytes, Identity},
};
use rand::Rng;
use reqwest;
use serde_json;
use std::path::PathBuf;
abigen!(Contract(
name = "PythOracleContract",
abi = "pyth-contract/out/debug/pyth-contract-abi.json"
));
pub struct Pyth {
pub instance: PythOracleContract<WalletUnlocked>,
pub wallet: WalletUnlocked,
}
pub async fn update_data_bytes(
price_feed_ids: Option<Vec<&str>>,
) -> Result<Vec<Bytes>, Box<dyn std::error::Error>> {
let c = reqwest::Client::new();
let price_feed_ids = price_feed_ids.unwrap_or_else(|| {
vec![
ETH_USD_PRICE_FEED_ID,
USDC_USD_PRICE_FEED_ID,
BTC_USD_PRICE_FEED_ID,
UNI_USD_PRICE_FEED_ID,
]
});
let mut ids_query_part = String::new();
for (index, id) in price_feed_ids.iter().enumerate() {
if index > 0 {
ids_query_part.push('&');
}
ids_query_part.push_str(&format!("ids[]={}", id));
}
let req_url = format!(
"https://hermes.pyth.network/api/latest_vaas?{}",
ids_query_part
);
let body = c.get(&req_url).send().await?.text().await?;
let response: Vec<&str> = serde_json::from_str(&body)?;
let bytes_data: Vec<Bytes> = response
.iter()
.map(|data| {
Bytes(
general_purpose::STANDARD
.decode::<&str>(data)
.unwrap()
.to_owned(),
)
})
.collect();
Ok(bytes_data)
}
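// Typical flow (illustrative): fetch fresh update data with `update_data_bytes(None)`,
// query the required fee via `Pyth::update_fee`, then forward that fee when calling
// `Pyth::update_price_feeds`, as the deploy script earlier in this change does.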
pub fn test_batch_update_data_bytes() -> Vec<Bytes> {
TEST_BATCH_UPDATE_DATA
.iter()
.map(|update| Bytes(hex::decode(update).unwrap()))
.collect()
}
pub fn test_accumulator_update_data_bytes() -> Vec<Bytes> {
vec![Bytes(
BASE64_STANDARD
.decode(TEST_ACCUMULATOR_UPDATE_DATA)
.unwrap(),
)]
}
impl Pyth {
pub async fn price(&self, price_feed_id: Bits256) -> Result<FuelCallResponse<Price>, Error> {
self.instance
.methods()
.price(price_feed_id)
.simulate()
.await
}
pub async fn update_price_feeds(
&self,
fee: u64,
update_data: &[Bytes],
) -> Result<FuelCallResponse<()>, Error> {
self.instance
.methods()
.update_price_feeds(update_data.to_vec())
.call_params(CallParameters::default().with_amount(fee))?
.call()
.await
}
pub async fn update_fee(&self, update_data: &[Bytes]) -> Result<FuelCallResponse<u64>, Error> {
self.instance
.methods()
.update_fee(update_data.to_vec())
.simulate()
.await
}
pub async fn constructor(
&self,
valid_time_period_seconds: u64,
wormhole_guardian_set_upgrade: Bytes,
) -> Result<FuelCallResponse<()>, Error> {
self.instance
.methods()
.constructor(
default_data_sources(),
DEFAULT_SINGLE_UPDATE_FEE,
valid_time_period_seconds,
wormhole_guardian_set_upgrade,
)
.with_tx_policies(TxPolicies::default().with_gas_price(1))
.call()
.await
}
pub async fn deploy(wallet: WalletUnlocked) -> Result<Self, Error> {
let mut rng = rand::thread_rng();
let salt = rng.gen::<[u8; 32]>();
let configurables = PythOracleContractConfigurables::default()
.with_DEPLOYER(Identity::Address(Address::from(wallet.address())));
let config = LoadConfiguration::default().with_configurables(configurables);
let id = Contract::load_from(
PathBuf::from(env!("CARGO_MANIFEST_DIR")).join(PYTH_CONTRACT_BINARY_PATH),
config,
)?;
let deployed_contract = id
.with_salt(salt)
.deploy(&wallet, TxPolicies::default().with_gas_price(1))
.await?;
Ok(Self {
instance: PythOracleContract::new(deployed_contract, wallet.clone()),
wallet,
})
}
pub async fn current_guardian_set_index(&self) -> Result<FuelCallResponse<u32>, Error> {
self.instance
.methods()
.current_guardian_set_index()
.simulate()
.await
}
}
pub fn guardian_set_upgrade_3_vaa() -> Bytes {
Bytes(hex::decode(GUARDIAN_SET_UPGRADE_3_VAA).unwrap())
}
pub fn guardian_set_upgrade_4_vaa() -> Bytes {
Bytes(hex::decode(GUARDIAN_SET_UPGRADE_4_VAA).unwrap())
}
pub fn default_price_feed_ids() -> Vec<Bits256> {
vec![
Bits256(
hex::decode(ETH_USD_PRICE_FEED_ID)
.unwrap()
.try_into()
.unwrap(),
),
Bits256(
hex::decode(USDC_USD_PRICE_FEED_ID)
.unwrap()
.try_into()
.unwrap(),
),
]
}
// data sources from Pyth EVM deployment docs:
// https://github.com/pyth-network/pyth-crosschain/blob/2008da7a451231489d9866d7ceae3799c07e1fb5/contract_manager/src/base.ts#L116
pub fn default_data_sources() -> Vec<DataSource> {
vec![
DataSource {
chain_id: 1,
emitter_address: Bits256::from_hex_str(
"6bb14509a612f01fbbc4cffeebd4bbfb492a86df717ebe92eb6df432a3f00a25",
)
.unwrap(),
},
DataSource {
chain_id: 26,
emitter_address: Bits256::from_hex_str(
"f8cd23c2ab91237730770bbea08d61005cdda0984348f3f6eecb559638c0bba0",
)
.unwrap(),
},
DataSource {
chain_id: 26,
emitter_address: Bits256::from_hex_str(
"e101faedac5851e32b9b23b5f9411a8c2bac4aae3ed4dd7b811dd1a72ea4aa71",
)
.unwrap(),
},
DataSource {
chain_id: 1,
emitter_address: Bits256::from_hex_str(
"f346195ac02f37d60d4db8ffa6ef74cb1be3550047543a4a9ee9acf4d78697b0",
)
.unwrap(),
},
DataSource {
chain_id: 26,
emitter_address: Bits256::from_hex_str(
"a27839d641b07743c0cb5f68c51f8cd31d2c0762bec00dc6fcd25433ef1ab5b6",
)
.unwrap(),
},
]
}

View File

@ -0,0 +1,4 @@
pub(crate) mod pyth_core;
pub(crate) mod pyth_info;
pub(crate) mod pyth_init;
pub(crate) mod wormhole_guardians;

View File

@ -0,0 +1,102 @@
use crate::utils::interface::{
pyth_core::{ema_price, update_fee, update_price_feeds},
pyth_init::constructor,
};
use crate::utils::setup::setup_environment;
use pyth_sdk::{
constants::{
DEFAULT_SINGLE_UPDATE_FEE, TEST_ACCUMULATOR_ETH_USD_PRICE_FEED,
TEST_ACCUMULATOR_USDC_USD_PRICE_FEED, TEST_BATCH_ETH_USD_PRICE_FEED,
TEST_BATCH_USDC_USD_PRICE_FEED, TEST_EXTENDED_TIME_PERIOD,
},
pyth_utils::{
default_data_sources, default_price_feed_ids, guardian_set_upgrade_3_vaa,
test_accumulator_update_data_bytes, test_batch_update_data_bytes,
},
};
mod success {
use super::*;
#[tokio::test]
async fn gets_ema_price_for_batch_update() {
let (_oracle_contract_id, deployer) = setup_environment().await.unwrap();
constructor(
&deployer.instance,
default_data_sources(),
DEFAULT_SINGLE_UPDATE_FEE,
TEST_EXTENDED_TIME_PERIOD, //As the contract checks against the current timestamp, this allows unit testing with old but real price updates
guardian_set_upgrade_3_vaa(),
)
.await;
let fee = update_fee(&deployer.instance, test_batch_update_data_bytes())
.await
.value;
update_price_feeds(&deployer.instance, fee, test_batch_update_data_bytes()).await;
let eth_usd_ema_price = ema_price(&deployer.instance, default_price_feed_ids()[0])
.await
.value;
let usdc_usd_ema_price = ema_price(&deployer.instance, default_price_feed_ids()[1])
.await
.value;
assert_eq!(
(eth_usd_ema_price.price as f64) * 10f64.powf(-(eth_usd_ema_price.exponent as f64)),
(TEST_BATCH_ETH_USD_PRICE_FEED.ema_price.price as f64)
* 10f64.powf(-(TEST_BATCH_ETH_USD_PRICE_FEED.ema_price.exponent as f64)),
);
assert_eq!(
(usdc_usd_ema_price.price as f64) * 10f64.powf(-(usdc_usd_ema_price.exponent as f64)),
(TEST_BATCH_USDC_USD_PRICE_FEED.ema_price.price as f64)
* 10f64.powf(-(TEST_BATCH_USDC_USD_PRICE_FEED.ema_price.exponent as f64)),
);
}
#[tokio::test]
async fn gets_ema_price_for_accumulator_update() {
let (_oracle_contract_id, deployer) = setup_environment().await.unwrap();
constructor(
&deployer.instance,
default_data_sources(),
DEFAULT_SINGLE_UPDATE_FEE,
TEST_EXTENDED_TIME_PERIOD, //As the contract checks against the current timestamp, this allows unit testing with old but real price updates
guardian_set_upgrade_3_vaa(),
)
.await;
let fee = update_fee(&deployer.instance, test_accumulator_update_data_bytes())
.await
.value;
update_price_feeds(
&deployer.instance,
fee,
test_accumulator_update_data_bytes(),
)
.await;
let eth_usd_ema_price = ema_price(&deployer.instance, default_price_feed_ids()[0])
.await
.value;
let usdc_usd_ema_price = ema_price(&deployer.instance, default_price_feed_ids()[1])
.await
.value;
assert_eq!(
(eth_usd_ema_price.price as f64) * 10f64.powf(-(eth_usd_ema_price.exponent as f64)),
(TEST_ACCUMULATOR_ETH_USD_PRICE_FEED.ema_price.price as f64)
* 10f64.powf(-(TEST_ACCUMULATOR_ETH_USD_PRICE_FEED.ema_price.exponent as f64)),
);
assert_eq!(
(usdc_usd_ema_price.price as f64) * 10f64.powf(-(usdc_usd_ema_price.exponent as f64)),
(TEST_ACCUMULATOR_USDC_USD_PRICE_FEED.ema_price.price as f64)
* 10f64.powf(-(TEST_ACCUMULATOR_USDC_USD_PRICE_FEED.ema_price.exponent as f64)),
);
}
}

View File

@ -0,0 +1,118 @@
use crate::utils::interface::{
pyth_core::{update_fee, update_price_feeds},
pyth_init::constructor,
};
use pyth_sdk::{
constants::{
DEFAULT_SINGLE_UPDATE_FEE, DEFAULT_VALID_TIME_PERIOD, TEST_ACCUMULATOR_ETH_USD_PRICE_FEED,
TEST_ACCUMULATOR_USDC_USD_PRICE_FEED, TEST_BATCH_ETH_USD_PRICE_FEED,
TEST_BATCH_USDC_USD_PRICE_FEED, TEST_EXTENDED_TIME_PERIOD,
},
pyth_utils::{
default_data_sources, default_price_feed_ids, guardian_set_upgrade_3_vaa,
test_accumulator_update_data_bytes, test_batch_update_data_bytes,
},
};
use crate::utils::{interface::pyth_core::ema_price_no_older_than, setup::setup_environment};
mod success {
use super::*;
#[tokio::test]
async fn gets_ema_price_no_older_than_for_batch_update() {
let (_oracle_contract_id, deployer) = setup_environment().await.unwrap();
constructor(
&deployer.instance,
default_data_sources(),
DEFAULT_SINGLE_UPDATE_FEE,
DEFAULT_VALID_TIME_PERIOD,
guardian_set_upgrade_3_vaa(),
)
.await;
let fee = update_fee(&deployer.instance, test_batch_update_data_bytes())
.await
.value;
update_price_feeds(&deployer.instance, fee, test_batch_update_data_bytes()).await;
let eth_usd_ema_price = ema_price_no_older_than(
&deployer.instance,
TEST_EXTENDED_TIME_PERIOD,
default_price_feed_ids()[0],
)
.await
.value;
let usdc_usd_ema_price = ema_price_no_older_than(
&deployer.instance,
TEST_EXTENDED_TIME_PERIOD,
default_price_feed_ids()[1],
)
.await
.value;
assert_eq!(
(eth_usd_ema_price.price as f64) * 10f64.powf(-(eth_usd_ema_price.exponent as f64)),
(TEST_BATCH_ETH_USD_PRICE_FEED.ema_price.price as f64)
* 10f64.powf(-(TEST_BATCH_ETH_USD_PRICE_FEED.ema_price.exponent as f64)),
);
assert_eq!(
(usdc_usd_ema_price.price as f64) * 10f64.powf(-(usdc_usd_ema_price.exponent as f64)),
(TEST_BATCH_USDC_USD_PRICE_FEED.ema_price.price as f64)
* 10f64.powf(-(TEST_BATCH_USDC_USD_PRICE_FEED.ema_price.exponent as f64)),
);
}
#[tokio::test]
async fn gets_ema_price_no_older_than_for_accumulator_update() {
let (_oracle_contract_id, deployer) = setup_environment().await.unwrap();
constructor(
&deployer.instance,
default_data_sources(),
DEFAULT_SINGLE_UPDATE_FEE,
DEFAULT_VALID_TIME_PERIOD,
guardian_set_upgrade_3_vaa(),
)
.await;
let fee = update_fee(&deployer.instance, test_accumulator_update_data_bytes())
.await
.value;
update_price_feeds(
&deployer.instance,
fee,
test_accumulator_update_data_bytes(),
)
.await;
let eth_usd_ema_price = ema_price_no_older_than(
&deployer.instance,
TEST_EXTENDED_TIME_PERIOD,
default_price_feed_ids()[0],
)
.await
.value;
let usdc_usd_ema_price = ema_price_no_older_than(
&deployer.instance,
TEST_EXTENDED_TIME_PERIOD,
default_price_feed_ids()[1],
)
.await
.value;
assert_eq!(
(eth_usd_ema_price.price as f64) * 10f64.powf(-(eth_usd_ema_price.exponent as f64)),
(TEST_ACCUMULATOR_ETH_USD_PRICE_FEED.ema_price.price as f64)
* 10f64.powf(-(TEST_ACCUMULATOR_ETH_USD_PRICE_FEED.ema_price.exponent as f64)),
);
assert_eq!(
(usdc_usd_ema_price.price as f64) * 10f64.powf(-(usdc_usd_ema_price.exponent as f64)),
(TEST_ACCUMULATOR_USDC_USD_PRICE_FEED.ema_price.price as f64)
* 10f64.powf(-(TEST_ACCUMULATOR_USDC_USD_PRICE_FEED.ema_price.exponent as f64)),
);
}
}

View File

@ -0,0 +1,102 @@
use crate::utils::interface::{
pyth_core::{ema_price_unsafe, update_fee, update_price_feeds},
pyth_init::constructor,
};
use crate::utils::setup::setup_environment;
use pyth_sdk::{
constants::{
DEFAULT_SINGLE_UPDATE_FEE, DEFAULT_VALID_TIME_PERIOD, TEST_ACCUMULATOR_ETH_USD_PRICE_FEED,
TEST_ACCUMULATOR_USDC_USD_PRICE_FEED, TEST_BATCH_ETH_USD_PRICE_FEED,
TEST_BATCH_USDC_USD_PRICE_FEED,
},
pyth_utils::{
default_data_sources, default_price_feed_ids, guardian_set_upgrade_3_vaa,
test_accumulator_update_data_bytes, test_batch_update_data_bytes,
},
};
mod success {
use super::*;
#[tokio::test]
async fn gets_ema_price_unsafe_for_batch_update() {
let (_oracle_contract_id, deployer) = setup_environment().await.unwrap();
constructor(
&deployer.instance,
default_data_sources(),
DEFAULT_SINGLE_UPDATE_FEE,
DEFAULT_VALID_TIME_PERIOD,
guardian_set_upgrade_3_vaa(),
)
.await;
let fee = update_fee(&deployer.instance, test_batch_update_data_bytes())
.await
.value;
update_price_feeds(&deployer.instance, fee, test_batch_update_data_bytes()).await;
let eth_usd_ema_price = ema_price_unsafe(&deployer.instance, default_price_feed_ids()[0])
.await
.value;
let usdc_usd_ema_price = ema_price_unsafe(&deployer.instance, default_price_feed_ids()[1])
.await
.value;
assert_eq!(
(eth_usd_ema_price.price as f64) * 10f64.powf(-(eth_usd_ema_price.exponent as f64)),
(TEST_BATCH_ETH_USD_PRICE_FEED.ema_price.price as f64)
* 10f64.powf(-(TEST_BATCH_ETH_USD_PRICE_FEED.ema_price.exponent as f64)),
);
assert_eq!(
(usdc_usd_ema_price.price as f64) * 10f64.powf(-(usdc_usd_ema_price.exponent as f64)),
(TEST_BATCH_USDC_USD_PRICE_FEED.ema_price.price as f64)
* 10f64.powf(-(TEST_BATCH_USDC_USD_PRICE_FEED.ema_price.exponent as f64)),
);
}
#[tokio::test]
async fn gets_ema_price_unsafe_for_accumulator_update() {
let (_oracle_contract_id, deployer) = setup_environment().await.unwrap();
constructor(
&deployer.instance,
default_data_sources(),
DEFAULT_SINGLE_UPDATE_FEE,
DEFAULT_VALID_TIME_PERIOD,
guardian_set_upgrade_3_vaa(),
)
.await;
let fee = update_fee(&deployer.instance, test_accumulator_update_data_bytes())
.await
.value;
update_price_feeds(
&deployer.instance,
fee,
test_accumulator_update_data_bytes(),
)
.await;
let eth_usd_ema_price = ema_price_unsafe(&deployer.instance, default_price_feed_ids()[0])
.await
.value;
let usdc_usd_ema_price = ema_price_unsafe(&deployer.instance, default_price_feed_ids()[1])
.await
.value;
assert_eq!(
(eth_usd_ema_price.price as f64) * 10f64.powf(-(eth_usd_ema_price.exponent as f64)),
(TEST_ACCUMULATOR_ETH_USD_PRICE_FEED.ema_price.price as f64)
* 10f64.powf(-(TEST_ACCUMULATOR_ETH_USD_PRICE_FEED.ema_price.exponent as f64)),
);
assert_eq!(
(usdc_usd_ema_price.price as f64) * 10f64.powf(-(usdc_usd_ema_price.exponent as f64)),
(TEST_ACCUMULATOR_USDC_USD_PRICE_FEED.ema_price.price as f64)
* 10f64.powf(-(TEST_ACCUMULATOR_USDC_USD_PRICE_FEED.ema_price.exponent as f64)),
);
}
}

View File

@ -0,0 +1,10 @@
pub(crate) mod ema_price;
pub(crate) mod ema_price_no_older_than;
pub(crate) mod ema_price_unsafe;
pub(crate) mod parse_price_feed_updates;
pub(crate) mod price;
pub(crate) mod price_no_older_than;
pub(crate) mod price_unsafe;
pub(crate) mod update_fee;
pub(crate) mod update_price_feeds;
pub(crate) mod update_price_feeds_if_necessary;

View File

@ -0,0 +1,87 @@
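//! Tests for `parse_price_feed_updates`, checking that feeds parsed from batch and
//! accumulator update data match the expected test price feeds.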
use crate::utils::interface::{
pyth_core::{parse_price_feed_updates, update_fee},
pyth_init::constructor,
};
use crate::utils::setup::setup_environment;
use pyth_sdk::{
constants::{
DEFAULT_SINGLE_UPDATE_FEE, DEFAULT_VALID_TIME_PERIOD, TEST_ACCUMULATOR_ETH_USD_PRICE_FEED,
TEST_ACCUMULATOR_USDC_USD_PRICE_FEED, TEST_BATCH_ETH_USD_PRICE_FEED,
TEST_BATCH_USDC_USD_PRICE_FEED,
},
pyth_utils::{
default_data_sources, default_price_feed_ids, guardian_set_upgrade_3_vaa,
test_accumulator_update_data_bytes, test_batch_update_data_bytes,
},
};
mod success {
use super::*;
#[tokio::test]
async fn parses_price_feed_batch_updates() {
let (_oracle_contract_id, deployer) = setup_environment().await.unwrap();
constructor(
&deployer.instance,
default_data_sources(),
DEFAULT_SINGLE_UPDATE_FEE,
DEFAULT_VALID_TIME_PERIOD,
guardian_set_upgrade_3_vaa(),
)
.await;
let fee = update_fee(&deployer.instance, test_batch_update_data_bytes())
.await
.value;
let max_publish_time = TEST_BATCH_ETH_USD_PRICE_FEED.price.publish_time;
let price_feeds = parse_price_feed_updates(
&deployer.instance,
fee,
max_publish_time,
max_publish_time - DEFAULT_VALID_TIME_PERIOD,
default_price_feed_ids(),
test_batch_update_data_bytes(),
)
.await
.value;
assert_eq!(price_feeds[0], TEST_BATCH_ETH_USD_PRICE_FEED);
assert_eq!(price_feeds[1], TEST_BATCH_USDC_USD_PRICE_FEED);
}
#[tokio::test]
async fn parses_price_feed_accumulator_updates() {
let (_oracle_contract_id, deployer) = setup_environment().await.unwrap();
constructor(
&deployer.instance,
default_data_sources(),
DEFAULT_SINGLE_UPDATE_FEE,
DEFAULT_VALID_TIME_PERIOD,
guardian_set_upgrade_3_vaa(),
)
.await;
let fee = update_fee(&deployer.instance, test_accumulator_update_data_bytes())
.await
.value;
let max_publish_time = TEST_ACCUMULATOR_ETH_USD_PRICE_FEED.price.publish_time;
let price_feeds = parse_price_feed_updates(
&deployer.instance,
fee,
max_publish_time,
max_publish_time - DEFAULT_VALID_TIME_PERIOD,
default_price_feed_ids(),
test_accumulator_update_data_bytes(),
)
.await
.value;
assert_eq!(price_feeds[0], TEST_ACCUMULATOR_ETH_USD_PRICE_FEED);
assert_eq!(price_feeds[1], TEST_ACCUMULATOR_USDC_USD_PRICE_FEED);
}
}

View File

@ -0,0 +1,102 @@
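//! Tests for `price`. The contract is constructed with `TEST_EXTENDED_TIME_PERIOD`
//! so the recorded, real-world update data is still considered fresh.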
use crate::utils::interface::{
pyth_core::{price, update_fee, update_price_feeds},
pyth_init::constructor,
};
use crate::utils::setup::setup_environment;
use pyth_sdk::{
constants::{
DEFAULT_SINGLE_UPDATE_FEE, TEST_ACCUMULATOR_ETH_USD_PRICE_FEED,
TEST_ACCUMULATOR_USDC_USD_PRICE_FEED, TEST_BATCH_ETH_USD_PRICE_FEED,
TEST_BATCH_USDC_USD_PRICE_FEED, TEST_EXTENDED_TIME_PERIOD,
},
pyth_utils::{
default_data_sources, default_price_feed_ids, guardian_set_upgrade_3_vaa,
test_accumulator_update_data_bytes, test_batch_update_data_bytes,
},
};
mod success {
use super::*;
#[tokio::test]
async fn gets_price_for_batch_update() {
let (_oracle_contract_id, deployer) = setup_environment().await.unwrap();
constructor(
&deployer.instance,
default_data_sources(),
DEFAULT_SINGLE_UPDATE_FEE,
TEST_EXTENDED_TIME_PERIOD, // As the contract checks against the current timestamp, this allows unit testing with old but real price updates.
guardian_set_upgrade_3_vaa(),
)
.await;
let fee = update_fee(&deployer.instance, test_batch_update_data_bytes())
.await
.value;
update_price_feeds(&deployer.instance, fee, test_batch_update_data_bytes()).await;
let eth_usd_price = price(&deployer.instance, default_price_feed_ids()[0])
.await
.value;
let usdc_usd_price = price(&deployer.instance, default_price_feed_ids()[1])
.await
.value;
assert_eq!(
(eth_usd_price.price as f64) * 10f64.powf(-(eth_usd_price.exponent as f64)),
(TEST_BATCH_ETH_USD_PRICE_FEED.price.price as f64)
* 10f64.powf(-(TEST_BATCH_ETH_USD_PRICE_FEED.price.exponent as f64)),
);
assert_eq!(
(usdc_usd_price.price as f64) * 10f64.powf(-(usdc_usd_price.exponent as f64)),
(TEST_BATCH_USDC_USD_PRICE_FEED.price.price as f64)
* 10f64.powf(-(TEST_BATCH_USDC_USD_PRICE_FEED.price.exponent as f64)),
);
}
#[tokio::test]
async fn gets_price_for_accumulator_update() {
let (_oracle_contract_id, deployer) = setup_environment().await.unwrap();
constructor(
&deployer.instance,
default_data_sources(),
DEFAULT_SINGLE_UPDATE_FEE,
TEST_EXTENDED_TIME_PERIOD, // As the contract checks against the current timestamp, this allows unit testing with old but real price updates.
guardian_set_upgrade_3_vaa(),
)
.await;
let fee = update_fee(&deployer.instance, test_accumulator_update_data_bytes())
.await
.value;
update_price_feeds(
&deployer.instance,
fee,
test_accumulator_update_data_bytes(),
)
.await;
let eth_usd_price = price(&deployer.instance, default_price_feed_ids()[0])
.await
.value;
let usdc_usd_price = price(&deployer.instance, default_price_feed_ids()[1])
.await
.value;
assert_eq!(
(eth_usd_price.price as f64) * 10f64.powf(-(eth_usd_price.exponent as f64)),
(TEST_ACCUMULATOR_ETH_USD_PRICE_FEED.price.price as f64)
* 10f64.powf(-(TEST_ACCUMULATOR_ETH_USD_PRICE_FEED.price.exponent as f64)),
);
assert_eq!(
(usdc_usd_price.price as f64) * 10f64.powf(-(usdc_usd_price.exponent as f64)),
(TEST_ACCUMULATOR_USDC_USD_PRICE_FEED.price.price as f64)
* 10f64.powf(-(TEST_ACCUMULATOR_USDC_USD_PRICE_FEED.price.exponent as f64)),
);
}
}

View File

@ -0,0 +1,119 @@
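//! Tests for `price_no_older_than`, queried with `TEST_EXTENDED_TIME_PERIOD` so the
//! recorded, real-world update data is still accepted.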
use crate::utils::interface::{
pyth_core::{update_fee, update_price_feeds},
pyth_init::constructor,
};
use crate::utils::setup::setup_environment;
use pyth_sdk::{
constants::{
DEFAULT_SINGLE_UPDATE_FEE, DEFAULT_VALID_TIME_PERIOD, TEST_ACCUMULATOR_ETH_USD_PRICE_FEED,
TEST_ACCUMULATOR_USDC_USD_PRICE_FEED, TEST_BATCH_ETH_USD_PRICE_FEED,
TEST_BATCH_USDC_USD_PRICE_FEED, TEST_EXTENDED_TIME_PERIOD,
},
pyth_utils::{
default_data_sources, default_price_feed_ids, guardian_set_upgrade_3_vaa,
test_accumulator_update_data_bytes, test_batch_update_data_bytes,
},
};
mod success {
use crate::utils::interface::pyth_core::price_no_older_than;
use super::*;
#[tokio::test]
async fn gets_price_no_older_than_for_batch_update() {
let (_oracle_contract_id, deployer) = setup_environment().await.unwrap();
constructor(
&deployer.instance,
default_data_sources(),
DEFAULT_SINGLE_UPDATE_FEE,
DEFAULT_VALID_TIME_PERIOD,
guardian_set_upgrade_3_vaa(),
)
.await;
let fee = update_fee(&deployer.instance, test_batch_update_data_bytes())
.await
.value;
update_price_feeds(&deployer.instance, fee, test_batch_update_data_bytes()).await;
let eth_usd_price = price_no_older_than(
&deployer.instance,
TEST_EXTENDED_TIME_PERIOD,
default_price_feed_ids()[0],
)
.await
.value;
let usdc_usd_price = price_no_older_than(
&deployer.instance,
TEST_EXTENDED_TIME_PERIOD,
default_price_feed_ids()[1],
)
.await
.value;
assert_eq!(
(eth_usd_price.price as f64) * 10f64.powf(-(eth_usd_price.exponent as f64)),
(TEST_BATCH_ETH_USD_PRICE_FEED.price.price as f64)
* 10f64.powf(-(TEST_BATCH_ETH_USD_PRICE_FEED.price.exponent as f64)),
);
assert_eq!(
(usdc_usd_price.price as f64) * 10f64.powf(-(usdc_usd_price.exponent as f64)),
(TEST_BATCH_USDC_USD_PRICE_FEED.price.price as f64)
* 10f64.powf(-(TEST_BATCH_USDC_USD_PRICE_FEED.price.exponent as f64)),
);
}
#[tokio::test]
async fn gets_price_no_older_than_for_accumulator_update() {
let (_oracle_contract_id, deployer) = setup_environment().await.unwrap();
constructor(
&deployer.instance,
default_data_sources(),
DEFAULT_SINGLE_UPDATE_FEE,
DEFAULT_VALID_TIME_PERIOD,
guardian_set_upgrade_3_vaa(),
)
.await;
let fee = update_fee(&deployer.instance, test_accumulator_update_data_bytes())
.await
.value;
update_price_feeds(
&deployer.instance,
fee,
test_accumulator_update_data_bytes(),
)
.await;
let eth_usd_price = price_no_older_than(
&deployer.instance,
TEST_EXTENDED_TIME_PERIOD,
default_price_feed_ids()[0],
)
.await
.value;
let usdc_usd_price = price_no_older_than(
&deployer.instance,
TEST_EXTENDED_TIME_PERIOD,
default_price_feed_ids()[1],
)
.await
.value;
assert_eq!(
(eth_usd_price.price as f64) * 10f64.powf(-(eth_usd_price.exponent as f64)),
(TEST_ACCUMULATOR_ETH_USD_PRICE_FEED.price.price as f64)
* 10f64.powf(-(TEST_ACCUMULATOR_ETH_USD_PRICE_FEED.price.exponent as f64)),
);
assert_eq!(
(usdc_usd_price.price as f64) * 10f64.powf(-(usdc_usd_price.exponent as f64)),
(TEST_ACCUMULATOR_USDC_USD_PRICE_FEED.price.price as f64)
* 10f64.powf(-(TEST_ACCUMULATOR_USDC_USD_PRICE_FEED.price.exponent as f64)),
);
}
}

View File

@ -0,0 +1,101 @@
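//! Tests for `price_unsafe`, covering both batch and accumulator update data.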
use crate::utils::interface::{
pyth_core::{price_unsafe, update_fee, update_price_feeds},
pyth_init::constructor,
};
use crate::utils::setup::setup_environment;
use pyth_sdk::{
constants::{
DEFAULT_SINGLE_UPDATE_FEE, DEFAULT_VALID_TIME_PERIOD, TEST_ACCUMULATOR_ETH_USD_PRICE_FEED,
TEST_ACCUMULATOR_USDC_USD_PRICE_FEED, TEST_BATCH_ETH_USD_PRICE_FEED,
TEST_BATCH_USDC_USD_PRICE_FEED,
},
pyth_utils::{
default_data_sources, default_price_feed_ids, guardian_set_upgrade_3_vaa,
test_accumulator_update_data_bytes, test_batch_update_data_bytes,
},
};
mod success {
use super::*;
#[tokio::test]
async fn gets_price_unsafe_for_batch_update() {
let (_oracle_contract_id, deployer) = setup_environment().await.unwrap();
constructor(
&deployer.instance,
default_data_sources(),
DEFAULT_SINGLE_UPDATE_FEE,
DEFAULT_VALID_TIME_PERIOD,
guardian_set_upgrade_3_vaa(),
)
.await;
let fee = update_fee(&deployer.instance, test_batch_update_data_bytes())
.await
.value;
update_price_feeds(&deployer.instance, fee, test_batch_update_data_bytes()).await;
let eth_usd_price = price_unsafe(&deployer.instance, default_price_feed_ids()[0])
.await
.value;
let usdc_usd_price = price_unsafe(&deployer.instance, default_price_feed_ids()[1])
.await
.value;
assert_eq!(
(eth_usd_price.price as f64) * 10f64.powf(-(eth_usd_price.exponent as f64)),
(TEST_BATCH_ETH_USD_PRICE_FEED.price.price as f64)
* 10f64.powf(-(TEST_BATCH_ETH_USD_PRICE_FEED.price.exponent as f64)),
);
assert_eq!(
(usdc_usd_price.price as f64) * 10f64.powf(-(usdc_usd_price.exponent as f64)),
(TEST_BATCH_USDC_USD_PRICE_FEED.price.price as f64)
* 10f64.powf(-(TEST_BATCH_USDC_USD_PRICE_FEED.price.exponent as f64)),
);
}
#[tokio::test]
async fn gets_price_unsafe_for_accumulator_update() {
let (_oracle_contract_id, deployer) = setup_environment().await.unwrap();
constructor(
&deployer.instance,
default_data_sources(),
DEFAULT_SINGLE_UPDATE_FEE,
DEFAULT_VALID_TIME_PERIOD,
guardian_set_upgrade_3_vaa(),
)
.await;
let fee = update_fee(&deployer.instance, test_accumulator_update_data_bytes())
.await
.value;
update_price_feeds(
&deployer.instance,
fee,
test_accumulator_update_data_bytes(),
)
.await;
let eth_usd_price = price_unsafe(&deployer.instance, default_price_feed_ids()[0])
.await
.value;
let usdc_usd_price = price_unsafe(&deployer.instance, default_price_feed_ids()[1])
.await
.value;
assert_eq!(
(eth_usd_price.price as f64) * 10f64.powf(-(eth_usd_price.exponent as f64)),
(TEST_ACCUMULATOR_ETH_USD_PRICE_FEED.price.price as f64)
* 10f64.powf(-(TEST_ACCUMULATOR_ETH_USD_PRICE_FEED.price.exponent as f64)),
);
assert_eq!(
(usdc_usd_price.price as f64) * 10f64.powf(-(usdc_usd_price.exponent as f64)),
(TEST_ACCUMULATOR_USDC_USD_PRICE_FEED.price.price as f64)
* 10f64.powf(-(TEST_ACCUMULATOR_USDC_USD_PRICE_FEED.price.exponent as f64)),
);
}
}

View File

@ -0,0 +1,53 @@
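//! Tests for `update_fee`, covering both batch and accumulator update data.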
use crate::utils::interface::{pyth_core::update_fee, pyth_init::constructor};
use crate::utils::setup::setup_environment;
use pyth_sdk::{
constants::{DEFAULT_SINGLE_UPDATE_FEE, DEFAULT_VALID_TIME_PERIOD},
pyth_utils::{
default_data_sources, guardian_set_upgrade_3_vaa, test_accumulator_update_data_bytes,
test_batch_update_data_bytes,
},
};
mod success {
use super::*;
#[tokio::test]
async fn gets_update_fee_for_batch_update() {
let (_oracle_contract_id, deployer) = setup_environment().await.unwrap();
constructor(
&deployer.instance,
default_data_sources(),
DEFAULT_SINGLE_UPDATE_FEE,
DEFAULT_VALID_TIME_PERIOD,
guardian_set_upgrade_3_vaa(),
)
.await;
let fee = update_fee(&deployer.instance, test_batch_update_data_bytes())
.await
.value;
assert_eq!(fee, test_batch_update_data_bytes().len() as u64);
}
#[tokio::test]
async fn gets_update_fee_for_accumulator_update() {
let (_oracle_contract_id, deployer) = setup_environment().await.unwrap();
constructor(
&deployer.instance,
default_data_sources(),
DEFAULT_SINGLE_UPDATE_FEE,
DEFAULT_VALID_TIME_PERIOD,
guardian_set_upgrade_3_vaa(),
)
.await;
let fee = update_fee(&deployer.instance, test_accumulator_update_data_bytes())
.await
.value;
assert_eq!(fee, 2);
}
}

View File

@ -0,0 +1,115 @@
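//! Tests for `update_price_feeds`, checking that the price feeds do not exist before
//! the update and exist afterwards.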
use crate::utils::interface::{
pyth_core::{update_fee, update_price_feeds},
pyth_info::price_feed_exists,
pyth_init::constructor,
};
use crate::utils::setup::setup_environment;
use pyth_sdk::{
constants::{DEFAULT_SINGLE_UPDATE_FEE, DEFAULT_VALID_TIME_PERIOD},
pyth_utils::{
default_data_sources, default_price_feed_ids, guardian_set_upgrade_3_vaa,
test_accumulator_update_data_bytes, test_batch_update_data_bytes,
},
};
mod success {
use super::*;
#[tokio::test]
async fn updates_price_feeds_for_batch_update() {
let (_oracle_contract_id, deployer) = setup_environment().await.unwrap();
constructor(
&deployer.instance,
default_data_sources(),
DEFAULT_SINGLE_UPDATE_FEE,
DEFAULT_VALID_TIME_PERIOD,
guardian_set_upgrade_3_vaa(),
)
.await;
let fee = update_fee(&deployer.instance, test_batch_update_data_bytes())
.await
.value;
// Initial values
assert_eq!(
(
price_feed_exists(&deployer.instance, default_price_feed_ids()[0])
.await
.value,
price_feed_exists(&deployer.instance, default_price_feed_ids()[1])
.await
.value
),
(false, false)
);
update_price_feeds(&deployer.instance, fee, test_batch_update_data_bytes()).await;
// Final values
assert_eq!(
(
price_feed_exists(&deployer.instance, default_price_feed_ids()[0])
.await
.value,
price_feed_exists(&deployer.instance, default_price_feed_ids()[1])
.await
.value
),
(true, true)
);
}
#[tokio::test]
async fn updates_price_feeds_for_accumulator_update() {
let (_oracle_contract_id, deployer) = setup_environment().await.unwrap();
constructor(
&deployer.instance,
default_data_sources(),
DEFAULT_SINGLE_UPDATE_FEE,
DEFAULT_VALID_TIME_PERIOD,
guardian_set_upgrade_3_vaa(),
)
.await;
let fee = update_fee(&deployer.instance, test_accumulator_update_data_bytes())
.await
.value;
// Initial values
assert_eq!(
(
price_feed_exists(&deployer.instance, default_price_feed_ids()[0])
.await
.value,
price_feed_exists(&deployer.instance, default_price_feed_ids()[1])
.await
.value
),
(false, false)
);
update_price_feeds(
&deployer.instance,
fee,
test_accumulator_update_data_bytes(),
)
.await;
// Final values
assert_eq!(
(
price_feed_exists(&deployer.instance, default_price_feed_ids()[0])
.await
.value,
price_feed_exists(&deployer.instance, default_price_feed_ids()[1])
.await
.value
),
(true, true)
);
}
}

View File

@ -0,0 +1,123 @@
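//! Tests for `update_price_feeds_if_necessary`, checking that the price feeds do not
//! exist before the call and exist afterwards.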
use crate::utils::interface::{
pyth_core::{update_fee, update_price_feeds_if_necessary},
pyth_info::price_feed_exists,
pyth_init::constructor,
};
use crate::utils::setup::setup_environment;
use pyth_sdk::{
constants::{DEFAULT_SINGLE_UPDATE_FEE, DEFAULT_VALID_TIME_PERIOD},
pyth_utils::{
default_data_sources, default_price_feed_ids, guardian_set_upgrade_3_vaa,
test_accumulator_update_data_bytes, test_batch_update_data_bytes,
},
};
mod success {
use super::*;
#[tokio::test]
async fn updates_price_feeds_if_necessary_for_batch_update() {
let (_oracle_contract_id, deployer) = setup_environment().await.unwrap();
constructor(
&deployer.instance,
default_data_sources(),
DEFAULT_SINGLE_UPDATE_FEE,
DEFAULT_VALID_TIME_PERIOD,
guardian_set_upgrade_3_vaa(),
)
.await;
let fee = update_fee(&deployer.instance, test_batch_update_data_bytes())
.await
.value;
// Initial values
assert_eq!(
(
price_feed_exists(&deployer.instance, default_price_feed_ids()[0])
.await
.value,
price_feed_exists(&deployer.instance, default_price_feed_ids()[1])
.await
.value
),
(false, false)
);
update_price_feeds_if_necessary(
&deployer.instance,
fee,
vec![default_price_feed_ids()[0]],
vec![1],
test_batch_update_data_bytes(),
)
.await;
// Final values
assert_eq!(
(
price_feed_exists(&deployer.instance, default_price_feed_ids()[0])
.await
.value,
price_feed_exists(&deployer.instance, default_price_feed_ids()[1])
.await
.value
),
(true, true)
);
}
#[tokio::test]
async fn updates_price_feeds_if_necessary_for_accumulator_update() {
let (_oracle_contract_id, deployer) = setup_environment().await.unwrap();
constructor(
&deployer.instance,
default_data_sources(),
DEFAULT_SINGLE_UPDATE_FEE,
DEFAULT_VALID_TIME_PERIOD,
guardian_set_upgrade_3_vaa(),
)
.await;
let fee = update_fee(&deployer.instance, test_accumulator_update_data_bytes())
.await
.value;
// Initial values
assert_eq!(
(
price_feed_exists(&deployer.instance, default_price_feed_ids()[0])
.await
.value,
price_feed_exists(&deployer.instance, default_price_feed_ids()[1])
.await
.value
),
(false, false)
);
update_price_feeds_if_necessary(
&deployer.instance,
fee,
vec![default_price_feed_ids()[0]],
vec![1],
test_accumulator_update_data_bytes(),
)
.await;
// Final values
assert_eq!(
(
price_feed_exists(&deployer.instance, default_price_feed_ids()[0])
.await
.value,
price_feed_exists(&deployer.instance, default_price_feed_ids()[1])
.await
.value
),
(true, true)
);
}
}

View File

@ -0,0 +1 @@
pub(crate) mod price_feed_unsafe;

View File

@ -0,0 +1,88 @@
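//! Tests for `price_feed_unsafe`, comparing the stored feeds against the expected
//! batch and accumulator test price feeds.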
use crate::utils::interface::{
pyth_core::{update_fee, update_price_feeds},
pyth_info::price_feed_unsafe,
pyth_init::constructor,
};
use crate::utils::setup::setup_environment;
use pyth_sdk::{
constants::{
DEFAULT_SINGLE_UPDATE_FEE, DEFAULT_VALID_TIME_PERIOD, TEST_ACCUMULATOR_ETH_USD_PRICE_FEED,
TEST_ACCUMULATOR_USDC_USD_PRICE_FEED, TEST_BATCH_ETH_USD_PRICE_FEED,
TEST_BATCH_USDC_USD_PRICE_FEED,
},
pyth_utils::{
default_data_sources, default_price_feed_ids, guardian_set_upgrade_3_vaa,
test_accumulator_update_data_bytes, test_batch_update_data_bytes,
},
};
mod success {
use super::*;
#[tokio::test]
async fn gets_price_feed_from_batch_update() {
let (_oracle_contract_id, deployer) = setup_environment().await.unwrap();
constructor(
&deployer.instance,
default_data_sources(),
DEFAULT_SINGLE_UPDATE_FEE,
DEFAULT_VALID_TIME_PERIOD,
guardian_set_upgrade_3_vaa(),
)
.await;
let fee = update_fee(&deployer.instance, test_batch_update_data_bytes())
.await
.value;
update_price_feeds(&deployer.instance, fee, test_batch_update_data_bytes()).await;
let eth_usd_price_feed = price_feed_unsafe(&deployer.instance, default_price_feed_ids()[0])
.await
.value;
let usdc_usd_price_feed =
price_feed_unsafe(&deployer.instance, default_price_feed_ids()[1])
.await
.value;
assert_eq!(eth_usd_price_feed, TEST_BATCH_ETH_USD_PRICE_FEED);
assert_eq!(usdc_usd_price_feed, TEST_BATCH_USDC_USD_PRICE_FEED);
}
#[tokio::test]
async fn gets_price_feed_from_accumulator_update() {
let (_oracle_contract_id, deployer) = setup_environment().await.unwrap();
constructor(
&deployer.instance,
default_data_sources(),
DEFAULT_SINGLE_UPDATE_FEE,
DEFAULT_VALID_TIME_PERIOD,
guardian_set_upgrade_3_vaa(),
)
.await;
let fee = update_fee(&deployer.instance, test_accumulator_update_data_bytes())
.await
.value;
update_price_feeds(
&deployer.instance,
fee,
test_accumulator_update_data_bytes(),
)
.await;
let eth_usd_price_feed = price_feed_unsafe(&deployer.instance, default_price_feed_ids()[0])
.await
.value;
let usdc_usd_price_feed =
price_feed_unsafe(&deployer.instance, default_price_feed_ids()[1])
.await
.value;
assert_eq!(eth_usd_price_feed, TEST_ACCUMULATOR_ETH_USD_PRICE_FEED);
assert_eq!(usdc_usd_price_feed, TEST_ACCUMULATOR_USDC_USD_PRICE_FEED);
}
}

View File

@ -0,0 +1,119 @@
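//! Tests for `constructor`, checking the initial and final values of the data sources,
//! fees, valid time period, Wormhole provider, guardian set, and ownership state.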
use crate::utils::interface::{
pyth_core::valid_time_period,
pyth_info::{owner, single_update_fee, valid_data_source, valid_data_sources},
pyth_init::constructor,
wormhole_guardians::{
current_guardian_set_index, current_wormhole_provider, governance_action_is_consumed,
},
};
use pyth_sdk::{
constants::{
DEFAULT_SINGLE_UPDATE_FEE, DEFAULT_VALID_TIME_PERIOD, UPGRADE_3_VAA_GOVERNANCE_ACTION_HASH,
},
pyth_utils::{
default_data_sources, guardian_set_upgrade_3_vaa, ConstructedEvent, State, WormholeProvider,
},
};
use fuels::types::Bits256;
mod success {
use crate::utils::setup::setup_environment;
use super::*;
#[tokio::test]
async fn constructs() {
let (_oracle_contract_id, deployer) = setup_environment().await.unwrap();
// Initial values
assert!(
!valid_data_source(&deployer.instance, &default_data_sources()[0])
.await
.value
);
assert_eq!(valid_data_sources(&deployer.instance).await.value.len(), 0);
assert_eq!(valid_time_period(&deployer.instance).await.value, 0);
assert_eq!(single_update_fee(&deployer.instance).await.value, 0);
assert!(
!governance_action_is_consumed(
&deployer.instance,
UPGRADE_3_VAA_GOVERNANCE_ACTION_HASH
)
.await
.value
);
assert_eq!(
current_guardian_set_index(&deployer.instance).await.value,
0
);
assert_eq!(
current_wormhole_provider(&deployer.instance).await.value,
WormholeProvider {
governance_chain_id: 0,
governance_contract: Bits256::zeroed(),
}
);
assert_eq!(owner(&deployer.instance).await.value, State::Uninitialized);
let response = constructor(
&deployer.instance,
default_data_sources(),
DEFAULT_SINGLE_UPDATE_FEE,
DEFAULT_VALID_TIME_PERIOD,
guardian_set_upgrade_3_vaa(),
)
.await;
let log = response
.decode_logs_with_type::<ConstructedEvent>()
.unwrap();
let event = log.first().unwrap();
assert_eq!(
*event,
ConstructedEvent {
guardian_set_index: 3,
}
);
// Final values
assert!(
valid_data_source(&deployer.instance, &default_data_sources()[0])
.await
.value
);
assert_eq!(
&valid_data_sources(&deployer.instance).await.value.len(),
&default_data_sources().len()
);
assert_eq!(
valid_time_period(&deployer.instance).await.value,
DEFAULT_VALID_TIME_PERIOD
);
assert_eq!(
single_update_fee(&deployer.instance).await.value,
DEFAULT_SINGLE_UPDATE_FEE
);
assert!(
governance_action_is_consumed(&deployer.instance, UPGRADE_3_VAA_GOVERNANCE_ACTION_HASH)
.await
.value
);
assert_eq!(
current_guardian_set_index(&deployer.instance).await.value,
3
);
assert_eq!(
current_wormhole_provider(&deployer.instance).await.value,
WormholeProvider {
governance_chain_id: 1,
governance_contract: Bits256([
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 4
])
}
);
assert_eq!(owner(&deployer.instance).await.value, State::Revoked);
}
}

View File

@ -0,0 +1 @@
pub(crate) mod constructor;

View File

@ -0,0 +1,2 @@
mod functions;
mod utils;

View File

@ -0,0 +1,4 @@
pub(crate) mod pyth_core;
pub(crate) mod pyth_info;
pub(crate) mod pyth_init;
pub(crate) mod wormhole_guardians;

View File

@ -0,0 +1,156 @@
use fuels::{
accounts::wallet::WalletUnlocked,
prelude::{Bytes, CallParameters, TxPolicies},
programs::call_response::FuelCallResponse,
types::Bits256,
};
use pyth_sdk::pyth_utils::{Price, PriceFeed, PythOracleContract};
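// Thin wrappers around the Pyth oracle contract's core ABI methods. Each helper
// forwards the call through the generated bindings and unwraps the response;
// fee-paying calls attach the fee via `CallParameters::with_amount`.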
pub(crate) async fn ema_price(
contract: &PythOracleContract<WalletUnlocked>,
price_feed_id: Bits256,
) -> FuelCallResponse<Price> {
contract
.methods()
.ema_price(price_feed_id)
.call()
.await
.unwrap()
}
pub(crate) async fn ema_price_no_older_than(
contract: &PythOracleContract<WalletUnlocked>,
time_period: u64,
price_feed_id: Bits256,
) -> FuelCallResponse<Price> {
contract
.methods()
.ema_price_no_older_than(time_period, price_feed_id)
.call()
.await
.unwrap()
}
pub(crate) async fn ema_price_unsafe(
contract: &PythOracleContract<WalletUnlocked>,
price_feed_id: Bits256,
) -> FuelCallResponse<Price> {
contract
.methods()
.ema_price_unsafe(price_feed_id)
.call()
.await
.unwrap()
}
pub(crate) async fn parse_price_feed_updates(
contract: &PythOracleContract<WalletUnlocked>,
fee: u64,
max_publish_time: u64,
min_publish_time: u64,
price_feed_ids: Vec<Bits256>,
update_data: Vec<Bytes>,
) -> FuelCallResponse<Vec<PriceFeed>> {
contract
.methods()
.parse_price_feed_updates(
max_publish_time,
min_publish_time,
price_feed_ids,
update_data,
)
.with_tx_policies(TxPolicies::default())
.call_params(CallParameters::default().with_amount(fee))
.unwrap()
.call()
.await
.unwrap()
}
pub(crate) async fn price(
contract: &PythOracleContract<WalletUnlocked>,
price_feed_id: Bits256,
) -> FuelCallResponse<Price> {
contract
.methods()
.price(price_feed_id)
.call()
.await
.unwrap()
}
pub(crate) async fn price_no_older_than(
contract: &PythOracleContract<WalletUnlocked>,
time_period: u64,
price_feed_id: Bits256,
) -> FuelCallResponse<Price> {
contract
.methods()
.price_no_older_than(time_period, price_feed_id)
.call()
.await
.unwrap()
}
pub(crate) async fn price_unsafe(
contract: &PythOracleContract<WalletUnlocked>,
price_feed_id: Bits256,
) -> FuelCallResponse<Price> {
contract
.methods()
.price_unsafe(price_feed_id)
.call()
.await
.unwrap()
}
pub(crate) async fn update_fee(
contract: &PythOracleContract<WalletUnlocked>,
update_data: Vec<Bytes>,
) -> FuelCallResponse<u64> {
contract
.methods()
.update_fee(update_data)
.call()
.await
.unwrap()
}
pub(crate) async fn update_price_feeds(
contract: &PythOracleContract<WalletUnlocked>,
fee: u64,
update_data: Vec<Bytes>,
) -> FuelCallResponse<()> {
contract
.methods()
.update_price_feeds(update_data)
.call_params(CallParameters::default().with_amount(fee))
.unwrap()
.call()
.await
.unwrap()
}
pub(crate) async fn update_price_feeds_if_necessary(
contract: &PythOracleContract<WalletUnlocked>,
fee: u64,
price_feed_ids: Vec<Bits256>,
publish_times: Vec<u64>,
update_data: Vec<Bytes>,
) -> FuelCallResponse<()> {
contract
.methods()
.update_price_feeds_if_necessary(price_feed_ids, publish_times, update_data)
.call_params(CallParameters::default().with_amount(fee))
.unwrap()
.call()
.await
.unwrap()
}
pub(crate) async fn valid_time_period(
contract: &PythOracleContract<WalletUnlocked>,
) -> FuelCallResponse<u64> {
contract.methods().valid_time_period().call().await.unwrap()
}

View File

@ -0,0 +1,64 @@
use fuels::{
accounts::wallet::WalletUnlocked, programs::call_response::FuelCallResponse, types::Bits256,
};
use pyth_sdk::pyth_utils::{DataSource, PriceFeed, PythOracleContract, State};
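// Read-only wrappers around the contract's info methods (owner, update fee,
// valid data sources, and stored price feeds).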
pub(crate) async fn owner(
contract: &PythOracleContract<WalletUnlocked>,
) -> FuelCallResponse<State> {
contract.methods().owner().call().await.unwrap()
}
pub(crate) async fn price_feed_exists(
contract: &PythOracleContract<WalletUnlocked>,
price_feed_id: Bits256,
) -> FuelCallResponse<bool> {
contract
.methods()
.price_feed_exists(price_feed_id)
.call()
.await
.unwrap()
}
pub(crate) async fn price_feed_unsafe(
contract: &PythOracleContract<WalletUnlocked>,
price_feed_id: Bits256,
) -> FuelCallResponse<PriceFeed> {
contract
.methods()
.price_feed_unsafe(price_feed_id)
.call()
.await
.unwrap()
}
pub(crate) async fn single_update_fee(
contract: &PythOracleContract<WalletUnlocked>,
) -> FuelCallResponse<u64> {
contract.methods().single_update_fee().call().await.unwrap()
}
pub(crate) async fn valid_data_source(
contract: &PythOracleContract<WalletUnlocked>,
data_source: &DataSource,
) -> FuelCallResponse<bool> {
contract
.methods()
.valid_data_source(data_source.clone())
.call()
.await
.unwrap()
}
pub(crate) async fn valid_data_sources(
contract: &PythOracleContract<WalletUnlocked>,
) -> FuelCallResponse<Vec<DataSource>> {
contract
.methods()
.valid_data_sources()
.call()
.await
.unwrap()
}

View File

@ -0,0 +1,25 @@
use fuels::{
accounts::wallet::WalletUnlocked, prelude::Bytes, programs::call_response::FuelCallResponse,
};
use pyth_sdk::pyth_utils::{DataSource, PythOracleContract};
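// Wrapper around the contract's `constructor` method, which takes the initial data
// sources, single update fee, valid time period, and a Wormhole guardian set
// upgrade VAA.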
pub(crate) async fn constructor(
contract: &PythOracleContract<WalletUnlocked>,
data_sources: Vec<DataSource>,
single_update_fee: u64,
valid_time_period_seconds: u64,
wormhole_guardian_set_upgrade: Bytes,
) -> FuelCallResponse<()> {
contract
.methods()
.constructor(
data_sources,
single_update_fee,
valid_time_period_seconds,
wormhole_guardian_set_upgrade,
)
.call()
.await
.unwrap()
}

View File

@ -0,0 +1,45 @@
use fuels::{
accounts::wallet::WalletUnlocked, programs::call_response::FuelCallResponse, types::Bits256,
};
use pyth_sdk::pyth_utils::{GuardianSet, PythOracleContract, WormholeProvider};
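// Wrappers around the contract's Wormhole guardian set and governance getters.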
pub(crate) async fn current_guardian_set_index(
contract: &PythOracleContract<WalletUnlocked>,
) -> FuelCallResponse<u32> {
contract
.methods()
.current_guardian_set_index()
.call()
.await
.unwrap()
}
pub(crate) async fn current_wormhole_provider(
contract: &PythOracleContract<WalletUnlocked>,
) -> FuelCallResponse<WormholeProvider> {
contract
.methods()
.current_wormhole_provider()
.call()
.await
.unwrap()
}
pub(crate) async fn governance_action_is_consumed(
contract: &PythOracleContract<WalletUnlocked>,
governance_action_hash: Bits256,
) -> FuelCallResponse<bool> {
contract
.methods()
.governance_action_is_consumed(governance_action_hash)
.call()
.await
.unwrap()
}
pub(crate) async fn _guardian_set(
contract: &PythOracleContract<WalletUnlocked>,
index: u32,
) -> FuelCallResponse<GuardianSet> {
contract.methods().guardian_set(index).call().await.unwrap()
}

View File

@ -0,0 +1,2 @@
pub(crate) mod interface;
pub(crate) mod setup;

View File

@ -0,0 +1,27 @@
use fuels::{
test_helpers::{launch_custom_provider_and_get_wallets, WalletsConfig},
types::{errors::Error, ContractId},
};
use pyth_sdk::pyth_utils::Pyth;
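/// Launches a local Fuel network with a single funded wallet, deploys the Pyth
/// oracle contract, and returns its contract id together with the deployed instance.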
pub(crate) async fn setup_environment() -> Result<(ContractId, Pyth), Error> {
// Launch a local network and deploy the contract
let mut wallets = launch_custom_provider_and_get_wallets(
WalletsConfig::new(
Some(1), /* Single wallet */
Some(1), /* Single coin (UTXO) */
Some(1_000_000_000), /* Amount per coin */
),
None,
None,
)
.await?;
let deployer_wallet = wallets
.pop()
.ok_or_else(|| Error::WalletError("No deployer wallet found".to_string()))?;
let pyth = Pyth::deploy(deployer_wallet).await?;
Ok((pyth.instance.contract_id().into(), pyth))
}