Ethereum: Move relayer (#3866)

* Ethereum: Move relayer

* JS/SDK: Handle relayer move

* More tweaks

* ci: fix relayer deploy

* Code review rework

* Hack to fix tilt

* Another hack

* ci: revert relayer addresses

* Fix sdk tests

* Move ethereum-relayer to relayer/ethereum

* Yet another fix for tilt

* Code review rework

---------

Co-authored-by: Evan Gray <battledingo@gmail.com>
Author: bruce-riley, 2024-04-11 07:46:36 -05:00 (committed via GitHub)
Commit: 86ef5bff5a, parent: b78317b9b5
108 changed files with 51769 additions and 66 deletions

.github/CODEOWNERS

@@ -9,10 +9,7 @@
/bitcoin/ @evan-gray
/cosmwasm/ @nik-suri @kcsongor @a5-pickle
/ethereum/ @a5-pickle @gator-boi @kcsongor
/ethereum/contracts/relayer @nonergodic @gator-boi
/ethereum/contracts/interfaces/relayer @nonergodic @gator-boi
/ethereum/forge-test/relayer @nonergodic @gator-boi
/ethereum/ts-scripts/relayer @nonergodic @gator-boi
/relayer/ethereum @nonergodic @gator-boi
/near/ @evan-gray @kcsongor
/solana/ @kcsongor @a5-pickle
/sui/ @kcsongor @a5-pickle @gator-boi

@@ -73,6 +73,17 @@ jobs:
        uses: foundry-rs/foundry-toolchain@v1
      - run: cd ethereum && make test-push0 && make test
  relayer-ethereum:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-node@v2
        with:
          node-version: "16"
      - name: Install Foundry
        uses: foundry-rs/foundry-toolchain@v1
      - run: cd relayer/ethereum && make test-push0 && make test
  ethereum-upgrade:
    runs-on: ubuntu-latest
    steps:
@@ -316,7 +327,7 @@ jobs:
      - uses: streetsidesoftware/cspell-action@214db1e3138f326d33b7a6a51c92852e89ab0618
        with:
          # For now, only lint markdown files
          files: '**/*.md'
          files: "**/*.md"
          inline: warning
          # Only check files in the PR or push
          incremental_files_only: true

@@ -458,11 +458,12 @@ if solana or pythnet:
docker_build(
ref = "eth-node",
context = "./ethereum",
context = ".",
only = ["./ethereum", "./relayer/ethereum"],
dockerfile = "./ethereum/Dockerfile",
# ignore local node_modules (in case they're present)
ignore = ["./node_modules"],
ignore = ["./ethereum/node_modules","./relayer/ethereum/node_modules"],
build_args = {"num_guardians": str(num_guardians), "dev": str(not ci)},
# sync external scripts for incremental development
@@ -522,7 +523,7 @@ if generic_relayer:
docker_build(
ref = "relayer-engine",
context = ".",
only = ["./relayer/generic_relayer", "./ethereum/ts-scripts/relayer/config"],
only = ["./relayer/generic_relayer", "./relayer/ethereum/ts-scripts/relayer/config"],
dockerfile = "relayer/generic_relayer/relayer-engine-v2/Dockerfile",
build_args = {"dev": str(not ci)}
)

@@ -57,7 +57,7 @@ spec:
command:
- /bin/sh
- -c
- "npm run migrate && npx truffle exec scripts/deploy_test_token.js && npx truffle exec scripts/register_all_chains.js && npm run deploy-relayers-evm1 && nc -lkn 2000"
- "cd ../../ethereum && npm run migrate && npx truffle exec scripts/deploy_test_token.js && npx truffle exec scripts/register_all_chains.js && cd ../relayer/ethereum && npm run deploy-relayers-evm1 && nc -lkn 2000"
readinessProbe:
periodSeconds: 1
failureThreshold: 300
@@ -69,6 +69,6 @@ spec:
command:
- /bin/sh
- -c
- "npx truffle exec mine.js"
- "cd ../../ethereum && npx truffle exec mine.js"
---

@@ -58,7 +58,7 @@ spec:
command:
- /bin/sh
- -c
- "sed -i 's/CHAIN_ID=0x2/CHAIN_ID=0x4/g;s/EVM_CHAIN_ID=1337/EVM_CHAIN_ID=1397/g' .env && npm run migrate && npx truffle exec scripts/deploy_test_token.js && npx truffle exec scripts/register_all_chains.js && npm run deploy-relayers-evm2 && nc -lkn 2000"
- "cd ../../ethereum && sed -i 's/CHAIN_ID=0x2/CHAIN_ID=0x4/g;s/EVM_CHAIN_ID=1337/EVM_CHAIN_ID=1397/g' .env && npm run migrate && npx truffle exec scripts/deploy_test_token.js && npx truffle exec scripts/register_all_chains.js && cd ../relayer/ethereum && sed -i 's/CHAIN_ID=0x2/CHAIN_ID=0x4/g;s/EVM_CHAIN_ID=1337/EVM_CHAIN_ID=1397/g' .env && npm run deploy-relayers-evm2 && nc -lkn 2000"
readinessProbe:
periodSeconds: 1
failureThreshold: 300
@@ -70,4 +70,4 @@ spec:
command:
- /bin/sh
- -c
- "npx truffle exec mine.js"
- "cd ../../ethereum && npx truffle exec mine.js"

@@ -26,11 +26,11 @@ WORKDIR /home/node/app
# Fix git ssh error
RUN git config --global url."https://".insteadOf ssh://
WORKDIR /home/node/app
WORKDIR /home/node/app/ethereum
# Only invalidate the npm install step if package.json changed
COPY --chown=node:node package.json .
COPY --chown=node:node package-lock.json .
COPY --chown=node:node ethereum/package.json .
COPY --chown=node:node ethereum/package-lock.json .
COPY --from=const-export --chown=node:node .env.0x .env
@@ -49,7 +49,29 @@ RUN rm -rf node_modules && mv node_modules_cache node_modules
ARG dev
ENV DEV=$dev
COPY --chown=node:node . .
COPY --chown=node:node ethereum .
RUN make build
### RELAYER ###
WORKDIR /home/node/app/relayer/ethereum
COPY --chown=node:node relayer/ethereum/package.json .
COPY --chown=node:node relayer/ethereum/package-lock.json .
COPY --from=const-export --chown=node:node .env.0x .env
RUN --mount=type=cache,uid=1000,gid=1000,target=/home/node/.npm \
--mount=type=cache,uid=1000,gid=1000,target=node_modules \
npm ci && \
cp -R node_modules node_modules_cache
RUN rm -rf node_modules && mv node_modules_cache node_modules
ARG dev
ENV DEV=$dev
COPY --chown=node:node relayer/ethereum .
RUN make build
ARG num_guardians

@@ -19,8 +19,7 @@
"scripts": {
"build:core": "truffle compile",
"build:forge": "if [ \"$DEV\" = \"True\" ]; then forge build; else forge build --via-ir; fi",
"build": "npm run build:core && npm run build:forge && typechain --target=ethers-v5 --out-dir=./ethers-contracts \"build-forge/!(test).sol/*.json\"&& npm run erase-types",
"erase-types": "ts-node ./ts-scripts/relayer/eraseTypes.ts ./contracts/interfaces/relayer/IDeliveryProviderTyped.sol ./contracts/interfaces/relayer/IWormholeRelayerTyped.sol",
"build": "npm run build:core && npm run build:forge && typechain --target=ethers-v5 --out-dir=./ethers-contracts \"build-forge/!(test).sol/*.json\"",
"test": "npm run build:core && mkdir -p build/contracts && cp node_modules/@openzeppelin/contracts/build/contracts/* build/contracts/ && truffle test --network test --compile-none",
"migrate": "npm run build:core && mkdir -p build/contracts && cp node_modules/@openzeppelin/contracts/build/contracts/* build/contracts/ && truffle migrate --to 4 --compile-none",
"deploy-bridge-implementation-only": "npm run build:core && mkdir -p build/contracts && cp node_modules/@openzeppelin/contracts/build/contracts/* build/contracts/ && truffle migrate --f 6 --to 6 --compile-none",
@@ -31,10 +30,8 @@
"verify": "patch -u -f node_modules/truffle-plugin-verify/constants.js -i truffle-verify-constants.patch; truffle run verify $npm_config_module@$npm_config_contract_address --network $npm_config_network",
"verify-token": "patch -u -f node_modules/truffle-plugin-verify/constants.js -i truffle-verify-constants.patch; truffle run verify BridgeToken@$npm_config_contract_address --forceConstructorArgs string:$npm_config_constructor_args --network $npm_config_network",
"abigen": "truffle run abigen",
"deploy-relayers-evm1": "ENV=kubernetes CONTAINER=evm1 bash ./ts-scripts/relayer/shell/deployInContainer.sh",
"deploy-relayers-evm2": "ENV=kubernetes CONTAINER=evm2 bash ./ts-scripts/relayer/shell/deployInContainer.sh",
"typecheck": "tsc --noEmit --skipLibCheck"
},
},
"author": "",
"license": "ISC",
"dependencies": {

relayer/ethereum/.gitignore (new file)

@@ -0,0 +1,4 @@
build-forge/
cache
ethers-contracts
lib

relayer/ethereum/Makefile (new file)

@@ -0,0 +1,48 @@
SOURCE_FILES:=$(shell find contracts -name "*.sol")

.PHONY: dependencies test clean all

all: build

node_modules: package-lock.json
	touch -m node_modules
	npm ci

# Note: Forge really wants to manage dependencies via submodules, but that
# workflow is a little awkward. There's currently no support for a more
# traditional package manager workflow (with a package manifest file and
# installation into a subdirectory that can be gitignored).
# Instead, we just specify the dependencies here. make will then take care of
# installing them if they are not yet present.
# When adding a new dependency, make sure to specify the exact commit hash, and
# the --no-git and --no-commit flags (see lib/forge-std below)
.PHONY: forge_dependencies
forge_dependencies: lib/forge-std lib/openzeppelin-contracts

lib/forge-std:
	forge install foundry-rs/forge-std@v1.5.5 --no-git --no-commit

lib/openzeppelin-contracts:
	forge install openzeppelin/openzeppelin-contracts@0457042d93d9dfd760dbaa06a4d2f1216fdbe297 --no-git --no-commit

dependencies: node_modules forge_dependencies

build: forge_dependencies node_modules ${SOURCE_FILES}
	npm run build

.env: .env.test
	cp $< $@

test: test-forge

.PHONY:
test-forge: dependencies
	forge test --no-match-test .*_KEVM # ignore KEVM tests (see PROOFS.md)

# Verify that the contracts do not include PUSH0 opcodes
test-push0: dependencies
	forge build --extra-output evm.bytecode.opcodes
	@if grep -qr --include \*.json PUSH0 ./build-forge; then echo "Contract uses PUSH0 instruction" 1>&2; exit 1; fi

clean:
	rm -rf .env node_modules build-forge ethers-contracts lib/forge-std lib/openzeppelin-contracts

@@ -0,0 +1,39 @@
# Relayer - Ethereum Contracts

These smart contracts allow for relaying on EVM chains.

### Dependencies

The relayer contracts are built with Forge. See below for details on installing Forge.

For the required `solc` version, the required EVM version, and the Solidity library dependencies, see the [config file](foundry.toml).

### Building

To build the contracts, run:

```sh
wormhole/relayer/ethereum$ make build
```

### Deploying

For details on deploying the relayer contracts, see the scripts [readme](ts-scripts/relayer/README.md).

### Testing

The tests for the relayer contracts reside in `forge-test`. To run them:

```sh
wormhole/relayer/ethereum$ forge test
```

### Installing Forge

Some tests and scripts use [Foundry](https://getfoundry.sh/). It can be installed via the official installer, or by running

```sh
wormhole/relayer/ethereum$ ../../scripts/install-foundry
```

The installer script installs Foundry and the `solc` version required to build the contracts.

@@ -0,0 +1,142 @@
// contracts/Messages.sol
// SPDX-License-Identifier: Apache 2
pragma solidity ^0.8.0;
interface IWormhole {
struct GuardianSet {
address[] keys;
uint32 expirationTime;
}
struct Signature {
bytes32 r;
bytes32 s;
uint8 v;
uint8 guardianIndex;
}
struct VM {
uint8 version;
uint32 timestamp;
uint32 nonce;
uint16 emitterChainId;
bytes32 emitterAddress;
uint64 sequence;
uint8 consistencyLevel;
bytes payload;
uint32 guardianSetIndex;
Signature[] signatures;
bytes32 hash;
}
struct ContractUpgrade {
bytes32 module;
uint8 action;
uint16 chain;
address newContract;
}
struct GuardianSetUpgrade {
bytes32 module;
uint8 action;
uint16 chain;
GuardianSet newGuardianSet;
uint32 newGuardianSetIndex;
}
struct SetMessageFee {
bytes32 module;
uint8 action;
uint16 chain;
uint256 messageFee;
}
struct TransferFees {
bytes32 module;
uint8 action;
uint16 chain;
uint256 amount;
bytes32 recipient;
}
struct RecoverChainId {
bytes32 module;
uint8 action;
uint256 evmChainId;
uint16 newChainId;
}
event LogMessagePublished(address indexed sender, uint64 sequence, uint32 nonce, bytes payload, uint8 consistencyLevel);
event ContractUpgraded(address indexed oldContract, address indexed newContract);
event GuardianSetAdded(uint32 indexed index);
function publishMessage(
uint32 nonce,
bytes memory payload,
uint8 consistencyLevel
) external payable returns (uint64 sequence);
function initialize() external;
function parseAndVerifyVM(bytes calldata encodedVM) external view returns (VM memory vm, bool valid, string memory reason);
function verifyVM(VM memory vm) external view returns (bool valid, string memory reason);
function verifySignatures(bytes32 hash, Signature[] memory signatures, GuardianSet memory guardianSet) external pure returns (bool valid, string memory reason);
function parseVM(bytes memory encodedVM) external pure returns (VM memory vm);
function quorum(uint numGuardians) external pure returns (uint numSignaturesRequiredForQuorum);
function getGuardianSet(uint32 index) external view returns (GuardianSet memory);
function getCurrentGuardianSetIndex() external view returns (uint32);
function getGuardianSetExpiry() external view returns (uint32);
function governanceActionIsConsumed(bytes32 hash) external view returns (bool);
function isInitialized(address impl) external view returns (bool);
function chainId() external view returns (uint16);
function isFork() external view returns (bool);
function governanceChainId() external view returns (uint16);
function governanceContract() external view returns (bytes32);
function messageFee() external view returns (uint256);
function evmChainId() external view returns (uint256);
function nextSequence(address emitter) external view returns (uint64);
function parseContractUpgrade(bytes memory encodedUpgrade) external pure returns (ContractUpgrade memory cu);
function parseGuardianSetUpgrade(bytes memory encodedUpgrade) external pure returns (GuardianSetUpgrade memory gsu);
function parseSetMessageFee(bytes memory encodedSetMessageFee) external pure returns (SetMessageFee memory smf);
function parseTransferFees(bytes memory encodedTransferFees) external pure returns (TransferFees memory tf);
function parseRecoverChainId(bytes memory encodedRecoverChainId) external pure returns (RecoverChainId memory rci);
function submitContractUpgrade(bytes memory _vm) external;
function submitSetMessageFee(bytes memory _vm) external;
function submitNewGuardianSet(bytes memory _vm) external;
function submitTransferFees(bytes memory _vm) external;
function submitRecoverChainId(bytes memory _vm) external;
}

@@ -0,0 +1,510 @@
// SPDX-License-Identifier: Unlicense
/*
* @title Solidity Bytes Arrays Utils
* @author Gonçalo <goncalo.sa@consensys.net>
*
* @dev Bytes tightly packed arrays utility library for ethereum contracts written in Solidity.
* The library lets you concatenate, slice and type cast bytes arrays both in memory and storage.
*/
pragma solidity >=0.8.0 <0.9.0;
library BytesLib {
function concat(
bytes memory _preBytes,
bytes memory _postBytes
)
internal
pure
returns (bytes memory)
{
bytes memory tempBytes;
assembly {
// Get a location of some free memory and store it in tempBytes as
// Solidity does for memory variables.
tempBytes := mload(0x40)
// Store the length of the first bytes array at the beginning of
// the memory for tempBytes.
let length := mload(_preBytes)
mstore(tempBytes, length)
// Maintain a memory counter for the current write location in the
// temp bytes array by adding the 32 bytes for the array length to
// the starting location.
let mc := add(tempBytes, 0x20)
// Stop copying when the memory counter reaches the length of the
// first bytes array.
let end := add(mc, length)
for {
// Initialize a copy counter to the start of the _preBytes data,
// 32 bytes into its memory.
let cc := add(_preBytes, 0x20)
} lt(mc, end) {
// Increase both counters by 32 bytes each iteration.
mc := add(mc, 0x20)
cc := add(cc, 0x20)
} {
// Write the _preBytes data into the tempBytes memory 32 bytes
// at a time.
mstore(mc, mload(cc))
}
// Add the length of _postBytes to the current length of tempBytes
// and store it as the new length in the first 32 bytes of the
// tempBytes memory.
length := mload(_postBytes)
mstore(tempBytes, add(length, mload(tempBytes)))
// Move the memory counter back from a multiple of 0x20 to the
// actual end of the _preBytes data.
mc := end
// Stop copying when the memory counter reaches the new combined
// length of the arrays.
end := add(mc, length)
for {
let cc := add(_postBytes, 0x20)
} lt(mc, end) {
mc := add(mc, 0x20)
cc := add(cc, 0x20)
} {
mstore(mc, mload(cc))
}
// Update the free-memory pointer by padding our last write location
// to 32 bytes: add 31 bytes to the end of tempBytes to move to the
// next 32 byte block, then round down to the nearest multiple of
// 32. If the sum of the length of the two arrays is zero then add
// one before rounding down to leave a blank 32 bytes (the length block with 0).
mstore(0x40, and(
add(add(end, iszero(add(length, mload(_preBytes)))), 31),
not(31) // Round down to the nearest 32 bytes.
))
}
return tempBytes;
}
function concatStorage(bytes storage _preBytes, bytes memory _postBytes) internal {
assembly {
// Read the first 32 bytes of _preBytes storage, which is the length
// of the array. (We don't need to use the offset into the slot
// because arrays use the entire slot.)
let fslot := sload(_preBytes.slot)
// Arrays of 31 bytes or less have an even value in their slot,
// while longer arrays have an odd value. The actual length is
// the slot divided by two for odd values, and the lowest order
// byte divided by two for even values.
// If the slot is even, bitwise and the slot with 255 and divide by
// two to get the length. If the slot is odd, bitwise and the slot
// with -1 and divide by two.
let slength := div(and(fslot, sub(mul(0x100, iszero(and(fslot, 1))), 1)), 2)
let mlength := mload(_postBytes)
let newlength := add(slength, mlength)
// slength can contain both the length and contents of the array
// if length < 32 bytes so let's prepare for that
// v. http://solidity.readthedocs.io/en/latest/miscellaneous.html#layout-of-state-variables-in-storage
switch add(lt(slength, 32), lt(newlength, 32))
case 2 {
// Since the new array still fits in the slot, we just need to
// update the contents of the slot.
// uint256(bytes_storage) = uint256(bytes_storage) + uint256(bytes_memory) + new_length
sstore(
_preBytes.slot,
// all the modifications to the slot are inside this
// next block
add(
// we can just add to the slot contents because the
// bytes we want to change are the LSBs
fslot,
add(
mul(
div(
// load the bytes from memory
mload(add(_postBytes, 0x20)),
// zero all bytes to the right
exp(0x100, sub(32, mlength))
),
// and now shift left the number of bytes to
// leave space for the length in the slot
exp(0x100, sub(32, newlength))
),
// increase length by the double of the memory
// bytes length
mul(mlength, 2)
)
)
)
}
case 1 {
// The stored value fits in the slot, but the combined value
// will exceed it.
// get the keccak hash to get the contents of the array
mstore(0x0, _preBytes.slot)
let sc := add(keccak256(0x0, 0x20), div(slength, 32))
// save new length
sstore(_preBytes.slot, add(mul(newlength, 2), 1))
// The contents of the _postBytes array start 32 bytes into
// the structure. Our first read should obtain the `submod`
// bytes that can fit into the unused space in the last word
// of the stored array. To get this, we read 32 bytes starting
// from `submod`, so the data we read overlaps with the array
// contents by `submod` bytes. Masking the lowest-order
// `submod` bytes allows us to add that value directly to the
// stored value.
let submod := sub(32, slength)
let mc := add(_postBytes, submod)
let end := add(_postBytes, mlength)
let mask := sub(exp(0x100, submod), 1)
sstore(
sc,
add(
and(
fslot,
0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00
),
and(mload(mc), mask)
)
)
for {
mc := add(mc, 0x20)
sc := add(sc, 1)
} lt(mc, end) {
sc := add(sc, 1)
mc := add(mc, 0x20)
} {
sstore(sc, mload(mc))
}
mask := exp(0x100, sub(mc, end))
sstore(sc, mul(div(mload(mc), mask), mask))
}
default {
// get the keccak hash to get the contents of the array
mstore(0x0, _preBytes.slot)
// Start copying to the last used word of the stored array.
let sc := add(keccak256(0x0, 0x20), div(slength, 32))
// save new length
sstore(_preBytes.slot, add(mul(newlength, 2), 1))
// Copy over the first `submod` bytes of the new data as in
// case 1 above.
let slengthmod := mod(slength, 32)
let mlengthmod := mod(mlength, 32)
let submod := sub(32, slengthmod)
let mc := add(_postBytes, submod)
let end := add(_postBytes, mlength)
let mask := sub(exp(0x100, submod), 1)
sstore(sc, add(sload(sc), and(mload(mc), mask)))
for {
sc := add(sc, 1)
mc := add(mc, 0x20)
} lt(mc, end) {
sc := add(sc, 1)
mc := add(mc, 0x20)
} {
sstore(sc, mload(mc))
}
mask := exp(0x100, sub(mc, end))
sstore(sc, mul(div(mload(mc), mask), mask))
}
}
}
function slice(
bytes memory _bytes,
uint256 _start,
uint256 _length
)
internal
pure
returns (bytes memory)
{
require(_length + 31 >= _length, "slice_overflow");
require(_bytes.length >= _start + _length, "slice_outOfBounds");
bytes memory tempBytes;
assembly {
switch iszero(_length)
case 0 {
// Get a location of some free memory and store it in tempBytes as
// Solidity does for memory variables.
tempBytes := mload(0x40)
// The first word of the slice result is potentially a partial
// word read from the original array. To read it, we calculate
// the length of that partial word and start copying that many
// bytes into the array. The first word we copy will start with
// data we don't care about, but the last `lengthmod` bytes will
// land at the beginning of the contents of the new array. When
// we're done copying, we overwrite the full first word with
// the actual length of the slice.
let lengthmod := and(_length, 31)
// The multiplication in the next line is necessary
// because when slicing multiples of 32 bytes (lengthmod == 0)
// the following copy loop was copying the origin's length
// and then ending prematurely not copying everything it should.
let mc := add(add(tempBytes, lengthmod), mul(0x20, iszero(lengthmod)))
let end := add(mc, _length)
for {
// The multiplication in the next line has the same exact purpose
// as the one above.
let cc := add(add(add(_bytes, lengthmod), mul(0x20, iszero(lengthmod))), _start)
} lt(mc, end) {
mc := add(mc, 0x20)
cc := add(cc, 0x20)
} {
mstore(mc, mload(cc))
}
mstore(tempBytes, _length)
//update free-memory pointer
//allocating the array padded to 32 bytes like the compiler does now
mstore(0x40, and(add(mc, 31), not(31)))
}
//if we want a zero-length slice let's just return a zero-length array
default {
tempBytes := mload(0x40)
//zero out the 32 bytes slice we are about to return
//we need to do it because Solidity does not garbage collect
mstore(tempBytes, 0)
mstore(0x40, add(tempBytes, 0x20))
}
}
return tempBytes;
}
function toAddress(bytes memory _bytes, uint256 _start) internal pure returns (address) {
require(_bytes.length >= _start + 20, "toAddress_outOfBounds");
address tempAddress;
assembly {
tempAddress := div(mload(add(add(_bytes, 0x20), _start)), 0x1000000000000000000000000)
}
return tempAddress;
}
function toUint8(bytes memory _bytes, uint256 _start) internal pure returns (uint8) {
require(_bytes.length >= _start + 1 , "toUint8_outOfBounds");
uint8 tempUint;
assembly {
tempUint := mload(add(add(_bytes, 0x1), _start))
}
return tempUint;
}
function toUint16(bytes memory _bytes, uint256 _start) internal pure returns (uint16) {
require(_bytes.length >= _start + 2, "toUint16_outOfBounds");
uint16 tempUint;
assembly {
tempUint := mload(add(add(_bytes, 0x2), _start))
}
return tempUint;
}
function toUint32(bytes memory _bytes, uint256 _start) internal pure returns (uint32) {
require(_bytes.length >= _start + 4, "toUint32_outOfBounds");
uint32 tempUint;
assembly {
tempUint := mload(add(add(_bytes, 0x4), _start))
}
return tempUint;
}
function toUint64(bytes memory _bytes, uint256 _start) internal pure returns (uint64) {
require(_bytes.length >= _start + 8, "toUint64_outOfBounds");
uint64 tempUint;
assembly {
tempUint := mload(add(add(_bytes, 0x8), _start))
}
return tempUint;
}
function toUint96(bytes memory _bytes, uint256 _start) internal pure returns (uint96) {
require(_bytes.length >= _start + 12, "toUint96_outOfBounds");
uint96 tempUint;
assembly {
tempUint := mload(add(add(_bytes, 0xc), _start))
}
return tempUint;
}
function toUint128(bytes memory _bytes, uint256 _start) internal pure returns (uint128) {
require(_bytes.length >= _start + 16, "toUint128_outOfBounds");
uint128 tempUint;
assembly {
tempUint := mload(add(add(_bytes, 0x10), _start))
}
return tempUint;
}
function toUint256(bytes memory _bytes, uint256 _start) internal pure returns (uint256) {
require(_bytes.length >= _start + 32, "toUint256_outOfBounds");
uint256 tempUint;
assembly {
tempUint := mload(add(add(_bytes, 0x20), _start))
}
return tempUint;
}
function toBytes32(bytes memory _bytes, uint256 _start) internal pure returns (bytes32) {
require(_bytes.length >= _start + 32, "toBytes32_outOfBounds");
bytes32 tempBytes32;
assembly {
tempBytes32 := mload(add(add(_bytes, 0x20), _start))
}
return tempBytes32;
}
function equal(bytes memory _preBytes, bytes memory _postBytes) internal pure returns (bool) {
bool success = true;
assembly {
let length := mload(_preBytes)
// if lengths don't match the arrays are not equal
switch eq(length, mload(_postBytes))
case 1 {
// cb is a circuit breaker in the for loop since there's
// no said feature for inline assembly loops
// cb = 1 - don't breaker
// cb = 0 - break
let cb := 1
let mc := add(_preBytes, 0x20)
let end := add(mc, length)
for {
let cc := add(_postBytes, 0x20)
// the next line is the loop condition:
// while(uint256(mc < end) + cb == 2)
} eq(add(lt(mc, end), cb), 2) {
mc := add(mc, 0x20)
cc := add(cc, 0x20)
} {
// if any of these checks fails then arrays are not equal
if iszero(eq(mload(mc), mload(cc))) {
// unsuccess:
success := 0
cb := 0
}
}
}
default {
// unsuccess:
success := 0
}
}
return success;
}
function equalStorage(
bytes storage _preBytes,
bytes memory _postBytes
)
internal
view
returns (bool)
{
bool success = true;
assembly {
// we know _preBytes_offset is 0
let fslot := sload(_preBytes.slot)
// Decode the length of the stored array like in concatStorage().
let slength := div(and(fslot, sub(mul(0x100, iszero(and(fslot, 1))), 1)), 2)
let mlength := mload(_postBytes)
// if lengths don't match the arrays are not equal
switch eq(slength, mlength)
case 1 {
// slength can contain both the length and contents of the array
// if length < 32 bytes so let's prepare for that
// v. http://solidity.readthedocs.io/en/latest/miscellaneous.html#layout-of-state-variables-in-storage
if iszero(iszero(slength)) {
switch lt(slength, 32)
case 1 {
// blank the last byte which is the length
fslot := mul(div(fslot, 0x100), 0x100)
if iszero(eq(fslot, mload(add(_postBytes, 0x20)))) {
// unsuccess:
success := 0
}
}
default {
// cb is a circuit breaker in the for loop since there's
// no said feature for inline assembly loops
// cb = 1 - don't breaker
// cb = 0 - break
let cb := 1
// get the keccak hash to get the contents of the array
mstore(0x0, _preBytes.slot)
let sc := keccak256(0x0, 0x20)
let mc := add(_postBytes, 0x20)
let end := add(mc, mlength)
// the next line is the loop condition:
// while(uint256(mc < end) + cb == 2)
for {} eq(add(lt(mc, end), cb), 2) {
sc := add(sc, 1)
mc := add(mc, 0x20)
} {
if iszero(eq(sload(sc), mload(mc))) {
// unsuccess:
success := 0
cb := 0
}
}
}
}
}
default {
// unsuccess:
success := 0
}
}
return success;
}
}

@@ -0,0 +1,30 @@
[profile.default]
solc_version = "0.8.19"
optimizer = true
optimizer_runs = 200
via_ir = false
src = "contracts"
# We put the tests into the forge-test directory (instead of test) so that
# truffle doesn't try to build them
test = "forge-test"
evm_version = "london"
out = 'build-forge'
libs = ['lib', 'node_modules']
remappings = [
'@openzeppelin/=node_modules/@openzeppelin/',
'@solidity-parser/=node_modules/@solidity-parser/',
'ds-test/=lib/forge-std/lib/ds-test/src/',
'forge-std/=lib/forge-std/src/',
]
[fmt]
line_length = 100
multiline_func_header = "params_first"
# wrap_comments = true
[profile.production]
via_ir = true
# See more config options https://github.com/foundry-rs/foundry/tree/master/config

relayer/ethereum/package-lock.json (generated, new file)

File diff suppressed because it is too large.

@@ -0,0 +1,38 @@
{
"name": "@wormhole-foundation/contracts-ethereum-relayer",
"version": "0.0.1",
"description": "",
"main": "networks.js",
"devDependencies": {
"@chainsafe/truffle-plugin-abigen": "0.0.1",
"@openzeppelin/cli": "^2.8.2",
"@openzeppelin/contracts": "^4.3.1",
"@truffle/hdwallet-provider": "^1.7.0",
"chai": "^4.3.7",
"mocha": "^8.4.0",
"truffle": "5.8.4",
"truffle-flattener": "^1.6.0",
"truffle-plugin-verify": "^0.5.11",
"ts-node": "^10.9.1",
"typescript": "^4.9.5"
},
"scripts": {
"build:forge": "if [ \"$DEV\" = \"True\" ]; then forge build; else forge build --via-ir; fi",
"build": "npm run build:forge && typechain --target=ethers-v5 --out-dir=./ethers-contracts \"build-forge/!(test).sol/*.json\"&& npm run erase-types",
"erase-types": "ts-node ./ts-scripts/relayer/eraseTypes.ts ./contracts/interfaces/relayer/IDeliveryProviderTyped.sol ./contracts/interfaces/relayer/IWormholeRelayerTyped.sol",
"deploy-relayers-evm1": "ENV=kubernetes CONTAINER=evm1 bash ./ts-scripts/relayer/shell/deployInContainer.sh",
"deploy-relayers-evm2": "ENV=kubernetes CONTAINER=evm2 bash ./ts-scripts/relayer/shell/deployInContainer.sh",
"typecheck": "tsc --noEmit --skipLibCheck"
},
"author": "",
"license": "ISC",
"dependencies": {
"@certusone/wormhole-sdk": "^0.9.11",
"@typechain/ethers-v5": "^10.2.0",
"dotenv": "^10.0.0",
"elliptic": "^6.5.2",
"jsonfile": "^4.0.0",
"truffle-hdwallet-provider-klaytn": "^1.4.2",
"typechain": "^8.1.1"
}
}

@@ -19,7 +19,7 @@ WORKDIR /home/node/app
# Fix git ssh error
RUN git config --global url."https://".insteadOf ssh://
COPY --chown=node:node ./ethereum/ts-scripts/relayer/config/ ./ethereum/ts-scripts/relayer/config/
COPY --chown=node:node ./relayer/ethereum/ts-scripts/relayer/config/ ./ethereum/ts-scripts/relayer/config/
#Path matters so as to not break imports
WORKDIR /home/node/app/relayer/generic_relayer/relayer-engine-v2/

sdk/js/.gitignore

@@ -25,6 +25,7 @@ yarn-error.log*
# ethereum contracts
/contracts
/src/ethers-contracts
/src/ethers-relayer-contracts
# tsproto output
/src/proto

@@ -2,6 +2,7 @@
## 0.10.14
Support for move of relayer ethereum contracts
Blast Sepolia support
## 0.10.13

@@ -11,7 +11,7 @@
"sideEffects": false,
"repository": "https://github.com/certusone/wormhole/tree/main/sdk/js",
"scripts": {
"build-contracts": "cd ../../ethereum && make forge_dependencies && npm run build && cd ../sdk/js && node scripts/copyContracts.js",
"build-contracts": "cd ../../ethereum && make forge_dependencies && npm run build && cd ../relayer/ethereum && make forge_dependencies && npm run build && cd ../../sdk/js && node scripts/copyContracts.js",
"build-abis": "typechain --target=ethers-v5 --out-dir=src/ethers-contracts/abi src/abi/Wormhole.abi.json",
"build-idl": "node scripts/compileAnchorIdls.js",
"build-deps": "npm run build-abis && npm run build-contracts && npm run build-idl",

@@ -1,3 +1,7 @@
const copydir = require("copy-dir");
console.log("Copying from ../../ethereum/ethers-contracts");
copydir.sync("../../ethereum/ethers-contracts", "src/ethers-contracts");
copydir.sync(
"../../relayer/ethereum/ethers-contracts",
"src/ethers-relayer-contracts"
);

@@ -1,20 +1,26 @@
const fs = require("fs");
["lib/esm", "lib/cjs"].forEach((buildPath) => {
fs.readdirSync("src/ethers-contracts").forEach((file) => {
if (file.endsWith(".d.ts")) {
fs.copyFileSync(
`src/ethers-contracts/${file}`,
`${buildPath}/ethers-contracts/${file}`
);
}
});
fs.readdirSync("src/ethers-contracts/abi").forEach((file) => {
if (file.endsWith(".d.ts")) {
fs.copyFileSync(
`src/ethers-contracts/abi/${file}`,
`${buildPath}/ethers-contracts/abi/${file}`
);
}
function copyTypes(srcDir) {
["lib/esm", "lib/cjs"].forEach((buildPath) => {
fs.readdirSync(srcDir).forEach((file) => {
if (file.endsWith(".d.ts")) {
fs.copyFileSync(
`src/ethers-contracts/${file}`,
`${buildPath}/ethers-contracts/${file}`
);
}
});
fs.readdirSync(srcDir).forEach((file) => {
if (file.endsWith(".d.ts")) {
fs.copyFileSync(
`src/ethers-contracts/abi/${file}`,
`${buildPath}/ethers-contracts/abi/${file}`
);
}
});
});
});
}
copyTypes("src/ethers-contracts");
copyTypes("src/ethers-relayer-contracts");

@@ -7,6 +7,7 @@ export * from "./vaa";
export * as cosmos from "./cosmos";
export * as ethers_contracts from "./ethers-contracts";
export * as ethers_relayer_contracts from "./ethers-relayer-contracts";
export * as solana from "./solana";
export * as terra from "./terra";
export * as rpc from "./rpc";
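
The new `ethers_relayer_contracts` namespace re-exports the typechain bindings generated from `relayer/ethereum`. A minimal usage sketch follows; the factory name follows typechain's ethers-v5 `<Name>__factory` convention, and the RPC URL, contract address, and method call are illustrative assumptions rather than part of this diff.

```ts
import { ethers } from "ethers";
import { ethers_relayer_contracts } from "@certusone/wormhole-sdk";

// Assumed local RPC endpoint and placeholder relayer contract address.
const provider = new ethers.providers.JsonRpcProvider("http://localhost:8545");
const relayerAddress = "0x0000000000000000000000000000000000000000";

// Typechain emits a `<Name>__factory` per contract; IWormholeRelayer is one of
// the interfaces built from relayer/ethereum (see the erase-types script above).
const relayer = ethers_relayer_contracts.IWormholeRelayer__factory.connect(
  relayerAddress,
  provider
);

// Example read-only call; the exact method surface depends on the generated ABI.
relayer
  .getDefaultDeliveryProvider()
  .then((deliveryProvider) => console.log(deliveryProvider));
```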

Some files were not shown because too many files changed in this diff.