add wormhole receiver

Evan Gray 2022-06-03 12:45:46 -04:00
parent b70b5b3f1c
commit 23b488770c
28 changed files with 32552 additions and 0 deletions

receiver/evm/.env.devnet Normal file

@ -0,0 +1,3 @@
INIT_SIGNERS=["0xbeFA429d57cD18b7F8A4d91A2da9AB4AF05d0FBe"]
INIT_GOV_CHAIN_ID=0x1
INIT_GOV_CONTRACT=0x0000000000000000000000000000000000000000000000000000000000000004


@ -0,0 +1,3 @@
INIT_SIGNERS=["0x58CC3AE5C097b213cE3c81979e1B9f9570746AA5"]
INIT_GOV_CHAIN_ID=0x1
INIT_GOV_CONTRACT=0x0000000000000000000000000000000000000000000000000000000000000004


@ -0,0 +1,3 @@
INIT_SIGNERS=["0x13947Bd48b18E53fdAeEe77F3473391aC727C638"]
INIT_GOV_CHAIN_ID=0x1
INIT_GOV_CONTRACT=0x0000000000000000000000000000000000000000000000000000000000000004

receiver/evm/.gitignore vendored Normal file

@ -0,0 +1 @@
ganache.log

receiver/evm/LICENSE Normal file

@ -0,0 +1,13 @@
Copyright 2020 Wormhole Project Contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

receiver/evm/Makefile Normal file

@ -0,0 +1,29 @@
SOURCE_FILES:=$(shell find contracts -name "*.sol")
.PHONY: dependencies test clean all
all: build
node_modules: package-lock.json
touch -m node_modules
npm ci
dependencies: node_modules
build: node_modules ${SOURCE_FILES}
mkdir -p build
touch -m build
npm run build
.env: .env.devnet
cp $< $@
test: build .env dependencies
@if pgrep ganache-cli; then echo "Error: ganache-cli already running. Stop it before running tests"; exit 1; fi
npx ganache-cli -e 10000 --deterministic --time="1970-01-01T00:00:00+00:00" > ganache.log &
sleep 5
npm test || (pkill ganache-cli && exit 1)
pkill ganache-cli || true
clean:
rm -rf ganache.log .env node_modules build

receiver/evm/README.md Normal file

@ -0,0 +1,12 @@
# Wormhole Receiver
This contract can be used to receive Wormhole messages on chains that don't have a [core bridge contract](https://docs.wormholenetwork.com/wormhole/contracts#core-bridge) deployed.
## Deploy
```bash
npm ci
cp .env.mainnet .env
MNEMONIC="[YOUR_KEY_HERE]" npm run migrate -- --network [NETWORK_KEY_FROM_TRUFFLE_CONFIG]
MNEMONIC="[YOUR_KEY_HERE]" npm run submit-guardian-sets -- --network [NETWORK_KEY_FROM_TRUFFLE_CONFIG]
```
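
## Usage

Once deployed, other contracts can verify VAAs by calling `parseAndVerifyVM` on the receiver through the `IWormhole` interface included in this commit. The snippet below is a minimal sketch: the contract name `ExampleConsumer`, the constructor parameter, and the import path are placeholders, and the application-specific emitter and replay checks are only indicated by comments.

```solidity
// SPDX-License-Identifier: Apache 2
pragma solidity ^0.8.0;

// Adjust the import path to wherever IWormhole.sol lives in your project.
import "./interfaces/IWormhole.sol";

// Hypothetical consumer contract, for illustration only.
contract ExampleConsumer {
    IWormhole public immutable wormhole;

    constructor(address wormholeReceiver) {
        // Address of the deployed WormholeReceiver proxy.
        wormhole = IWormhole(wormholeReceiver);
    }

    function consume(bytes calldata encodedVM) external view returns (bytes memory) {
        // Parses the VAA and verifies its guardian signatures against the stored guardian set.
        (IWormhole.VM memory vm, bool valid, string memory reason) = wormhole.parseAndVerifyVM(encodedVM);
        require(valid, reason);
        // Application-specific checks (expected emitter chain/address, replay protection) belong here.
        return vm.payload;
    }
}
```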


@ -0,0 +1,40 @@
// contracts/Getters.sol
// SPDX-License-Identifier: Apache 2
pragma solidity ^0.8.0;
import "./State.sol";
contract Getters is State {
function owner() public view returns (address) {
return _state.owner;
}
function getGuardianSet(uint32 index) public view returns (Structs.GuardianSet memory) {
return _state.guardianSets[index];
}
function getCurrentGuardianSetIndex() public view returns (uint32) {
return _state.guardianSetIndex;
}
function getGuardianSetExpiry() public view returns (uint32) {
return _state.guardianSetExpiry;
}
function governanceActionIsConsumed(bytes32 hash) public view returns (bool) {
return _state.consumedGovernanceActions[hash];
}
function isInitialized(address impl) public view returns (bool) {
return _state.initializedImplementations[impl];
}
function governanceChainId() public view returns (uint16){
return _state.provider.governanceChainId;
}
function governanceContract() public view returns (bytes32){
return _state.provider.governanceContract;
}
}


@ -0,0 +1,95 @@
// contracts/Governance.sol
// SPDX-License-Identifier: Apache 2
pragma solidity ^0.8.0;
import "./Structs.sol";
import "./GovernanceStructs.sol";
import "./Messages.sol";
import "./Setters.sol";
import "@openzeppelin/contracts/proxy/ERC1967/ERC1967Upgrade.sol";
abstract contract Governance is GovernanceStructs, Messages, Setters, ERC1967Upgrade {
event ContractUpgraded(address indexed oldContract, address indexed newContract);
event OwnershipTransferred(address indexed oldOwner, address indexed newOwner);
// "Core" (left padded)
bytes32 constant module = 0x00000000000000000000000000000000000000000000000000000000436f7265;
function submitNewGuardianSet(bytes memory _vm) public {
Structs.VM memory vm = parseVM(_vm);
(bool isValid, string memory reason) = verifyGovernanceVM(vm);
require(isValid, reason);
GovernanceStructs.GuardianSetUpgrade memory upgrade = parseGuardianSetUpgrade(vm.payload);
require(upgrade.module == module, "invalid Module");
require(upgrade.newGuardianSet.keys.length > 0, "new guardian set is empty");
require(upgrade.newGuardianSetIndex == getCurrentGuardianSetIndex() + 1, "index must increase in steps of 1");
setGovernanceActionConsumed(vm.hash);
expireGuardianSet(getCurrentGuardianSetIndex());
storeGuardianSet(upgrade.newGuardianSet, upgrade.newGuardianSetIndex);
updateGuardianSetIndex(upgrade.newGuardianSetIndex);
}
function upgradeImplementation(address newImplementation) public onlyOwner {
address currentImplementation = _getImplementation();
_upgradeTo(newImplementation);
// Call initialize function of the new implementation
(bool success, bytes memory reason) = newImplementation.delegatecall(abi.encodeWithSignature("initialize()"));
require(success, string(reason));
emit ContractUpgraded(currentImplementation, newImplementation);
}
function verifyGovernanceVM(Structs.VM memory vm) internal view returns (bool, string memory){
// validate vm
(bool isValid, string memory reason) = verifyVM(vm);
if (!isValid){
return (false, reason);
}
// only current guardianset can sign governance packets
if (vm.guardianSetIndex != getCurrentGuardianSetIndex()) {
return (false, "not signed by current guardian set");
}
// verify source
if (uint16(vm.emitterChainId) != governanceChainId()) {
return (false, "wrong governance chain");
}
if (vm.emitterAddress != governanceContract()) {
return (false, "wrong governance contract");
}
// prevent replay of governance actions
if (governanceActionIsConsumed(vm.hash)){
return (false, "governance action already consumed");
}
return (true, "");
}
function transferOwnership(address newOwner) public onlyOwner {
require(newOwner != address(0), "new owner cannot be the zero address");
address currentOwner = owner();
setOwner(newOwner);
emit OwnershipTransferred(currentOwner, newOwner);
}
modifier onlyOwner() {
require(owner() == msg.sender, "caller is not the owner");
_;
}
}


@ -0,0 +1,59 @@
// contracts/GovernanceStructs.sol
// SPDX-License-Identifier: Apache 2
pragma solidity ^0.8.0;
import "./libraries/external/BytesLib.sol";
import "./Structs.sol";
contract GovernanceStructs {
using BytesLib for bytes;
enum GovernanceAction {
UpgradeContract,
UpgradeGuardianset
}
struct GuardianSetUpgrade {
bytes32 module;
uint8 action;
uint16 chain;
Structs.GuardianSet newGuardianSet;
uint32 newGuardianSetIndex;
}
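/// @dev Deserializes a guardian set upgrade governance payload with the layout:
///   module (32) | action (1, must be 2) | chain (2) | newGuardianSetIndex (4) | numGuardians (1) | guardian addresses (20 bytes each)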
function parseGuardianSetUpgrade(bytes memory encodedUpgrade) public pure returns (GuardianSetUpgrade memory gsu) {
uint index = 0;
gsu.module = encodedUpgrade.toBytes32(index);
index += 32;
gsu.action = encodedUpgrade.toUint8(index);
index += 1;
require(gsu.action == 2, "invalid GuardianSetUpgrade");
gsu.chain = encodedUpgrade.toUint16(index);
index += 2;
gsu.newGuardianSetIndex = encodedUpgrade.toUint32(index);
index += 4;
uint8 guardianLength = encodedUpgrade.toUint8(index);
index += 1;
gsu.newGuardianSet = Structs.GuardianSet({
keys : new address[](guardianLength),
expirationTime : 0
});
for(uint i = 0; i < guardianLength; i++) {
gsu.newGuardianSet.keys[i] = encodedUpgrade.toAddress(index);
index += 20;
}
require(encodedUpgrade.length == index, "invalid GuardianSetUpgrade");
}
}


@ -0,0 +1,29 @@
// contracts/Implementation.sol
// SPDX-License-Identifier: Apache 2
pragma solidity ^0.8.0;
pragma experimental ABIEncoderV2;
import "./Governance.sol";
import "@openzeppelin/contracts/proxy/ERC1967/ERC1967Upgrade.sol";
contract Implementation is Governance {
modifier initializer() {
address implementation = ERC1967Upgrade._getImplementation();
require(
!isInitialized(implementation),
"already initialized"
);
setInitialized(implementation);
_;
}
fallback() external payable {revert("unsupported");}
receive() external payable {revert("the Wormhole Receiver contract does not accept assets");}
}


@ -0,0 +1,148 @@
// contracts/Messages.sol
// SPDX-License-Identifier: Apache 2
pragma solidity ^0.8.0;
pragma experimental ABIEncoderV2;
import "./Getters.sol";
import "./Structs.sol";
import "./libraries/external/BytesLib.sol";
contract Messages is Getters {
using BytesLib for bytes;
/// @dev parseAndVerifyVM serves to parse an encodedVM and wholly validate it for consumption
function parseAndVerifyVM(bytes calldata encodedVM) public view returns (Structs.VM memory vm, bool valid, string memory reason) {
vm = parseVM(encodedVM);
(valid, reason) = verifyVM(vm);
}
/**
* @dev `verifyVM` serves to validate an arbitrary vm against a valid Guardian set
* - it aims to make sure the VM is for a known guardianSet
* - it aims to ensure the guardianSet is not expired
* - it aims to ensure the VM has reached quorum
* - it aims to verify the signatures provided against the guardianSet
*/
function verifyVM(Structs.VM memory vm) public view returns (bool valid, string memory reason) {
/// @dev Obtain the current guardianSet for the guardianSetIndex provided
Structs.GuardianSet memory guardianSet = getGuardianSet(vm.guardianSetIndex);
/**
* @dev Checks whether the guardianSet has zero keys
* WARNING: This keys check is critical to ensure the guardianSet has keys present AND to ensure
* that guardianSet key size doesn't fall to zero and negatively impact quorum assessment. If guardianSet
* key length is 0 and vm.signatures length is 0, this could compromise the integrity of both vm and
* signature verification.
*/
if(guardianSet.keys.length == 0){
return (false, "invalid guardian set");
}
/// @dev Checks that the VM was signed by the current guardian set, or by an older set that has not yet expired.
if(vm.guardianSetIndex != getCurrentGuardianSetIndex() && guardianSet.expirationTime < block.timestamp){
return (false, "guardian set has expired");
}
/**
* @dev We're using a fixed point number transformation with 1 decimal to deal with rounding.
* WARNING: This quorum check is critical to assessing whether we have enough Guardian signatures to validate a VM
* if making any changes to this, obtain additional peer review. If guardianSet key length is 0 and
* vm.signatures length is 0, this could compromise the integrity of both vm and signature verification.
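*
* Example: with 19 guardians, ((19 * 10 / 3) * 2) / 10 + 1 = 13 signatures are required (strictly more
* than two thirds of the set); with a single devnet guardian, 1 signature is required.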
*/
if(((guardianSet.keys.length * 10 / 3) * 2) / 10 + 1 > vm.signatures.length){
return (false, "no quorum");
}
/// @dev Verify the proposed vm.signatures against the guardianSet
(bool signaturesValid, string memory invalidReason) = verifySignatures(vm.hash, vm.signatures, guardianSet);
if(!signaturesValid){
return (false, invalidReason);
}
/// If we are here, we've validated the VM is a valid multi-sig that matches the guardianSet.
return (true, "");
}
/**
* @dev verifySignatures serves to validate arbitrary signatures against an arbitrary guardianSet
* - it intentionally does not solve for expectations within guardianSet (you should use verifyVM if you need these protections)
* - it intentionally does not solve for quorum (you should use verifyVM if you need these protections)
* - it intentionally returns true when signatures is an empty set (you should use verifyVM if you need these protections)
*/
function verifySignatures(bytes32 hash, Structs.Signature[] memory signatures, Structs.GuardianSet memory guardianSet) public pure returns (bool valid, string memory reason) {
uint8 lastIndex = 0;
for (uint i = 0; i < signatures.length; i++) {
Structs.Signature memory sig = signatures[i];
/// Ensure that provided signature indices are ascending only
require(i == 0 || sig.guardianIndex > lastIndex, "signature indices must be ascending");
lastIndex = sig.guardianIndex;
/// Check to see if the signer of the signature does not match a specific Guardian key at the provided index
if(ecrecover(hash, sig.v, sig.r, sig.s) != guardianSet.keys[sig.guardianIndex]){
return (false, "VM signature invalid");
}
}
/// If we are here, we've validated that the provided signatures are valid for the provided guardianSet
return (true, "");
}
/**
* @dev parseVM serves to parse an encodedVM into a vm struct
* - it intentionally performs no validation functions, it simply parses raw into a struct
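*
* Encoded VM layout (big-endian):
*   version (1) | guardianSetIndex (4) | numSignatures (1) |
*   per signature: guardianIndex (1) | r (32) | s (32) | v - 27 (1) |
*   body: timestamp (4) | nonce (4) | emitterChainId (2) | emitterAddress (32) | sequence (8) | consistencyLevel (1) | payload (remaining bytes)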
*/
function parseVM(bytes memory encodedVM) public pure virtual returns (Structs.VM memory vm) {
uint index = 0;
vm.version = encodedVM.toUint8(index);
index += 1;
require(vm.version == 1, "VM version incompatible");
vm.guardianSetIndex = encodedVM.toUint32(index);
index += 4;
// Parse Signatures
uint256 signersLen = encodedVM.toUint8(index);
index += 1;
vm.signatures = new Structs.Signature[](signersLen);
for (uint i = 0; i < signersLen; i++) {
vm.signatures[i].guardianIndex = encodedVM.toUint8(index);
index += 1;
vm.signatures[i].r = encodedVM.toBytes32(index);
index += 32;
vm.signatures[i].s = encodedVM.toBytes32(index);
index += 32;
vm.signatures[i].v = encodedVM.toUint8(index) + 27;
index += 1;
}
// Hash the body
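// vm.hash is the double keccak256 of the body; it is the digest the guardian signatures are checked against in verifySignatures.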
bytes memory body = encodedVM.slice(index, encodedVM.length - index);
vm.hash = keccak256(abi.encodePacked(keccak256(body)));
// Parse the body
vm.timestamp = encodedVM.toUint32(index);
index += 4;
vm.nonce = encodedVM.toUint32(index);
index += 4;
vm.emitterChainId = encodedVM.toUint16(index);
index += 2;
vm.emitterAddress = encodedVM.toBytes32(index);
index += 32;
vm.sequence = encodedVM.toUint64(index);
index += 8;
vm.consistencyLevel = encodedVM.toUint8(index);
index += 1;
vm.payload = encodedVM.slice(index, encodedVM.length - index);
}
}


@ -0,0 +1,19 @@
// SPDX-License-Identifier: MIT
pragma solidity >=0.4.22 <0.9.0;
contract Migrations {
address public owner = msg.sender;
uint public last_completed_migration;
modifier restricted() {
require(
msg.sender == owner,
"This function is restricted to the contract's owner"
);
_;
}
function setCompleted(uint completed) public restricted {
last_completed_migration = completed;
}
}


@ -0,0 +1,41 @@
// contracts/Setters.sol
// SPDX-License-Identifier: Apache 2
pragma solidity ^0.8.0;
import "./State.sol";
contract Setters is State {
function setOwner(address owner_) internal {
_state.owner = owner_;
}
function updateGuardianSetIndex(uint32 newIndex) internal {
_state.guardianSetIndex = newIndex;
}
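/// @dev expireGuardianSet marks the given guardian set to expire 86400 seconds (24 hours) from now.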
function expireGuardianSet(uint32 index) internal {
_state.guardianSets[index].expirationTime = uint32(block.timestamp) + 86400;
}
function storeGuardianSet(Structs.GuardianSet memory set, uint32 index) internal {
_state.guardianSets[index] = set;
}
function setInitialized(address implementation) internal {
_state.initializedImplementations[implementation] = true;
}
function setGovernanceActionConsumed(bytes32 hash) internal {
_state.consumedGovernanceActions[hash] = true;
}
function setGovernanceChainId(uint16 chainId) internal {
_state.provider.governanceChainId = chainId;
}
function setGovernanceContract(bytes32 governanceContract) internal {
_state.provider.governanceContract = governanceContract;
}
}


@ -0,0 +1,35 @@
// contracts/Setup.sol
// SPDX-License-Identifier: Apache 2
pragma solidity ^0.8.0;
pragma experimental ABIEncoderV2;
import "./Governance.sol";
import "@openzeppelin/contracts/proxy/ERC1967/ERC1967Upgrade.sol";
contract Setup is Setters, ERC1967Upgrade {
function setup(
address implementation,
address[] memory initialGuardians,
uint16 governanceChainId,
bytes32 governanceContract
) public {
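// Intended to be called via delegatecall from the WormholeReceiver proxy constructor, so the state written below lands in the proxy's storage.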
require(initialGuardians.length > 0, "no guardians specified");
setOwner(msg.sender);
Structs.GuardianSet memory initialGuardianSet = Structs.GuardianSet({
keys : initialGuardians,
expirationTime : 0
});
storeGuardianSet(initialGuardianSet, 0);
// initial guardian set index is 0, which is the default value of the storage slot anyways
setGovernanceChainId(governanceChainId);
setGovernanceContract(governanceContract);
_upgradeTo(implementation);
}
}


@ -0,0 +1,47 @@
// contracts/State.sol
// SPDX-License-Identifier: Apache 2
pragma solidity ^0.8.0;
import "./Structs.sol";
contract Events {
event LogGuardianSetChanged(
uint32 oldGuardianIndex,
uint32 newGuardianIndex
);
event LogMessagePublished(
address emitter_address,
uint32 nonce,
bytes payload
);
}
contract Storage {
struct WormholeState {
Structs.Provider provider;
// contract deployer
address owner;
// Mapping of guardian_set_index => guardian set
mapping(uint32 => Structs.GuardianSet) guardianSets;
// Current active guardian set index
uint32 guardianSetIndex;
// Period for which a guardian set stays active after it has been replaced
uint32 guardianSetExpiry;
// Mapping of consumed governance actions
mapping(bytes32 => bool) consumedGovernanceActions;
// Mapping of initialized implementations
mapping(address => bool) initializedImplementations;
}
}
contract State {
Storage.WormholeState _state;
}


@ -0,0 +1,39 @@
// contracts/Structs.sol
// SPDX-License-Identifier: Apache 2
pragma solidity ^0.8.0;
interface Structs {
struct Provider {
uint16 governanceChainId;
bytes32 governanceContract;
}
struct GuardianSet {
address[] keys;
uint32 expirationTime;
}
struct Signature {
bytes32 r;
bytes32 s;
uint8 v;
uint8 guardianIndex;
}
struct VM {
uint8 version;
uint32 timestamp;
uint32 nonce;
uint16 emitterChainId;
bytes32 emitterAddress;
uint64 sequence;
uint8 consistencyLevel;
bytes payload;
uint32 guardianSetIndex;
Signature[] signatures;
bytes32 hash;
}
}


@ -0,0 +1,13 @@
// contracts/WormholeReceiver.sol
// SPDX-License-Identifier: Apache 2
pragma solidity ^0.8.0;
import "@openzeppelin/contracts/proxy/ERC1967/ERC1967Proxy.sol";
contract WormholeReceiver is ERC1967Proxy {
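// `setup` is the address of the deployed Setup contract; `initData` is the ABI-encoded Setup.setup(...)
// call, which the ERC1967Proxy constructor delegatecalls to initialize the proxy's storage.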
constructor (address setup, bytes memory initData) ERC1967Proxy(
setup,
initData
) { }
}


@ -0,0 +1,42 @@
// contracts/interfaces/IWormhole.sol
// SPDX-License-Identifier: Apache 2
pragma solidity ^0.8.0;
import "../Structs.sol";
interface IWormhole is Structs {
event LogMessagePublished(address indexed sender, uint64 sequence, uint32 nonce, bytes payload, uint8 consistencyLevel);
function publishMessage(
uint32 nonce,
bytes memory payload,
uint8 consistencyLevel
) external payable returns (uint64 sequence);
function parseAndVerifyVM(bytes calldata encodedVM) external view returns (Structs.VM memory vm, bool valid, string memory reason);
function verifyVM(Structs.VM memory vm) external view returns (bool valid, string memory reason);
function verifySignatures(bytes32 hash, Structs.Signature[] memory signatures, Structs.GuardianSet memory guardianSet) external pure returns (bool valid, string memory reason) ;
function parseVM(bytes memory encodedVM) external pure returns (Structs.VM memory vm);
function getGuardianSet(uint32 index) external view returns (Structs.GuardianSet memory) ;
function getCurrentGuardianSetIndex() external view returns (uint32) ;
function getGuardianSetExpiry() external view returns (uint32) ;
function governanceActionIsConsumed(bytes32 hash) external view returns (bool) ;
function isInitialized(address impl) external view returns (bool) ;
function chainId() external view returns (uint16) ;
function governanceChainId() external view returns (uint16);
function governanceContract() external view returns (bytes32);
function messageFee() external view returns (uint256) ;
}


@ -0,0 +1,510 @@
// SPDX-License-Identifier: Unlicense
/*
* @title Solidity Bytes Arrays Utils
* @author Gonçalo <goncalo.sa@consensys.net>
*
* @dev Bytes tightly packed arrays utility library for ethereum contracts written in Solidity.
* The library lets you concatenate, slice and type cast bytes arrays both in memory and storage.
*/
pragma solidity >=0.8.0 <0.9.0;
library BytesLib {
function concat(
bytes memory _preBytes,
bytes memory _postBytes
)
internal
pure
returns (bytes memory)
{
bytes memory tempBytes;
assembly {
// Get a location of some free memory and store it in tempBytes as
// Solidity does for memory variables.
tempBytes := mload(0x40)
// Store the length of the first bytes array at the beginning of
// the memory for tempBytes.
let length := mload(_preBytes)
mstore(tempBytes, length)
// Maintain a memory counter for the current write location in the
// temp bytes array by adding the 32 bytes for the array length to
// the starting location.
let mc := add(tempBytes, 0x20)
// Stop copying when the memory counter reaches the length of the
// first bytes array.
let end := add(mc, length)
for {
// Initialize a copy counter to the start of the _preBytes data,
// 32 bytes into its memory.
let cc := add(_preBytes, 0x20)
} lt(mc, end) {
// Increase both counters by 32 bytes each iteration.
mc := add(mc, 0x20)
cc := add(cc, 0x20)
} {
// Write the _preBytes data into the tempBytes memory 32 bytes
// at a time.
mstore(mc, mload(cc))
}
// Add the length of _postBytes to the current length of tempBytes
// and store it as the new length in the first 32 bytes of the
// tempBytes memory.
length := mload(_postBytes)
mstore(tempBytes, add(length, mload(tempBytes)))
// Move the memory counter back from a multiple of 0x20 to the
// actual end of the _preBytes data.
mc := end
// Stop copying when the memory counter reaches the new combined
// length of the arrays.
end := add(mc, length)
for {
let cc := add(_postBytes, 0x20)
} lt(mc, end) {
mc := add(mc, 0x20)
cc := add(cc, 0x20)
} {
mstore(mc, mload(cc))
}
// Update the free-memory pointer by padding our last write location
// to 32 bytes: add 31 bytes to the end of tempBytes to move to the
// next 32 byte block, then round down to the nearest multiple of
// 32. If the sum of the length of the two arrays is zero then add
// one before rounding down to leave a blank 32 bytes (the length block with 0).
mstore(0x40, and(
add(add(end, iszero(add(length, mload(_preBytes)))), 31),
not(31) // Round down to the nearest 32 bytes.
))
}
return tempBytes;
}
function concatStorage(bytes storage _preBytes, bytes memory _postBytes) internal {
assembly {
// Read the first 32 bytes of _preBytes storage, which is the length
// of the array. (We don't need to use the offset into the slot
// because arrays use the entire slot.)
let fslot := sload(_preBytes.slot)
// Arrays of 31 bytes or less have an even value in their slot,
// while longer arrays have an odd value. The actual length is
// the slot divided by two for odd values, and the lowest order
// byte divided by two for even values.
// If the slot is even, bitwise and the slot with 255 and divide by
// two to get the length. If the slot is odd, bitwise and the slot
// with -1 and divide by two.
let slength := div(and(fslot, sub(mul(0x100, iszero(and(fslot, 1))), 1)), 2)
let mlength := mload(_postBytes)
let newlength := add(slength, mlength)
// slength can contain both the length and contents of the array
// if length < 32 bytes so let's prepare for that
// v. http://solidity.readthedocs.io/en/latest/miscellaneous.html#layout-of-state-variables-in-storage
switch add(lt(slength, 32), lt(newlength, 32))
case 2 {
// Since the new array still fits in the slot, we just need to
// update the contents of the slot.
// uint256(bytes_storage) = uint256(bytes_storage) + uint256(bytes_memory) + new_length
sstore(
_preBytes.slot,
// all the modifications to the slot are inside this
// next block
add(
// we can just add to the slot contents because the
// bytes we want to change are the LSBs
fslot,
add(
mul(
div(
// load the bytes from memory
mload(add(_postBytes, 0x20)),
// zero all bytes to the right
exp(0x100, sub(32, mlength))
),
// and now shift left the number of bytes to
// leave space for the length in the slot
exp(0x100, sub(32, newlength))
),
// increase length by the double of the memory
// bytes length
mul(mlength, 2)
)
)
)
}
case 1 {
// The stored value fits in the slot, but the combined value
// will exceed it.
// get the keccak hash to get the contents of the array
mstore(0x0, _preBytes.slot)
let sc := add(keccak256(0x0, 0x20), div(slength, 32))
// save new length
sstore(_preBytes.slot, add(mul(newlength, 2), 1))
// The contents of the _postBytes array start 32 bytes into
// the structure. Our first read should obtain the `submod`
// bytes that can fit into the unused space in the last word
// of the stored array. To get this, we read 32 bytes starting
// from `submod`, so the data we read overlaps with the array
// contents by `submod` bytes. Masking the lowest-order
// `submod` bytes allows us to add that value directly to the
// stored value.
let submod := sub(32, slength)
let mc := add(_postBytes, submod)
let end := add(_postBytes, mlength)
let mask := sub(exp(0x100, submod), 1)
sstore(
sc,
add(
and(
fslot,
0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00
),
and(mload(mc), mask)
)
)
for {
mc := add(mc, 0x20)
sc := add(sc, 1)
} lt(mc, end) {
sc := add(sc, 1)
mc := add(mc, 0x20)
} {
sstore(sc, mload(mc))
}
mask := exp(0x100, sub(mc, end))
sstore(sc, mul(div(mload(mc), mask), mask))
}
default {
// get the keccak hash to get the contents of the array
mstore(0x0, _preBytes.slot)
// Start copying to the last used word of the stored array.
let sc := add(keccak256(0x0, 0x20), div(slength, 32))
// save new length
sstore(_preBytes.slot, add(mul(newlength, 2), 1))
// Copy over the first `submod` bytes of the new data as in
// case 1 above.
let slengthmod := mod(slength, 32)
let mlengthmod := mod(mlength, 32)
let submod := sub(32, slengthmod)
let mc := add(_postBytes, submod)
let end := add(_postBytes, mlength)
let mask := sub(exp(0x100, submod), 1)
sstore(sc, add(sload(sc), and(mload(mc), mask)))
for {
sc := add(sc, 1)
mc := add(mc, 0x20)
} lt(mc, end) {
sc := add(sc, 1)
mc := add(mc, 0x20)
} {
sstore(sc, mload(mc))
}
mask := exp(0x100, sub(mc, end))
sstore(sc, mul(div(mload(mc), mask), mask))
}
}
}
function slice(
bytes memory _bytes,
uint256 _start,
uint256 _length
)
internal
pure
returns (bytes memory)
{
require(_length + 31 >= _length, "slice_overflow");
require(_bytes.length >= _start + _length, "slice_outOfBounds");
bytes memory tempBytes;
assembly {
switch iszero(_length)
case 0 {
// Get a location of some free memory and store it in tempBytes as
// Solidity does for memory variables.
tempBytes := mload(0x40)
// The first word of the slice result is potentially a partial
// word read from the original array. To read it, we calculate
// the length of that partial word and start copying that many
// bytes into the array. The first word we copy will start with
// data we don't care about, but the last `lengthmod` bytes will
// land at the beginning of the contents of the new array. When
// we're done copying, we overwrite the full first word with
// the actual length of the slice.
let lengthmod := and(_length, 31)
// The multiplication in the next line is necessary
// because when slicing multiples of 32 bytes (lengthmod == 0)
// the following copy loop was copying the origin's length
// and then ending prematurely not copying everything it should.
let mc := add(add(tempBytes, lengthmod), mul(0x20, iszero(lengthmod)))
let end := add(mc, _length)
for {
// The multiplication in the next line has the same exact purpose
// as the one above.
let cc := add(add(add(_bytes, lengthmod), mul(0x20, iszero(lengthmod))), _start)
} lt(mc, end) {
mc := add(mc, 0x20)
cc := add(cc, 0x20)
} {
mstore(mc, mload(cc))
}
mstore(tempBytes, _length)
//update free-memory pointer
//allocating the array padded to 32 bytes like the compiler does now
mstore(0x40, and(add(mc, 31), not(31)))
}
//if we want a zero-length slice let's just return a zero-length array
default {
tempBytes := mload(0x40)
//zero out the 32 bytes slice we are about to return
//we need to do it because Solidity does not garbage collect
mstore(tempBytes, 0)
mstore(0x40, add(tempBytes, 0x20))
}
}
return tempBytes;
}
function toAddress(bytes memory _bytes, uint256 _start) internal pure returns (address) {
require(_bytes.length >= _start + 20, "toAddress_outOfBounds");
address tempAddress;
assembly {
tempAddress := div(mload(add(add(_bytes, 0x20), _start)), 0x1000000000000000000000000)
}
return tempAddress;
}
function toUint8(bytes memory _bytes, uint256 _start) internal pure returns (uint8) {
require(_bytes.length >= _start + 1 , "toUint8_outOfBounds");
uint8 tempUint;
assembly {
tempUint := mload(add(add(_bytes, 0x1), _start))
}
return tempUint;
}
function toUint16(bytes memory _bytes, uint256 _start) internal pure returns (uint16) {
require(_bytes.length >= _start + 2, "toUint16_outOfBounds");
uint16 tempUint;
assembly {
tempUint := mload(add(add(_bytes, 0x2), _start))
}
return tempUint;
}
function toUint32(bytes memory _bytes, uint256 _start) internal pure returns (uint32) {
require(_bytes.length >= _start + 4, "toUint32_outOfBounds");
uint32 tempUint;
assembly {
tempUint := mload(add(add(_bytes, 0x4), _start))
}
return tempUint;
}
function toUint64(bytes memory _bytes, uint256 _start) internal pure returns (uint64) {
require(_bytes.length >= _start + 8, "toUint64_outOfBounds");
uint64 tempUint;
assembly {
tempUint := mload(add(add(_bytes, 0x8), _start))
}
return tempUint;
}
function toUint96(bytes memory _bytes, uint256 _start) internal pure returns (uint96) {
require(_bytes.length >= _start + 12, "toUint96_outOfBounds");
uint96 tempUint;
assembly {
tempUint := mload(add(add(_bytes, 0xc), _start))
}
return tempUint;
}
function toUint128(bytes memory _bytes, uint256 _start) internal pure returns (uint128) {
require(_bytes.length >= _start + 16, "toUint128_outOfBounds");
uint128 tempUint;
assembly {
tempUint := mload(add(add(_bytes, 0x10), _start))
}
return tempUint;
}
function toUint256(bytes memory _bytes, uint256 _start) internal pure returns (uint256) {
require(_bytes.length >= _start + 32, "toUint256_outOfBounds");
uint256 tempUint;
assembly {
tempUint := mload(add(add(_bytes, 0x20), _start))
}
return tempUint;
}
function toBytes32(bytes memory _bytes, uint256 _start) internal pure returns (bytes32) {
require(_bytes.length >= _start + 32, "toBytes32_outOfBounds");
bytes32 tempBytes32;
assembly {
tempBytes32 := mload(add(add(_bytes, 0x20), _start))
}
return tempBytes32;
}
function equal(bytes memory _preBytes, bytes memory _postBytes) internal pure returns (bool) {
bool success = true;
assembly {
let length := mload(_preBytes)
// if lengths don't match the arrays are not equal
switch eq(length, mload(_postBytes))
case 1 {
// cb is a circuit breaker in the for loop since there's
// no said feature for inline assembly loops
// cb = 1 - don't break
// cb = 0 - break
let cb := 1
let mc := add(_preBytes, 0x20)
let end := add(mc, length)
for {
let cc := add(_postBytes, 0x20)
// the next line is the loop condition:
// while(uint256(mc < end) + cb == 2)
} eq(add(lt(mc, end), cb), 2) {
mc := add(mc, 0x20)
cc := add(cc, 0x20)
} {
// if any of these checks fails then arrays are not equal
if iszero(eq(mload(mc), mload(cc))) {
// unsuccess:
success := 0
cb := 0
}
}
}
default {
// unsuccess:
success := 0
}
}
return success;
}
function equalStorage(
bytes storage _preBytes,
bytes memory _postBytes
)
internal
view
returns (bool)
{
bool success = true;
assembly {
// we know _preBytes_offset is 0
let fslot := sload(_preBytes.slot)
// Decode the length of the stored array like in concatStorage().
let slength := div(and(fslot, sub(mul(0x100, iszero(and(fslot, 1))), 1)), 2)
let mlength := mload(_postBytes)
// if lengths don't match the arrays are not equal
switch eq(slength, mlength)
case 1 {
// slength can contain both the length and contents of the array
// if length < 32 bytes so let's prepare for that
// v. http://solidity.readthedocs.io/en/latest/miscellaneous.html#layout-of-state-variables-in-storage
if iszero(iszero(slength)) {
switch lt(slength, 32)
case 1 {
// blank the last byte which is the length
fslot := mul(div(fslot, 0x100), 0x100)
if iszero(eq(fslot, mload(add(_postBytes, 0x20)))) {
// unsuccess:
success := 0
}
}
default {
// cb is a circuit breaker in the for loop since there's
// no said feature for inline assembly loops
// cb = 1 - don't break
// cb = 0 - break
let cb := 1
// get the keccak hash to get the contents of the array
mstore(0x0, _preBytes.slot)
let sc := keccak256(0x0, 0x20)
let mc := add(_postBytes, 0x20)
let end := add(mc, mlength)
// the next line is the loop condition:
// while(uint256(mc < end) + cb == 2)
for {} eq(add(lt(mc, end), cb), 2) {
sc := add(sc, 1)
mc := add(mc, 0x20)
} {
if iszero(eq(sload(sc), mload(mc))) {
// unsuccess:
success := 0
cb := 0
}
}
}
}
}
default {
// unsuccess:
success := 0
}
}
return success;
}
}


@ -0,0 +1,16 @@
// contracts/mock/MockImplementation.sol
// SPDX-License-Identifier: Apache 2
pragma solidity ^0.8.0;
import "../Implementation.sol";
contract MockImplementation is Implementation {
function initialize() initializer public {
// this function needs to be exposed for an upgrade to pass
}
function testNewImplementationActive() external pure returns (bool) {
return true;
}
}


@ -0,0 +1,6 @@
var Migrations = artifacts.require("Migrations");
module.exports = function(deployer) {
// Deploy the Migrations contract as our only task
deployer.deploy(Migrations);
};


@ -0,0 +1,32 @@
require("dotenv").config({ path: "../.env" });
const Setup = artifacts.require("Setup");
const Implementation = artifacts.require("Implementation");
const WormholeReceiver = artifacts.require("WormholeReceiver");
// CONFIG
const initialSigners = JSON.parse(process.env.INIT_SIGNERS);
const governanceChainId = process.env.INIT_GOV_CHAIN_ID;
const governanceContract = process.env.INIT_GOV_CONTRACT; // bytes32
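// Values come from the .env file copied before deployment; Wormhole chain id 1 is Solana, where the governance emitter lives.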
module.exports = async function (deployer) {
// deploy setup
await deployer.deploy(Setup);
// deploy implementation
await deployer.deploy(Implementation);
// encode initialisation data
const setup = new web3.eth.Contract(Setup.abi, Setup.address);
const initData = setup.methods
.setup(
Implementation.address,
initialSigners,
governanceChainId,
governanceContract
)
.encodeABI();
// deploy proxy
await deployer.deploy(WormholeReceiver, Setup.address, initData);
};

receiver/evm/package-lock.json generated Normal file

File diff suppressed because it is too large

receiver/evm/package.json Normal file

@ -0,0 +1,25 @@
{
"name": "wormhole-receiver",
"version": "0.0.1",
"description": "",
"author": "certusone",
"license": "Apache-2.0",
"scripts": {
"build": "truffle compile",
"migrate": "truffle migrate",
"submit-guardian-sets": "truffle exec scripts/submitGuardianSetUpgrades.js",
"test": "truffle test --network test"
},
"devDependencies": {
"@openzeppelin/contracts": "^4.6.0",
"@truffle/hdwallet-provider": "^2.0.8",
"chai": "^4.3.6",
"dotenv": "^16.0.0",
"ganache-cli": "^6.12.2",
"mocha": "^10.0.0",
"solc": "^0.8.4",
"truffle": "^5.3.14",
"truffle-assertions": "^0.9.2",
"truffle-plugin-verify": "^0.5.25"
}
}


@ -0,0 +1,46 @@
// run this script with truffle exec
const jsonfile = require("jsonfile");
const WormholeReceiver = artifacts.require("WormholeReceiver");
const WormholeReceiverImplementationFullABI = jsonfile.readFileSync(
"../build/contracts/Implementation.json"
).abi;
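// Signed guardian set upgrade VAAs (governance messages); replaying them advances the receiver from guardian set 0 to the latest set.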
const GUARDIAN_SET_UPGRADE_1_VAA =
"010000000001007ac31b282c2aeeeb37f3385ee0de5f8e421d30b9e5ae8ba3d4375c1c77a86e77159bb697d9c456d6f8c02d22a94b1279b65b0d6a9957e7d3857423845ac758e300610ac1d2000000030001000000000000000000000000000000000000000000000000000000000000000400000000000005390000000000000000000000000000000000000000000000000000000000436f7265020000000000011358cc3ae5c097b213ce3c81979e1b9f9570746aa5ff6cb952589bde862c25ef4392132fb9d4a42157114de8460193bdf3a2fcf81f86a09765f4762fd1107a0086b32d7a0977926a205131d8731d39cbeb8c82b2fd82faed2711d59af0f2499d16e726f6b211b39756c042441be6d8650b69b54ebe715e234354ce5b4d348fb74b958e8966e2ec3dbd4958a7cdeb5f7389fa26941519f0863349c223b73a6ddee774a3bf913953d695260d88bc1aa25a4eee363ef0000ac0076727b35fbea2dac28fee5ccb0fea768eaf45ced136b9d9e24903464ae889f5c8a723fc14f93124b7c738843cbb89e864c862c38cddcccf95d2cc37a4dc036a8d232b48f62cdd4731412f4890da798f6896a3331f64b48c12d1d57fd9cbe7081171aa1be1d36cafe3867910f99c09e347899c19c38192b6e7387ccd768277c17dab1b7a5027c0b3cf178e21ad2e77ae06711549cfbb1f9c7a9d8096e85e1487f35515d02a92753504a8d75471b9f49edb6fbebc898f403e4773e95feb15e80c9a99c8348d";
const GUARDIAN_SET_UPGRADE_2_VAA =
"01000000010d0012e6b39c6da90c5dfd3c228edbb78c7a4c97c488ff8a346d161a91db067e51d638c17216f368aa9bdf4836b8645a98018ca67d2fec87d769cabfdf2406bf790a0002ef42b288091a670ef3556596f4f47323717882881eaf38e03345078d07a156f312b785b64dae6e9a87e3d32872f59cb1931f728cecf511762981baf48303668f0103cef2616b84c4e511ff03329e0853f1bd7ee9ac5ba71d70a4d76108bddf94f69c2a8a84e4ee94065e8003c334e899184943634e12043d0dda78d93996da073d190104e76d166b9dac98f602107cc4b44ac82868faf00b63df7d24f177aa391e050902413b71046434e67c770b19aecdf7fce1d1435ea0be7262e3e4c18f50ddc8175c0105d9450e8216d741e0206a50f93b750a47e0a258b80eb8fed1314cc300b3d905092de25cd36d366097b7103ae2d184121329ba3aa2d7c6cc53273f11af14798110010687477c8deec89d36a23e7948feb074df95362fc8dcbd8ae910ac556a1dee1e755c56b9db5d710c940938ed79bc1895a3646523a58bc55f475a23435a373ecfdd0107fb06734864f79def4e192497362513171530daea81f07fbb9f698afe7e66c6d44db21323144f2657d4a5386a954bb94eef9f64148c33aef6e477eafa2c5c984c01088769e82216310d1827d9bd48645ec23e90de4ef8a8de99e2d351d1df318608566248d80cdc83bdcac382b3c30c670352be87f9069aab5037d0b747208eae9c650109e9796497ff9106d0d1c62e184d83716282870cef61a1ee13d6fc485b521adcce255c96f7d1bca8d8e7e7d454b65783a830bddc9d94092091a268d311ecd84c26010c468c9fb6d41026841ff9f8d7368fa309d4dbea3ea4bbd2feccf94a92cc8a20a226338a8e2126cd16f70eaf15b4fc9be2c3fa19def14e071956a605e9d1ac4162010e23fcb6bd445b7c25afb722250c1acbc061ed964ba9de1326609ae012acdfb96942b2a102a2de99ab96327859a34a2b49a767dbdb62e0a1fb26af60fe44fd496a00106bb0bac77ac68b347645f2fb1ad789ea9bd76fb9b2324f25ae06f97e65246f142df717f662e73948317182c62ce87d79c73def0dba12e5242dfc038382812cfe00126da03c5e56cb15aeeceadc1e17a45753ab4dc0ec7bf6a75ca03143ed4a294f6f61bc3f478a457833e43084ecd7c985bf2f55a55f168aac0e030fc49e845e497101626e9d9a5d9e343f00010000000000000000000000000000000000000000000000000000000000000004c1759167c43f501c2000000000000000000000000000000000000000000000000000000000436f7265020000000000021358cc3ae5c097b213ce3c81979e1b9f9570746aa5ff6cb952589bde862c25ef4392132fb9d4a42157114de8460193bdf3a2fcf81f86a09765f4762fd1107a0086b32d7a0977926a205131d8731d39cbeb8c82b2fd82faed2711d59af0f2499d16e726f6b211b39756c042441be6d8650b69b54ebe715e234354ce5b4d348fb74b958e8966e2ec3dbd4958a7cd66b9590e1c41e0b226937bf9217d1d67fd4e91f574a3bf913953d695260d88bc1aa25a4eee363ef0000ac0076727b35fbea2dac28fee5ccb0fea768eaf45ced136b9d9e24903464ae889f5c8a723fc14f93124b7c738843cbb89e864c862c38cddcccf95d2cc37a4dc036a8d232b48f62cdd4731412f4890da798f6896a3331f64b48c12d1d57fd9cbe7081171aa1be1d36cafe3867910f99c09e347899c19c38192b6e7387ccd768277c17dab1b7a5027c0b3cf178e21ad2e77ae06711549cfbb1f9c7a9d8096e85e1487f35515d02a92753504a8d75471b9f49edb6fbebc898f403e4773e95feb15e80c9a99c8348d";
// const TEST_VAA =
// "01000000020d005f7c6d5d57806e39e2b72f1b35e105b560dcbaa53ca159713897f666bbcca9566a3153bec04131423d31b3c612b0036711a8f3e092d382ee33666310ce9c13f00001f2bb445b90ce41374692d79037ae2fc76d45de890328404ccde3137a244774ca23cc0f74a3b4e89739cdc78a21e7605ec7f2e082e849d74ea284729916e430f40102fac6f17962e6225becdc69d4f3dbef29f7eda52cf189c3cbdec4d1fad98ba63e05aa8d446bd348fbf3dfeeb1753f857421f4d9b47f10a5eccb8927a289fa2e200103a5f7768647a609d20aaf90e09370f7261e2055b6eaded0941d8222a01d2618c11ef5912d8c00f571dd63157579a8ab39584186d5c6995d70ca255ee97d3f9b390104d529bd9ae735d480822cc094cbc74fe66010d233d81bf84278f0b439bc98df956361dd85b8a8ccb2f55bf94606ffb1dfc2260499c25c1027f51a5f7e7d4240ba010584ba8ebace2a0f39e4ca01c0d3f5b8686d18652cc0cd0f6516ce20ded88c796e002231f1198b7501839eb7fe442db09745d34d58c8a8f107a34dc50e19312eea0106744ce85d12622933bb7ffececee1d7eb27a1460f8a2062c2b39fe1524baebe9d2c543cdb9a762ef233fb3fe874f810ae0457ed1be3b087096d377feab781c44e010c972bab8988fb8df3864f0946771ad80affe3d46a9fc8a1ac5377cb14137fb9ec4e6f61e0deffe103cb090dde734edd72885c84023b2ed10d81a1edddfb13d9f7000db6bfe9f7a0a0c9088b9fc5ead7520af1e22dc58034e46d6a90e75a3dc4f9eb4026940bc9b0ce421cf1b3ea61f5e1863b7075e0c0baeeb9bc5793173e9777f6ac000e452480aa2500b30bd3dbc87f3f9f78b6b0c221ec0343db3bbc22833798ff1d8a480dbc9ae10960623c29373d0ac48f42ec33de03c935019f5fc73bc02b95b7b2010f252ddc2ffaecf009f77ce9e57844211723f13b2300c5791114a319943b3ed6c23f54f121061347973a0b23a8a40e8351a7ce848bd0d24581a1d763032e70062e001090ee9b3e84ea1eac58c043a683aa3c1e8b47a94bda3c1db4109e8d92e3f0f2f50562ef7cca4a90008970ec1e0975c9cefc5bd506d823c0d5f2bad70fb582bd9f011110515f473681be0b8457a9c930736520c62687c393f9f12ddc5daa858951ffee5b569a863561d85e5d76f3b0d0c7febdba8272563383ef2fb48cd23c2a856287006279c15f708b000000050000000000000000000000005a58505a96d1dbf8df91cb21b54419fc36e93fde000000000000aff20f010000000000000000000000000000000000000000000000000000000a6830e3a20000000000000000000000000d500b1d8e8ef31e21c99d1db9a6444d3adf1270000500000000000000000000000031adfc3e96825ecb4e6a6bc09349b4a1a9080c1300060000000000000000000000000000000000000000000000000000000000000000";
module.exports = async function (callback) {
try {
const accounts = await web3.eth.getAccounts();
const initialized = new web3.eth.Contract(
WormholeReceiverImplementationFullABI,
WormholeReceiver.address
);
// Upgrade set 0 to set 1
await initialized.methods
.submitNewGuardianSet("0x" + GUARDIAN_SET_UPGRADE_1_VAA)
.send({
value: 0,
from: accounts[0],
gasLimit: 2000000,
});
// Upgrade set 1 to set 2
await initialized.methods
.submitNewGuardianSet("0x" + GUARDIAN_SET_UPGRADE_2_VAA)
.send({
value: 0,
from: accounts[0],
gasLimit: 2000000,
});
// console.log(
// await initialized.methods.parseAndVerifyVM("0x" + TEST_VAA).call()
// );
callback();
} catch (e) {
callback(e);
}
};


@ -0,0 +1,924 @@
const jsonfile = require("jsonfile");
const elliptic = require("elliptic");
const path = require("path");
const WormholeReceiver = artifacts.require("WormholeReceiver");
const MockImplementation = artifacts.require("MockImplementation");
const Implementation = artifacts.require("Implementation");
const testSigner1PK =
"cfb12303a19cde580bb4dd771639b0d26bc68353645571a8cff516ab2ee113a0";
const testSigner2PK =
"892330666a850761e7370376430bb8c2aa1494072d3bfeaed0c4fa3d5a9135fe";
const testSigner3PK =
"87b45997ea577b93073568f06fc4838cffc1d01f90fc4d57f936957f3c4d99fb";
const testBadSigner1PK =
"87b45997ea577b93073568f06fc4838cffc1d01f90fc4d57f936957f3c4d99fc";
const core = "0x" + Buffer.from("Core").toString("hex").padStart(64, 0);
const actionGuardianSetUpgrade = "02";
const ImplementationFullABI = jsonfile.readFileSync(
"build/contracts/Implementation.json"
).abi;
// Taken from https://medium.com/fluidity/standing-the-time-of-test-b906fcc374a9
const advanceTimeAndBlock = async (time) => {
await advanceTime(time);
await advanceBlock();
return Promise.resolve(web3.eth.getBlock("latest"));
};
const advanceTime = (time) => {
return new Promise((resolve, reject) => {
web3.currentProvider.send(
{
jsonrpc: "2.0",
method: "evm_increaseTime",
params: [time],
id: new Date().getTime(),
},
(err, result) => {
if (err) {
return reject(err);
}
return resolve(result);
}
);
});
};
const advanceBlock = () => {
return new Promise((resolve, reject) => {
web3.currentProvider.send(
{
jsonrpc: "2.0",
method: "evm_mine",
id: new Date().getTime(),
},
(err, result) => {
if (err) {
return reject(err);
}
const newBlockHash = web3.eth.getBlock("latest").hash;
return resolve(newBlockHash);
}
);
});
};
contract("WormholeReceiver", function () {
const testSigner1 = web3.eth.accounts.privateKeyToAccount(testSigner1PK);
const testSigner2 = web3.eth.accounts.privateKeyToAccount(testSigner2PK);
const testSigner3 = web3.eth.accounts.privateKeyToAccount(testSigner3PK);
const testChainId = "2";
const testGovernanceChainId = "1";
const testGovernanceContract =
"0x0000000000000000000000000000000000000000000000000000000000000004";
it("should be initialized with the correct signers and values", async function () {
const initialized = new web3.eth.Contract(
ImplementationFullABI,
WormholeReceiver.address
);
const index = await initialized.methods.getCurrentGuardianSetIndex().call();
const set = await initialized.methods.getGuardianSet(index).call();
// check set
assert.lengthOf(set[0], 1);
assert.equal(set[0][0], testSigner1.address);
// check expiration
assert.equal(set.expirationTime, "0");
// governance
const governanceChainId = await initialized.methods
.governanceChainId()
.call();
assert.equal(governanceChainId, testGovernanceChainId);
const governanceContract = await initialized.methods
.governanceContract()
.call();
assert.equal(governanceContract, testGovernanceContract);
});
it("parses VMs correctly", async function () {
const initialized = new web3.eth.Contract(
ImplementationFullABI,
WormholeReceiver.address
);
const timestamp = 1000;
const nonce = 1001;
const emitterChainId = 11;
const emitterAddress =
"0x0000000000000000000000000000000000000000000000000000000000000eee";
const data = "0xaaaaaa";
const vm = await signAndEncodeVM(
timestamp,
nonce,
emitterChainId,
emitterAddress,
1337,
data,
[testSigner1PK],
0,
2
);
let result;
try {
result = await initialized.methods.parseAndVerifyVM("0x" + vm).call();
} catch (err) {
console.log(err);
assert.fail("parseAndVerifyVM failed");
}
assert.equal(result.vm.version, 1);
assert.equal(result.vm.timestamp, timestamp);
assert.equal(result.vm.nonce, nonce);
assert.equal(result.vm.emitterChainId, emitterChainId);
assert.equal(result.vm.emitterAddress, emitterAddress);
assert.equal(result.vm.payload, data);
assert.equal(result.vm.guardianSetIndex, 0);
assert.equal(result.vm.sequence, 1337);
assert.equal(result.vm.consistencyLevel, 2);
assert.equal(result.valid, true);
assert.equal(result.reason, "");
});
it("should fail quorum on VMs with no signers", async function () {
const initialized = new web3.eth.Contract(
ImplementationFullABI,
WormholeReceiver.address
);
const timestamp = 1000;
const nonce = 1001;
const emitterChainId = 11;
const emitterAddress =
"0x0000000000000000000000000000000000000000000000000000000000000eee";
const data = "0xaaaaaa";
const vm = await signAndEncodeVM(
timestamp,
nonce,
emitterChainId,
emitterAddress,
1337,
data,
[], // no valid signers present
0,
2
);
let result = await initialized.methods.parseAndVerifyVM("0x" + vm).call();
assert.equal(result[1], false);
assert.equal(result[2], "no quorum");
});
it("should fail to verify on VMs with bad signer", async function () {
const initialized = new web3.eth.Contract(
ImplementationFullABI,
WormholeReceiver.address
);
const timestamp = 1000;
const nonce = 1001;
const emitterChainId = 11;
const emitterAddress =
"0x0000000000000000000000000000000000000000000000000000000000000eee";
const data = "0xaaaaaa";
const vm = await signAndEncodeVM(
timestamp,
nonce,
emitterChainId,
emitterAddress,
1337,
data,
[
testBadSigner1PK, // not a valid signer
],
0,
2
);
let result = await initialized.methods.parseAndVerifyVM("0x" + vm).call();
assert.equal(result[1], false);
assert.equal(result[2], "VM signature invalid");
});
it("should error on VMs with invalid guardian set index", async function () {
const initialized = new web3.eth.Contract(
ImplementationFullABI,
WormholeReceiver.address
);
const timestamp = 1000;
const nonce = 1001;
const emitterChainId = 11;
const emitterAddress =
"0x0000000000000000000000000000000000000000000000000000000000000eee";
const data = "0xaaaaaa";
const vm = await signAndEncodeVM(
timestamp,
nonce,
emitterChainId,
emitterAddress,
1337,
data,
[testSigner1PK],
200,
2
);
let result = await initialized.methods.parseAndVerifyVM("0x" + vm).call();
assert.equal(result[1], false);
assert.equal(result[2], "invalid guardian set");
});
it("should revert on VMs with duplicate non-monotonic signature indexes", async function () {
const initialized = new web3.eth.Contract(
ImplementationFullABI,
WormholeReceiver.address
);
const timestamp = 1000;
const nonce = 1001;
const emitterChainId = 11;
const emitterAddress =
"0x0000000000000000000000000000000000000000000000000000000000000eee";
const data = "0xaaaaaa";
const vm = await signAndEncodeVMFixedIndex(
timestamp,
nonce,
emitterChainId,
emitterAddress,
1337,
data,
[testSigner1PK, testSigner1PK, testSigner1PK],
0,
2
);
try {
await initialized.methods.parseAndVerifyVM("0x" + vm).call();
assert.fail("accepted signature indexes being the same in a VM");
} catch (e) {
assert.equal(
e.data[Object.keys(e.data)[0]].reason,
"signature indices must be ascending"
);
}
});
it("should accept a new guardian set", async function () {
const initialized = new web3.eth.Contract(
ImplementationFullABI,
WormholeReceiver.address
);
const accounts = await web3.eth.getAccounts();
const timestamp = 1000;
const nonce = 1001;
const emitterChainId = testGovernanceChainId;
const emitterAddress = testGovernanceContract;
let oldIndex = Number(
await initialized.methods.getCurrentGuardianSetIndex().call()
);
data = [
// Core
core,
// Action 2 (Guardian Set Upgrade)
actionGuardianSetUpgrade,
web3.eth.abi
.encodeParameter("uint16", testChainId)
.substring(2 + (64 - 4)),
web3.eth.abi
.encodeParameter("uint32", oldIndex + 1)
.substring(2 + (64 - 8)),
web3.eth.abi.encodeParameter("uint8", 3).substring(2 + (64 - 2)),
web3.eth.abi
.encodeParameter("address", testSigner1.address)
.substring(2 + (64 - 40)),
web3.eth.abi
.encodeParameter("address", testSigner2.address)
.substring(2 + (64 - 40)),
web3.eth.abi
.encodeParameter("address", testSigner3.address)
.substring(2 + (64 - 40)),
].join("");
const vm = await signAndEncodeVM(
timestamp,
nonce,
emitterChainId,
emitterAddress,
0,
data,
[testSigner1PK],
0,
2
);
let set = await initialized.methods.submitNewGuardianSet("0x" + vm).send({
value: 0,
from: accounts[0],
gasLimit: 1000000,
});
let index = await initialized.methods.getCurrentGuardianSetIndex().call();
assert.equal(oldIndex + 1, index);
assert.equal(index, 1);
let guardians = await initialized.methods.getGuardianSet(index).call();
assert.equal(guardians.expirationTime, 0);
assert.lengthOf(guardians[0], 3);
assert.equal(guardians[0][0], testSigner1.address);
assert.equal(guardians[0][1], testSigner2.address);
assert.equal(guardians[0][2], testSigner3.address);
let oldGuardians = await initialized.methods
.getGuardianSet(oldIndex)
.call();
const time = (await web3.eth.getBlock("latest")).timestamp;
// old guardian set expiry is set
assert.ok(
oldGuardians.expirationTime > Number(time) + 86000 &&
oldGuardians.expirationTime < Number(time) + 88000
);
});
it("should accept smart contract upgrades", async function () {
const initialized = new web3.eth.Contract(
ImplementationFullABI,
WormholeReceiver.address
);
const accounts = await web3.eth.getAccounts();
const mock = await MockImplementation.new();
let before = await web3.eth.getStorageAt(
WormholeReceiver.address,
"0x360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc"
);
assert.equal(before.toLowerCase(), Implementation.address.toLowerCase());
// attempt to upgrade the implementation from non-owner account
let failed = false;
try {
await initialized.methods.upgradeImplementation(mock.address).send({
value: 0,
from: accounts[1],
gasLimit: 1000000,
});
} catch (e) {
assert.equal(
e.message,
"Returned error: VM Exception while processing transaction: revert caller is not the owner"
);
failed = true;
}
assert.ok(failed);
let set = await initialized.methods
.upgradeImplementation(mock.address)
.send({
value: 0,
from: accounts[0],
gasLimit: 1000000,
});
let after = await web3.eth.getStorageAt(
WormholeReceiver.address,
"0x360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc"
);
assert.equal(after.toLowerCase(), mock.address.toLowerCase());
const mockImpl = new web3.eth.Contract(
MockImplementation.abi,
WormholeReceiver.address
);
let isUpgraded = await mockImpl.methods
.testNewImplementationActive()
.call();
assert.ok(isUpgraded);
});
it("should allow the owner to transfer ownership", async function () {
const accounts = await web3.eth.getAccounts();
const currentOwner = accounts[0];
const newOwner = accounts[1];
const initialized = new web3.eth.Contract(
ImplementationFullABI,
WormholeReceiver.address
);
// transfer ownership
await initialized.methods.transferOwnership(newOwner).send({
value: "0",
from: currentOwner,
gasLimit: 1000000,
});
// check getters after the action
let contractOwner = await initialized.methods.owner().call();
assert.equal(contractOwner, newOwner);
// make sure only the owner can transfer ownership
let failed = false;
try {
await initialized.methods.transferOwnership(newOwner).send({
value: "0",
from: currentOwner, // no longer the current owner
gasLimit: 1000000,
});
} catch (e) {
assert.equal(
e.message,
"Returned error: VM Exception while processing transaction: revert caller is not the owner"
);
failed = true;
}
assert.ok(failed);
// revert ownership back to currentOwner
await initialized.methods.transferOwnership(currentOwner).send({
value: "0",
from: newOwner,
gasLimit: 1000000,
});
// check that ownership has been reverted
contractOwner = await initialized.methods.owner().call();
assert.equal(contractOwner, currentOwner);
});
it("should revert governance packets from old guardian set", async function () {
const initialized = new web3.eth.Contract(
ImplementationFullABI,
WormholeReceiver.address
);
const accounts = await web3.eth.getAccounts();
let oldIndex = Number(
await initialized.methods.getCurrentGuardianSetIndex().call()
);
data = [
// Core
core,
// Action 2 (Guardian Set Upgrade)
actionGuardianSetUpgrade,
web3.eth.abi
.encodeParameter("uint16", testChainId)
.substring(2 + (64 - 4)),
web3.eth.abi
.encodeParameter("uint32", oldIndex + 1)
.substring(2 + (64 - 8)),
web3.eth.abi.encodeParameter("uint8", 3).substring(2 + (64 - 2)),
web3.eth.abi
.encodeParameter("address", testSigner1.address)
.substring(2 + (64 - 40)),
web3.eth.abi
.encodeParameter("address", testSigner2.address)
.substring(2 + (64 - 40)),
web3.eth.abi
.encodeParameter("address", testSigner3.address)
.substring(2 + (64 - 40)),
].join("");
const vm = await signAndEncodeVM(
0,
0,
testGovernanceChainId,
testGovernanceContract,
0,
data,
[testSigner1PK],
0,
2
);
let failed = false;
try {
await initialized.methods.submitNewGuardianSet("0x" + vm).send({
value: 0,
from: accounts[0],
gasLimit: 1000000,
});
assert.fail("governance packet of old guardian set accepted");
} catch (e) {
assert.equal(
e.data[Object.keys(e.data)[0]].reason,
"not signed by current guardian set"
);
}
});
it("should time out old gardians", async function () {
const initialized = new web3.eth.Contract(
ImplementationFullABI,
WormholeReceiver.address
);
const timestamp = 1000;
const nonce = 1001;
const emitterChainId = 11;
const emitterAddress =
"0x0000000000000000000000000000000000000000000000000000000000000eee";
const data = "0xaaaaaa";
const vm = await signAndEncodeVM(
timestamp,
nonce,
emitterChainId,
emitterAddress,
0,
data,
[testSigner1PK],
0,
2
);
// this should pass
const current = await initialized.methods
.parseAndVerifyVM("0x" + vm)
.call();
assert.equal(current.valid, true);
await advanceTimeAndBlock(100000);
const expired = await initialized.methods
.parseAndVerifyVM("0x" + vm)
.call();
assert.equal(expired.valid, false);
});
it("should revert governance packets from wrong governance chain", async function () {
const initialized = new web3.eth.Contract(
ImplementationFullABI,
WormholeReceiver.address
);
const accounts = await web3.eth.getAccounts();
let oldIndex = Number(
await initialized.methods.getCurrentGuardianSetIndex().call()
);
data = [
// Core
core,
// Action 2 (Guardian Set Upgrade)
actionGuardianSetUpgrade,
web3.eth.abi
.encodeParameter("uint16", testChainId)
.substring(2 + (64 - 4)),
web3.eth.abi
.encodeParameter("uint32", oldIndex + 1)
.substring(2 + (64 - 8)),
web3.eth.abi.encodeParameter("uint8", 3).substring(2 + (64 - 2)),
web3.eth.abi
.encodeParameter("address", testSigner1.address)
.substring(2 + (64 - 40)),
web3.eth.abi
.encodeParameter("address", testSigner2.address)
.substring(2 + (64 - 40)),
web3.eth.abi
.encodeParameter("address", testSigner3.address)
.substring(2 + (64 - 40)),
].join("");
const vm = await signAndEncodeVM(
0,
0,
999,
testGovernanceContract,
0,
data,
[testSigner1PK, testSigner2PK, testSigner3PK],
1,
2
);
try {
await initialized.methods.submitNewGuardianSet("0x" + vm).send({
value: 0,
from: accounts[0],
gasLimit: 1000000,
});
assert.fail("governance packet from wrong governance chain accepted");
} catch (e) {
assert.equal(
e.data[Object.keys(e.data)[0]].reason,
"wrong governance chain"
);
}
});
it("should revert governance packets from wrong governance contract", async function () {
const initialized = new web3.eth.Contract(
ImplementationFullABI,
WormholeReceiver.address
);
const accounts = await web3.eth.getAccounts();
let oldIndex = Number(
await initialized.methods.getCurrentGuardianSetIndex().call()
);
const data = [
// Core
core,
// Action 2 (Guardian Set Upgrade)
actionGuardianSetUpgrade,
web3.eth.abi
.encodeParameter("uint16", testChainId)
.substring(2 + (64 - 4)),
web3.eth.abi
.encodeParameter("uint32", oldIndex + 1)
.substring(2 + (64 - 8)),
web3.eth.abi.encodeParameter("uint8", 3).substring(2 + (64 - 2)),
web3.eth.abi
.encodeParameter("address", testSigner1.address)
.substring(2 + (64 - 40)),
web3.eth.abi
.encodeParameter("address", testSigner2.address)
.substring(2 + (64 - 40)),
web3.eth.abi
.encodeParameter("address", testSigner3.address)
.substring(2 + (64 - 40)),
].join("");
const vm = await signAndEncodeVM(
0,
0,
testGovernanceChainId,
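// an emitter address that does not match the configured governance contract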
core,
0,
data,
[testSigner1PK, testSigner2PK, testSigner3PK],
1,
2
);
try {
await initialized.methods.submitNewGuardianSet("0x" + vm).send({
value: 0,
from: accounts[0],
gasLimit: 1000000,
});
assert.fail("governance packet from wrong governance contract accepted");
} catch (e) {
assert.equal(
e.data[Object.keys(e.data)[0]].reason,
"wrong governance contract"
);
}
});
it("should revert on governance packets that already have been applied", async function () {
const initialized = new web3.eth.Contract(
ImplementationFullABI,
WormholeReceiver.address
);
const accounts = await web3.eth.getAccounts();
let oldIndex = Number(
await initialized.methods.getCurrentGuardianSetIndex().call()
);
const data = [
// Core
core,
// Action 2 (Guardian Set Upgrade)
actionGuardianSetUpgrade,
web3.eth.abi
.encodeParameter("uint16", testChainId)
.substring(2 + (64 - 4)),
web3.eth.abi
.encodeParameter("uint32", oldIndex + 1)
.substring(2 + (64 - 8)),
web3.eth.abi.encodeParameter("uint8", 3).substring(2 + (64 - 2)),
web3.eth.abi
.encodeParameter("address", testSigner1.address)
.substring(2 + (64 - 40)),
web3.eth.abi
.encodeParameter("address", testSigner2.address)
.substring(2 + (64 - 40)),
web3.eth.abi
.encodeParameter("address", testSigner3.address)
.substring(2 + (64 - 40)),
].join("");
const vm = await signAndEncodeVM(
0,
0,
testGovernanceChainId,
testGovernanceContract,
0,
data,
[testSigner1PK, testSigner2PK, testSigner3PK],
1,
2
);
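// the first submission succeeds and marks this governance action as consumed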
await initialized.methods.submitNewGuardianSet("0x" + vm).send({
value: 0,
from: accounts[0],
gasLimit: 1000000,
});
// create another VAA because the previous submission advanced the current guardian set index
const vm2 = await signAndEncodeVM(
0,
0,
testGovernanceChainId,
testGovernanceContract,
0,
data,
[testSigner1PK, testSigner2PK, testSigner3PK],
2,
2
);
try {
await initialized.methods.submitNewGuardianSet("0x" + vm2).send({
value: 0,
from: accounts[0],
gasLimit: 1000000,
});
assert.fail("governance packet accepted twice");
} catch (e) {
assert.equal(
e.data[Object.keys(e.data)[0]].reason,
"governance action already consumed"
);
}
});
});
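// Builds a guardian-signed VAA for the tests.
// Header: version (uint8) | guardian set index (uint32) | signature count (uint8)
// Each signature: guardian index (uint8) | r (32 bytes) | s (32 bytes) | recovery id (uint8)
// Body: timestamp (uint32) | nonce (uint32) | emitter chain (uint16) | emitter address (bytes32) |
//       sequence (uint64) | consistency level (uint8) | payload
// Guardians sign keccak256(keccak256(body)).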
const signAndEncodeVM = async function (
timestamp,
nonce,
emitterChainId,
emitterAddress,
sequence,
data,
signers,
guardianSetIndex,
consistencyLevel
) {
const body = [
web3.eth.abi.encodeParameter("uint32", timestamp).substring(2 + (64 - 8)),
web3.eth.abi.encodeParameter("uint32", nonce).substring(2 + (64 - 8)),
web3.eth.abi
.encodeParameter("uint16", emitterChainId)
.substring(2 + (64 - 4)),
web3.eth.abi.encodeParameter("bytes32", emitterAddress).substring(2),
web3.eth.abi.encodeParameter("uint64", sequence).substring(2 + (64 - 16)),
web3.eth.abi
.encodeParameter("uint8", consistencyLevel)
.substring(2 + (64 - 2)),
data.substr(2),
];
const hash = web3.utils.soliditySha3(
web3.utils.soliditySha3("0x" + body.join(""))
);
let signatures = "";
for (let i in signers) {
const ec = new elliptic.ec("secp256k1");
const key = ec.keyFromPrivate(signers[i]);
const signature = key.sign(hash.substr(2), { canonical: true });
const packSig = [
web3.eth.abi.encodeParameter("uint8", i).substring(2 + (64 - 2)),
zeroPadBytes(signature.r.toString(16), 32),
zeroPadBytes(signature.s.toString(16), 32),
web3.eth.abi
.encodeParameter("uint8", signature.recoveryParam)
.substr(2 + (64 - 2)),
];
signatures += packSig.join("");
}
const vm = [
web3.eth.abi.encodeParameter("uint8", 1).substring(2 + (64 - 2)),
web3.eth.abi
.encodeParameter("uint32", guardianSetIndex)
.substring(2 + (64 - 8)),
web3.eth.abi
.encodeParameter("uint8", signers.length)
.substring(2 + (64 - 2)),
signatures,
body.join(""),
].join("");
return vm;
};
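// Same as signAndEncodeVM, except every signature claims guardian index 0, yielding a VM whose
// signer indices are not strictly increasing (used to test rejection of non-monotonic signatures).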
const signAndEncodeVMFixedIndex = async function (
timestamp,
nonce,
emitterChainId,
emitterAddress,
sequence,
data,
signers,
guardianSetIndex,
consistencyLevel
) {
const body = [
web3.eth.abi.encodeParameter("uint32", timestamp).substring(2 + (64 - 8)),
web3.eth.abi.encodeParameter("uint32", nonce).substring(2 + (64 - 8)),
web3.eth.abi
.encodeParameter("uint16", emitterChainId)
.substring(2 + (64 - 4)),
web3.eth.abi.encodeParameter("bytes32", emitterAddress).substring(2),
web3.eth.abi.encodeParameter("uint64", sequence).substring(2 + (64 - 16)),
web3.eth.abi
.encodeParameter("uint8", consistencyLevel)
.substring(2 + (64 - 2)),
data.substr(2),
];
const hash = web3.utils.soliditySha3(
web3.utils.soliditySha3("0x" + body.join(""))
);
let signatures = "";
for (let i in signers) {
const ec = new elliptic.ec("secp256k1");
const key = ec.keyFromPrivate(signers[i]);
const signature = key.sign(hash.substr(2), { canonical: true });
const packSig = [
// Fix every signature's guardian index to zero to produce a non-monotonic VM
web3.eth.abi.encodeParameter("uint8", 0).substring(2 + (64 - 2)),
zeroPadBytes(signature.r.toString(16), 32),
zeroPadBytes(signature.s.toString(16), 32),
web3.eth.abi
.encodeParameter("uint8", signature.recoveryParam)
.substr(2 + (64 - 2)),
];
signatures += packSig.join("");
}
const vm = [
web3.eth.abi.encodeParameter("uint8", 1).substring(2 + (64 - 2)),
web3.eth.abi
.encodeParameter("uint32", guardianSetIndex)
.substring(2 + (64 - 8)),
web3.eth.abi
.encodeParameter("uint8", signers.length)
.substring(2 + (64 - 2)),
signatures,
body.join(""),
].join("");
return vm;
};
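// Left-pads a hex string (without the 0x prefix) with zeros to the given byte length.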
function zeroPadBytes(value, length) {
while (value.length < 2 * length) {
value = "0" + value;
}
return value;
}

View File

@ -0,0 +1,92 @@
const HDWalletProvider = require("@truffle/hdwallet-provider");
module.exports = {
networks: {
development: {
host: "127.0.0.1",
port: 8545,
network_id: "*",
},
// test network is the same as development but allows us to omit certain migrations
test: {
host: "127.0.0.1",
port: 8545,
network_id: "*",
},
mainnet: {
provider: () =>
new HDWalletProvider(
process.env.MNEMONIC,
`https://mainnet.infura.io/v3/${process.env.INFURA_KEY}`
),
network_id: 1,
gas: 10000000,
gasPrice: 191000000000,
confirmations: 1,
timeoutBlocks: 200,
skipDryRun: false,
},
rinkeby: {
provider: () =>
new HDWalletProvider(
process.env.MNEMONIC,
`https://rinkeby.infura.io/v3/${process.env.INFURA_KEY}`
),
network_id: 4,
gas: 5500000,
confirmations: 2,
timeoutBlocks: 200,
skipDryRun: true,
},
goerli: {
provider: () => {
return new HDWalletProvider(
process.env.MNEMONIC,
"https://goerli.infura.io/v3/" + process.env.INFURA_KEY
);
},
network_id: "5",
gas: 4465030,
gasPrice: 10000000000,
},
binance: {
provider: () => {
return new HDWalletProvider(
process.env.MNEMONIC,
"https://bsc-dataseed.binance.org/"
);
},
network_id: "56",
gas: 70000000,
gasPrice: 8000000000,
},
binance_testnet: {
provider: () =>
new HDWalletProvider(
process.env.MNEMONIC,
"https://data-seed-prebsc-2-s1.binance.org:8545/"
),
network_id: "97",
gas: 20000000,
},
},
compilers: {
solc: {
version: "0.8.4",
settings: {
optimizer: {
enabled: true,
runs: 200,
},
},
},
},
plugins: ["truffle-plugin-verify"],
api_keys: {
etherscan: process.env.ETHERSCAN_KEY,
bscscan: process.env.ETHERSCAN_KEY,
},
};