clients/js: prep for release (#2862)

* clients/js: prep for release

* clients/js: make worm directory agnostic
Evan Gray 2023-05-05 05:39:53 -04:00 committed by GitHub
parent afc174e8ca
commit b23e7ce8e8
81 changed files with 614 additions and 543 deletions
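At a glance, the substance of the release prep is the "make `worm` directory agnostic" change: the CLI no longer assumes it is running from inside a wormhole repository checkout. The `config.wormholeDir`-based config module is deleted, local-validator commands run from the user's home directory instead of the repo, and missing-binary errors link to hosted READMEs rather than repo-relative paths. A minimal sketch of the before/after, using names taken from the diffs below (illustrative only):

```typescript
import os from "os";

// Before this commit: paths were derived from the repository root.
// const dir = `${config.wormholeDir}/aptos`;  // the config module is removed below

// After this commit: only the user's home directory is assumed to exist.
const dir = os.homedir();                      // working dir for the start-validator commands
const envPath = `${dir}/.wormhole/.env`;       // networks.ts now loads dotenv from here
const README_URL =
  "https://github.com/wormhole-foundation/wormhole/blob/main/aptos/README.md"; // missing-binary hints link here
```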

clients/js/CHANGELOG.md

@@ -0,0 +1,11 @@
# Changelog
## 0.0.2
### Changed
Make `worm` directory agnostic
## 0.0.1
Initial release

@@ -1,383 +0,0 @@
import { assertChain, CHAIN_ID_APTOS, coalesceChainId, CONTRACTS } from "@certusone/wormhole-sdk/lib/cjs/utils/consts";
import { BCS, FaucetClient } from "aptos";
import { spawnSync } from 'child_process';
import fs from 'fs';
import sha3 from 'js-sha3';
import yargs from "yargs";
import { callEntryFunc, deriveResourceAccount, deriveWrappedAssetAddress } from "../aptos";
import { GOVERNANCE_CHAIN, GOVERNANCE_EMITTER, NAMED_ADDRESSES_OPTIONS, NETWORK_OPTIONS, RPC_OPTIONS } from "../consts";
import { NETWORKS } from "../networks";
import { assertNetwork, checkBinary, evm_address, hex } from "../utils";
import { runCommand, validator_args } from '../start-validator';
import { config } from "../config";
interface Package {
meta_file: string,
mv_files: string[]
}
interface PackageBCS {
meta: Uint8Array,
bytecodes: Uint8Array,
codeHash: Uint8Array
}
exports.command = 'aptos';
exports.desc = 'Aptos utilities';
exports.builder = function(y: typeof yargs) {
return y
// NOTE: there's no init-nft-bridge, because the native module initialiser
// functionality has stabilised on mainnet, so we just use that one (which
// gets called automatically)
.command("init-token-bridge", "Init token bridge contract", (yargs) => {
return yargs
.option("network", NETWORK_OPTIONS)
.option("rpc", RPC_OPTIONS)
}, async (argv) => {
const network = argv.network.toUpperCase();
assertNetwork(network);
const contract_address = evm_address(CONTRACTS[network].aptos.token_bridge);
const rpc = argv.rpc ?? NETWORKS[network]["aptos"].rpc;
await callEntryFunc(network, rpc, `${contract_address}::token_bridge`, "init", [], []);
})
.command("init-wormhole", "Init Wormhole core contract", (yargs) => {
return yargs
.option("network", NETWORK_OPTIONS)
.option("rpc", RPC_OPTIONS)
.option("chain-id", {
describe: "Chain id",
type: "number",
default: CHAIN_ID_APTOS,
required: false
})
.option("governance-chain-id", {
describe: "Governance chain id",
type: "number",
default: GOVERNANCE_CHAIN,
required: false
})
.option("governance-address", {
describe: "Governance address",
type: "string",
default: GOVERNANCE_EMITTER,
required: false
})
.option("guardian-address", {
alias: "g",
required: true,
describe: "Initial guardian's addresses (CSV)",
type: "string",
})
}, async (argv) => {
const network = argv.network.toUpperCase();
assertNetwork(network);
const contract_address = evm_address(CONTRACTS[network].aptos.core);
const guardian_addresses = argv["guardian-address"].split(",").map(address => evm_address(address).substring(24));
const chain_id = argv["chain-id"];
const governance_address = evm_address(argv["governance-address"]);
const governance_chain_id = argv["governance-chain-id"];
const guardians_serializer = new BCS.Serializer();
guardians_serializer.serializeU32AsUleb128(guardian_addresses.length);
guardian_addresses.forEach(address => guardians_serializer.serializeBytes(Buffer.from(address, "hex")));
const args = [
BCS.bcsSerializeUint64(chain_id),
BCS.bcsSerializeUint64(governance_chain_id),
BCS.bcsSerializeBytes(Buffer.from(governance_address, "hex")),
guardians_serializer.getBytes()
]
const rpc = argv.rpc ?? NETWORKS[network]["aptos"].rpc;
await callEntryFunc(network, rpc, `${contract_address}::wormhole`, "init", [], args);
})
.command("deploy <package-dir>", "Deploy an Aptos package", (yargs) => {
return yargs
.positional("package-dir", {
type: "string"
})
.option("network", NETWORK_OPTIONS)
.option("rpc", RPC_OPTIONS)
.option("named-addresses", NAMED_ADDRESSES_OPTIONS)
}, async (argv) => {
const network = argv.network.toUpperCase();
assertNetwork(network);
checkBinary("aptos", "aptos");
const p = buildPackage(argv["package-dir"], argv["named-addresses"]);
const b = serializePackage(p);
const rpc = argv.rpc ?? NETWORKS[network]["aptos"].rpc;
await callEntryFunc(network, rpc, "0x1::code", "publish_package_txn", [], [b.meta, b.bytecodes])
console.log("Deployed:", p.mv_files)
})
.command("deploy-resource <seed> <package-dir>", "Deploy an Aptos package using a resource account", (yargs) => {
return yargs
.positional("seed", {
type: "string"
})
.positional("package-dir", {
type: "string"
})
.option("network", NETWORK_OPTIONS)
.option("rpc", RPC_OPTIONS)
.option("named-addresses", NAMED_ADDRESSES_OPTIONS)
}, async (argv) => {
const network = argv.network.toUpperCase();
assertNetwork(network);
checkBinary("aptos", "aptos");
const p = buildPackage(argv["package-dir"], argv["named-addresses"]);
const b = serializePackage(p);
const seed = Buffer.from(argv["seed"], "ascii")
// TODO(csongor): use deployer address from sdk (when it's there)
let module_name = "0x277fa055b6a73c42c0662d5236c65c864ccbf2d4abd21f174a30c8b786eab84b::deployer";
if (network == "TESTNET" || network == "MAINNET") {
module_name = "0x0108bc32f7de18a5f6e1e7d6ee7aff9f5fc858d0d87ac0da94dd8d2a5d267d6b::deployer";
}
const rpc = argv.rpc ?? NETWORKS[network]["aptos"].rpc;
await callEntryFunc(
network,
rpc,
module_name,
"deploy_derived",
[],
[
b.meta,
b.bytecodes,
BCS.bcsSerializeBytes(seed)
])
console.log("Deployed:", p.mv_files)
})
.command("send-example-message <message>", "Send example message", (yargs) => {
return yargs
.positional("message", {
type: "string"
})
.option("network", NETWORK_OPTIONS)
}, async (argv) => {
const network = argv.network.toUpperCase();
assertNetwork(network);
const rpc = NETWORKS[network]["aptos"].rpc;
// TODO(csongor): use sdk address
let module_name = "0x277fa055b6a73c42c0662d5236c65c864ccbf2d4abd21f174a30c8b786eab84b::sender";
if (network == "TESTNET" || network == "MAINNET") {
module_name = "0x0108bc32f7de18a5f6e1e7d6ee7aff9f5fc858d0d87ac0da94dd8d2a5d267d6b::sender";
}
await callEntryFunc(network, rpc, module_name, "send_message", [], [BCS.bcsSerializeBytes(Buffer.from(argv["message"], "ascii"))])
})
.command("derive-resource-account <account> <seed>", "Derive resource account address", (yargs) => {
return yargs
.positional("account", {
type: "string"
})
.positional("seed", {
type: "string"
})
}, async (argv) => {
console.log(deriveResourceAccount(Buffer.from(hex(argv['account']).substring(2), 'hex'), argv['seed']))
})
.command("derive-wrapped-address <chain> <origin-address>", "Derive wrapped coin type", (yargs) => {
return yargs
.positional("chain", {
type: "string"
})
.positional("origin-address", {
type: "string"
})
.option("network", NETWORK_OPTIONS)
}, async (argv) => {
const network = argv.network.toUpperCase();
assertNetwork(network);
let address = CONTRACTS[network].aptos.token_bridge;
if (address.startsWith("0x")) address = address.substring(2);
const token_bridge_address = Buffer.from(address, "hex");
assertChain(argv["chain"]);
const chain = coalesceChainId(argv["chain"]);
const origin_address = Buffer.from(evm_address(argv["origin-address"]), "hex");
console.log(deriveWrappedAssetAddress(token_bridge_address, chain, origin_address))
})
.command("hash-contracts <package-dir>", "Hash contract bytecodes for upgrade", (yargs) => {
return yargs
.positional("seed", {
type: "string"
})
.positional("package-dir", {
type: "string"
})
.option("named-addresses", NAMED_ADDRESSES_OPTIONS)
}, (argv) => {
checkBinary("aptos", "aptos");
const p = buildPackage(argv["package-dir"], argv["named-addresses"]);
const b = serializePackage(p);
console.log(Buffer.from(b.codeHash).toString("hex"));
})
.command("upgrade <package-dir>", "Perform upgrade after VAA has been submitted", (_yargs) => {
return yargs
.positional("package-dir", {
type: "string"
})
// TODO(csongor): once the sdk has the addresses, just look that up
// based on the module
.option("contract-address", {
alias: "a",
required: true,
describe: "Address where the wormhole module is deployed",
type: "string",
})
.option("network", NETWORK_OPTIONS)
.option("rpc", RPC_OPTIONS)
.option("named-addresses", NAMED_ADDRESSES_OPTIONS)
}, async (argv) => {
const network = argv.network.toUpperCase();
assertNetwork(network);
checkBinary("aptos", "aptos");
const p = buildPackage(argv["package-dir"], argv["named-addresses"]);
const b = serializePackage(p);
const rpc = argv.rpc ?? NETWORKS[network]["aptos"].rpc;
// TODO(csongor): use deployer address from sdk (when it's there)
const hash = await callEntryFunc(
network,
rpc,
`${argv["contract-address"]}::contract_upgrade`,
"upgrade",
[],
[
b.meta,
b.bytecodes,
])
console.log("Deployed:", p.mv_files)
console.log(hash)
})
.command("migrate", "Perform migration after contract upgrade", (_yargs) => {
return yargs
// TODO(csongor): once the sdk has the addresses, just look that up
// based on the module
.option("contract-address", {
alias: "a",
required: true,
describe: "Address where the wormhole module is deployed",
type: "string",
})
.option("network", NETWORK_OPTIONS)
.option("rpc", RPC_OPTIONS)
}, async (argv) => {
const network = argv.network.toUpperCase();
assertNetwork(network);
checkBinary("aptos", "aptos");
const rpc = argv.rpc ?? NETWORKS[network]["aptos"].rpc;
// TODO(csongor): use deployer address from sdk (when it's there)
const hash = await callEntryFunc(
network,
rpc,
`${argv["contract-address"]}::contract_upgrade`,
"migrate",
[],
[])
console.log(hash)
})
// TODO - make faucet support testnet in addition to localnet
.command("faucet", "Request money from the faucet for a given account", (yargs) => {
return yargs
.option("rpc", RPC_OPTIONS)
.option("faucet", {
alias: "f",
required: false,
describe: "Faucet url",
type: "string",
})
.option("amount", {
alias: "m",
required: false,
describe: "Amount to request",
type: "number",
})
.option("account", {
alias: "a",
required: false,
describe: "Account to fund",
type: "string",
})
},
async (argv) => {
let NODE_URL = "http://0.0.0.0:8080/v1";
let FAUCET_URL = "http://0.0.0.0:8081";
let account = "0x277fa055b6a73c42c0662d5236c65c864ccbf2d4abd21f174a30c8b786eab84b";
let amount = 40000000;
if (argv.faucet != undefined) {
FAUCET_URL = argv.faucet as string;
}
if (argv.rpc != undefined) {
NODE_URL = argv.rpc as string;
}
if (argv.amount != undefined) {
amount = argv.amount as number;
}
if (argv.account != undefined) {
account = argv.account as string;
}
const faucetClient = new FaucetClient(NODE_URL, FAUCET_URL);
await faucetClient.fundAccount(account, amount);
console.log(`Funded ${account} with ${amount} coins`);
})
.command("start-validator", "Start a local aptos validator", (yargs) => {
return yargs
.option("validator-args", validator_args)
}, (argv) => {
const dir = `${config.wormholeDir}/aptos`;
checkBinary("aptos", "aptos");
const cmd = `cd ${dir} && aptos node run-local-testnet --with-faucet --force-restart --assume-yes`;
runCommand(cmd, argv['validator-args']);
})
.strict().demandCommand();
}
function buildPackage(dir: string, addrs?: string): Package {
const named_addresses =
addrs
? ["--named-addresses", addrs]
: [];
const aptos = spawnSync("aptos",
["move", "compile", "--save-metadata", "--included-artifacts", "none", "--package-dir", dir, ...named_addresses])
if (aptos.status !== 0) {
console.error(aptos.stderr.toString('utf8'))
console.error(aptos.stdout.toString('utf8'))
process.exit(1)
}
const result: any = JSON.parse(aptos.stdout.toString('utf8'))
const buildDirs =
fs.readdirSync(`${dir}/build`, { withFileTypes: true })
.filter(dirent => dirent.isDirectory())
.map(dirent => dirent.name)
if (buildDirs.length !== 1) {
console.error(`Unexpected directory structure in ${dir}/build: expected a single directory`)
process.exit(1)
}
const buildDir = `${dir}/build/${buildDirs[0]}`
return {
meta_file: `${buildDir}/package-metadata.bcs`,
mv_files: result["Result"].map((mod: string) => `${buildDir}/bytecode_modules/${mod.split("::")[1]}.mv`)
}
}
function serializePackage(p: Package): PackageBCS {
const metaBytes = fs.readFileSync(p.meta_file);
const packageMetadataSerializer = new BCS.Serializer();
packageMetadataSerializer.serializeBytes(metaBytes)
const serializedPackageMetadata = packageMetadataSerializer.getBytes();
const modules = p.mv_files.map(file => fs.readFileSync(file))
const serializer = new BCS.Serializer();
serializer.serializeU32AsUleb128(modules.length);
modules.forEach(module => serializer.serializeBytes(module));
const serializedModules = serializer.getBytes();
const hashes = [metaBytes].concat(modules).map((x) => Buffer.from(sha3.keccak256(x), "hex"));
const codeHash = Buffer.from(sha3.keccak256(Buffer.concat(hashes)), "hex")
return {
meta: serializedPackageMetadata,
bytecodes: serializedModules,
codeHash
}
}

@@ -1,20 +0,0 @@
import { spawnSync } from "child_process";
import { config } from "../config";
let dir = `${config.wormholeDir}/clients/js`;
exports.command = "update";
exports.desc = "Update this tool by rebuilding it";
exports.handler = function (_argv: any) {
if (isOutdated()) {
console.log(`Building in ${dir}...`);
spawnSync(`make build -C ${dir}`, { shell: true, stdio: "inherit" });
} else {
console.log("'worm' is up to date");
}
};
export function isOutdated(): boolean {
const result = spawnSync(`make build -C ${dir} --question`, { shell: true });
return result.status !== 0;
}

@@ -1,74 +0,0 @@
const CONFIG_DIR = `${process.env.HOME}/.wormhole`;
const CONFIG_FILE = `${CONFIG_DIR}/default.json`;
process.env["NODE_CONFIG_DIR"] = CONFIG_DIR;
process.env["SUPPRESS_NO_CONFIG_WARNING"] = "y";
import c from 'config';
import fs from 'fs';
export interface Config {
// Path to the wormhole repository
wormholeDir: string;
}
const defaultConfig: Required<Config> = {
wormholeDir: computeRepoRootPath(),
}
/**
* Global config object.
* Importing this module will read the config file and update it if necessary.
*/
export const config: Readonly<Config> = readAndUpdateConfig();
// Computes the path to the root of the wormhole repository based on the
// location of this file (well, the compiled version of this file).
function computeRepoRootPath(): string {
let rel = "/clients/js/build/config.js";
// check if mainPath matches $DIR/clients/js/build/config.js
if (__filename.endsWith(rel)) {
// if so, grab $DIR from mainPath
return __filename.substring(0, __filename.length - rel.length);
} else {
// otherwise, throw an error
throw new Error(`Could not compute repo root path for ${__filename}`);
}
}
function readAndUpdateConfig(): Readonly<Config> {
if (config !== undefined) {
return config;
}
let conf = defaultConfig;
// iterate through all the keys in defaultConfig
for (const key in conf) {
// if the key is not in config, set it to the default value
if (c.has(key)) {
conf[key] = c.get(key);
}
}
let json_conf = JSON.stringify(conf, null, 2) + "\n";
// if the config file does not exist or does not have some of the default
// values, create/update it
let write = false;
if (!fs.existsSync(CONFIG_FILE)) {
console.error('\x1b[33m%s\x1b[0m', `NOTE: Created config file at ${CONFIG_FILE}`);
write = true;
} else if (json_conf !== fs.readFileSync(CONFIG_FILE, "utf8")) {
// ^ this will also normalise the config file, but the main thing is
// that it writes out defaults if they are missing
console.error('\x1b[33m%s\x1b[0m', `NOTE: Updated config file at ${CONFIG_FILE}`);
write = true;
}
if (write) {
if (!fs.existsSync(CONFIG_DIR)){
fs.mkdirSync(CONFIG_DIR, { recursive: true });
}
fs.writeFileSync(CONFIG_FILE, json_conf);
}
return conf;
}

@@ -1,6 +1,27 @@
{
"name": "@wormhole-foundation/wormhole-client",
"version": "0.0.3",
"name": "@wormhole-foundation/wormhole-cli",
"version": "0.0.2",
"description": "CLI for Wormhole related activities",
"homepage": "https://wormhole.com",
"bin": {
"worm": "./build/main.js"
},
"files": [
"build/"
],
"repository": "https://github.com/certusone/wormhole/tree/main/clients/js",
"scripts": {
"start": "ts-node main.ts",
"build": "tsc",
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "Wormhole Contributors",
"license": "Apache-2.0",
"keywords": [
"wormhole",
"bridge",
"cli"
],
"dependencies": {
"@celo-tools/celo-ethers-wrapper": "^0.1.0",
"@certusone/wormhole-sdk": "^0.9.15-beta.4",
@@ -30,15 +51,6 @@
"near-seed-phrase": "^0.2.0",
"yargs": "^17.0.1"
},
"bin": {
"worm": "./build/main.js",
"worm-fetch-governance": "./worm-fetch-governance"
},
"scripts": {
"start": "ts-node main.ts",
"build": "tsc",
"test": "echo \"Error: no test specified\" && exit 1"
},
"devDependencies": {
"@truffle/hdwallet-provider": "^2.0.15",
"@types/bn.js": "^5.1.0",

@@ -1,5 +1,7 @@
#!/bin/bash
# TODO: move this into the client
set -uo pipefail
test_directory="parse_tests"

@@ -0,0 +1,526 @@
import {
assertChain,
CHAIN_ID_APTOS,
coalesceChainId,
CONTRACTS,
} from "@certusone/wormhole-sdk/lib/cjs/utils/consts";
import { BCS, FaucetClient } from "aptos";
import { spawnSync } from "child_process";
import fs from "fs";
import sha3 from "js-sha3";
import yargs from "yargs";
import {
callEntryFunc,
deriveResourceAccount,
deriveWrappedAssetAddress,
} from "../aptos";
import {
GOVERNANCE_CHAIN,
GOVERNANCE_EMITTER,
NAMED_ADDRESSES_OPTIONS,
NETWORK_OPTIONS,
RPC_OPTIONS,
} from "../consts";
import { NETWORKS } from "../networks";
import { runCommand, validator_args } from "../start-validator";
import { assertNetwork, checkBinary, evm_address, hex } from "../utils";
const README_URL =
"https://github.com/wormhole-foundation/wormhole/blob/main/aptos/README.md";
interface Package {
meta_file: string;
mv_files: string[];
}
interface PackageBCS {
meta: Uint8Array;
bytecodes: Uint8Array;
codeHash: Uint8Array;
}
exports.command = "aptos";
exports.desc = "Aptos utilities";
exports.builder = function (y: typeof yargs) {
return (
y
// NOTE: there's no init-nft-bridge, because the native module initialiser
// functionality has stabilised on mainnet, so we just use that one (which
// gets called automatically)
.command(
"init-token-bridge",
"Init token bridge contract",
(yargs) => {
return yargs
.option("network", NETWORK_OPTIONS)
.option("rpc", RPC_OPTIONS);
},
async (argv) => {
const network = argv.network.toUpperCase();
assertNetwork(network);
const contract_address = evm_address(
CONTRACTS[network].aptos.token_bridge
);
const rpc = argv.rpc ?? NETWORKS[network]["aptos"].rpc;
await callEntryFunc(
network,
rpc,
`${contract_address}::token_bridge`,
"init",
[],
[]
);
}
)
.command(
"init-wormhole",
"Init Wormhole core contract",
(yargs) => {
return yargs
.option("network", NETWORK_OPTIONS)
.option("rpc", RPC_OPTIONS)
.option("chain-id", {
describe: "Chain id",
type: "number",
default: CHAIN_ID_APTOS,
required: false,
})
.option("governance-chain-id", {
describe: "Governance chain id",
type: "number",
default: GOVERNANCE_CHAIN,
required: false,
})
.option("governance-address", {
describe: "Governance address",
type: "string",
default: GOVERNANCE_EMITTER,
required: false,
})
.option("guardian-address", {
alias: "g",
required: true,
describe: "Initial guardian's addresses (CSV)",
type: "string",
});
},
async (argv) => {
const network = argv.network.toUpperCase();
assertNetwork(network);
const contract_address = evm_address(CONTRACTS[network].aptos.core);
const guardian_addresses = argv["guardian-address"]
.split(",")
.map((address) => evm_address(address).substring(24));
const chain_id = argv["chain-id"];
const governance_address = evm_address(argv["governance-address"]);
const governance_chain_id = argv["governance-chain-id"];
const guardians_serializer = new BCS.Serializer();
guardians_serializer.serializeU32AsUleb128(guardian_addresses.length);
guardian_addresses.forEach((address) =>
guardians_serializer.serializeBytes(Buffer.from(address, "hex"))
);
const args = [
BCS.bcsSerializeUint64(chain_id),
BCS.bcsSerializeUint64(governance_chain_id),
BCS.bcsSerializeBytes(Buffer.from(governance_address, "hex")),
guardians_serializer.getBytes(),
];
const rpc = argv.rpc ?? NETWORKS[network]["aptos"].rpc;
await callEntryFunc(
network,
rpc,
`${contract_address}::wormhole`,
"init",
[],
args
);
}
)
.command(
"deploy <package-dir>",
"Deploy an Aptos package",
(yargs) => {
return yargs
.positional("package-dir", {
type: "string",
})
.option("network", NETWORK_OPTIONS)
.option("rpc", RPC_OPTIONS)
.option("named-addresses", NAMED_ADDRESSES_OPTIONS);
},
async (argv) => {
const network = argv.network.toUpperCase();
assertNetwork(network);
checkBinary("aptos", README_URL);
const p = buildPackage(argv["package-dir"], argv["named-addresses"]);
const b = serializePackage(p);
const rpc = argv.rpc ?? NETWORKS[network]["aptos"].rpc;
await callEntryFunc(
network,
rpc,
"0x1::code",
"publish_package_txn",
[],
[b.meta, b.bytecodes]
);
console.log("Deployed:", p.mv_files);
}
)
.command(
"deploy-resource <seed> <package-dir>",
"Deploy an Aptos package using a resource account",
(yargs) => {
return yargs
.positional("seed", {
type: "string",
})
.positional("package-dir", {
type: "string",
})
.option("network", NETWORK_OPTIONS)
.option("rpc", RPC_OPTIONS)
.option("named-addresses", NAMED_ADDRESSES_OPTIONS);
},
async (argv) => {
const network = argv.network.toUpperCase();
assertNetwork(network);
checkBinary("aptos", README_URL);
const p = buildPackage(argv["package-dir"], argv["named-addresses"]);
const b = serializePackage(p);
const seed = Buffer.from(argv["seed"], "ascii");
// TODO(csongor): use deployer address from sdk (when it's there)
let module_name =
"0x277fa055b6a73c42c0662d5236c65c864ccbf2d4abd21f174a30c8b786eab84b::deployer";
if (network == "TESTNET" || network == "MAINNET") {
module_name =
"0x0108bc32f7de18a5f6e1e7d6ee7aff9f5fc858d0d87ac0da94dd8d2a5d267d6b::deployer";
}
const rpc = argv.rpc ?? NETWORKS[network]["aptos"].rpc;
await callEntryFunc(
network,
rpc,
module_name,
"deploy_derived",
[],
[b.meta, b.bytecodes, BCS.bcsSerializeBytes(seed)]
);
console.log("Deployed:", p.mv_files);
}
)
.command(
"send-example-message <message>",
"Send example message",
(yargs) => {
return yargs
.positional("message", {
type: "string",
})
.option("network", NETWORK_OPTIONS);
},
async (argv) => {
const network = argv.network.toUpperCase();
assertNetwork(network);
const rpc = NETWORKS[network]["aptos"].rpc;
// TODO(csongor): use sdk address
let module_name =
"0x277fa055b6a73c42c0662d5236c65c864ccbf2d4abd21f174a30c8b786eab84b::sender";
if (network == "TESTNET" || network == "MAINNET") {
module_name =
"0x0108bc32f7de18a5f6e1e7d6ee7aff9f5fc858d0d87ac0da94dd8d2a5d267d6b::sender";
}
await callEntryFunc(
network,
rpc,
module_name,
"send_message",
[],
[BCS.bcsSerializeBytes(Buffer.from(argv["message"], "ascii"))]
);
}
)
.command(
"derive-resource-account <account> <seed>",
"Derive resource account address",
(yargs) => {
return yargs
.positional("account", {
type: "string",
})
.positional("seed", {
type: "string",
});
},
async (argv) => {
console.log(
deriveResourceAccount(
Buffer.from(hex(argv["account"]).substring(2), "hex"),
argv["seed"]
)
);
}
)
.command(
"derive-wrapped-address <chain> <origin-address>",
"Derive wrapped coin type",
(yargs) => {
return yargs
.positional("chain", {
type: "string",
})
.positional("origin-address", {
type: "string",
})
.option("network", NETWORK_OPTIONS);
},
async (argv) => {
const network = argv.network.toUpperCase();
assertNetwork(network);
let address = CONTRACTS[network].aptos.token_bridge;
if (address.startsWith("0x")) address = address.substring(2);
const token_bridge_address = Buffer.from(address, "hex");
assertChain(argv["chain"]);
const chain = coalesceChainId(argv["chain"]);
const origin_address = Buffer.from(
evm_address(argv["origin-address"]),
"hex"
);
console.log(
deriveWrappedAssetAddress(
token_bridge_address,
chain,
origin_address
)
);
}
)
.command(
"hash-contracts <package-dir>",
"Hash contract bytecodes for upgrade",
(yargs) => {
return yargs
.positional("seed", {
type: "string",
})
.positional("package-dir", {
type: "string",
})
.option("named-addresses", NAMED_ADDRESSES_OPTIONS);
},
(argv) => {
checkBinary("aptos", README_URL);
const p = buildPackage(argv["package-dir"], argv["named-addresses"]);
const b = serializePackage(p);
console.log(Buffer.from(b.codeHash).toString("hex"));
}
)
.command(
"upgrade <package-dir>",
"Perform upgrade after VAA has been submitted",
(_yargs) => {
return (
yargs
.positional("package-dir", {
type: "string",
})
// TODO(csongor): once the sdk has the addresses, just look that up
// based on the module
.option("contract-address", {
alias: "a",
required: true,
describe: "Address where the wormhole module is deployed",
type: "string",
})
.option("network", NETWORK_OPTIONS)
.option("rpc", RPC_OPTIONS)
.option("named-addresses", NAMED_ADDRESSES_OPTIONS)
);
},
async (argv) => {
const network = argv.network.toUpperCase();
assertNetwork(network);
checkBinary("aptos", README_URL);
const p = buildPackage(argv["package-dir"], argv["named-addresses"]);
const b = serializePackage(p);
const rpc = argv.rpc ?? NETWORKS[network]["aptos"].rpc;
// TODO(csongor): use deployer address from sdk (when it's there)
const hash = await callEntryFunc(
network,
rpc,
`${argv["contract-address"]}::contract_upgrade`,
"upgrade",
[],
[b.meta, b.bytecodes]
);
console.log("Deployed:", p.mv_files);
console.log(hash);
}
)
.command(
"migrate",
"Perform migration after contract upgrade",
(_yargs) => {
return (
yargs
// TODO(csongor): once the sdk has the addresses, just look that up
// based on the module
.option("contract-address", {
alias: "a",
required: true,
describe: "Address where the wormhole module is deployed",
type: "string",
})
.option("network", NETWORK_OPTIONS)
.option("rpc", RPC_OPTIONS)
);
},
async (argv) => {
const network = argv.network.toUpperCase();
assertNetwork(network);
checkBinary("aptos", README_URL);
const rpc = argv.rpc ?? NETWORKS[network]["aptos"].rpc;
// TODO(csongor): use deployer address from sdk (when it's there)
const hash = await callEntryFunc(
network,
rpc,
`${argv["contract-address"]}::contract_upgrade`,
"migrate",
[],
[]
);
console.log(hash);
}
)
// TODO - make faucet support testnet in addition to localnet
.command(
"faucet",
"Request money from the faucet for a given account",
(yargs) => {
return yargs
.option("rpc", RPC_OPTIONS)
.option("faucet", {
alias: "f",
required: false,
describe: "Faucet url",
type: "string",
})
.option("amount", {
alias: "m",
required: false,
describe: "Amount to request",
type: "number",
})
.option("account", {
alias: "a",
required: false,
describe: "Account to fund",
type: "string",
});
},
async (argv) => {
let NODE_URL = "http://0.0.0.0:8080/v1";
let FAUCET_URL = "http://0.0.0.0:8081";
let account =
"0x277fa055b6a73c42c0662d5236c65c864ccbf2d4abd21f174a30c8b786eab84b";
let amount = 40000000;
if (argv.faucet != undefined) {
FAUCET_URL = argv.faucet as string;
}
if (argv.rpc != undefined) {
NODE_URL = argv.rpc as string;
}
if (argv.amount != undefined) {
amount = argv.amount as number;
}
if (argv.account != undefined) {
account = argv.account as string;
}
const faucetClient = new FaucetClient(NODE_URL, FAUCET_URL);
await faucetClient.fundAccount(account, amount);
console.log(`Funded ${account} with ${amount} coins`);
}
)
.command(
"start-validator",
"Start a local aptos validator",
(yargs) => {
return yargs.option("validator-args", validator_args);
},
(argv) => {
checkBinary("aptos", README_URL);
const os = require("os");
const dir = os.homedir();
const cmd = `cd ${dir} && aptos node run-local-testnet --with-faucet --force-restart --assume-yes`;
runCommand(cmd, argv["validator-args"]);
}
)
.strict()
.demandCommand()
);
};
function buildPackage(dir: string, addrs?: string): Package {
const named_addresses = addrs ? ["--named-addresses", addrs] : [];
const aptos = spawnSync("aptos", [
"move",
"compile",
"--save-metadata",
"--included-artifacts",
"none",
"--package-dir",
dir,
...named_addresses,
]);
if (aptos.status !== 0) {
console.error(aptos.stderr.toString("utf8"));
console.error(aptos.stdout.toString("utf8"));
process.exit(1);
}
const result: any = JSON.parse(aptos.stdout.toString("utf8"));
const buildDirs = fs
.readdirSync(`${dir}/build`, { withFileTypes: true })
.filter((dirent) => dirent.isDirectory())
.map((dirent) => dirent.name);
if (buildDirs.length !== 1) {
console.error(
`Unexpected directory structure in ${dir}/build: expected a single directory`
);
process.exit(1);
}
const buildDir = `${dir}/build/${buildDirs[0]}`;
return {
meta_file: `${buildDir}/package-metadata.bcs`,
mv_files: result["Result"].map(
(mod: string) => `${buildDir}/bytecode_modules/${mod.split("::")[1]}.mv`
),
};
}
function serializePackage(p: Package): PackageBCS {
const metaBytes = fs.readFileSync(p.meta_file);
const packageMetadataSerializer = new BCS.Serializer();
packageMetadataSerializer.serializeBytes(metaBytes);
const serializedPackageMetadata = packageMetadataSerializer.getBytes();
const modules = p.mv_files.map((file) => fs.readFileSync(file));
const serializer = new BCS.Serializer();
serializer.serializeU32AsUleb128(modules.length);
modules.forEach((module) => serializer.serializeBytes(module));
const serializedModules = serializer.getBytes();
const hashes = [metaBytes]
.concat(modules)
.map((x) => Buffer.from(sha3.keccak256(x), "hex"));
const codeHash = Buffer.from(sha3.keccak256(Buffer.concat(hashes)), "hex");
return {
meta: serializedPackageMetadata,
bytecodes: serializedModules,
codeHash,
};
}

@@ -16,12 +16,12 @@
// worm edit-vaa --vaa $VAA --gs $TESTNET_GUARDIAN_SECRET
//
import yargs from "yargs";
import axios from "axios";
import { ethers } from "ethers";
import { Other } from "@certusone/wormhole-sdk";
import { Implementation__factory } from "@certusone/wormhole-sdk/lib/cjs/ethers-contracts";
import { CONTRACTS } from "@certusone/wormhole-sdk/lib/cjs/utils/consts";
import { Other } from "@certusone/wormhole-sdk/lib/cjs/vaa";
import axios from "axios";
import { ethers } from "ethers";
import yargs from "yargs";
import { NETWORKS } from "../networks";
import { parse, Payload, serialiseVAA, sign, Signature, VAA } from "../vaa";

@@ -9,9 +9,8 @@ import {
import { ethers } from "ethers";
import yargs from "yargs";
import { NETWORKS } from "../networks";
import { runCommand, validator_args } from "../start-validator";
import { evm_address } from "../utils";
import { config } from '../config';
import { runCommand, validator_args } from '../start-validator';
exports.command = "evm";
exports.desc = "EVM utilities";
@@ -195,14 +194,19 @@ exports.builder = function (y: typeof yargs) {
);
}
)
.command("start-validator", "Start a local EVM validator", (yargs) => {
return yargs
.option("validator-args", validator_args)
}, (argv) => {
const dir = `${config.wormholeDir}/ethereum`;
.command(
"start-validator",
"Start a local EVM validator",
(yargs) => {
return yargs.option("validator-args", validator_args);
},
(argv) => {
const os = require("os");
const dir = os.homedir();
const cmd = `cd ${dir} && npx ganache-cli -e 10000 --deterministic --time="1970-01-01T00:00:00+00:00"`;
runCommand(cmd, argv['validator-args'])
})
runCommand(cmd, argv["validator-args"]);
}
)
.strict()
.demandCommand();
};

@@ -6,6 +6,9 @@ import { buildCoin, getProvider } from "../../sui";
import { assertNetwork, checkBinary } from "../../utils";
import { YargsAddCommandsFn } from "../Yargs";
const README_URL =
"https://github.com/wormhole-foundation/wormhole/blob/main/sui/README.md";
export const addBuildCommands: YargsAddCommandsFn = (y: typeof yargs) =>
y.command(
"build-coin",
@@ -49,7 +52,7 @@ export const addBuildCommands: YargsAddCommandsFn = (y: typeof yargs) =>
})
.option("rpc", RPC_OPTIONS),
async (argv) => {
checkBinary("sui", "sui");
checkBinary("sui", README_URL);
const network = argv.network.toUpperCase();
assertNetwork(network);

@@ -20,6 +20,9 @@ import {
import { Network, assertNetwork, checkBinary } from "../../utils";
import { YargsAddCommandsFn } from "../Yargs";
const README_URL =
"https://github.com/wormhole-foundation/wormhole/blob/main/sui/README.md";
export const addDeployCommands: YargsAddCommandsFn = (y: typeof yargs) =>
y.command(
"deploy <package-dir>",
@@ -35,7 +38,7 @@ export const addDeployCommands: YargsAddCommandsFn = (y: typeof yargs) =>
.option("rpc", RPC_OPTIONS);
},
async (argv) => {
checkBinary("sui", "sui");
checkBinary("sui", README_URL);
const packageDir = argv["package-dir"];
const network = argv.network.toUpperCase();

@@ -1,8 +1,8 @@
import {
ChainId,
coalesceChainName,
parseTokenBridgeRegisterChainVaa,
} from "@certusone/wormhole-sdk";
} from "@certusone/wormhole-sdk/lib/cjs/utils/consts";
import { parseTokenBridgeRegisterChainVaa } from "@certusone/wormhole-sdk/lib/cjs/vaa/tokenBridge";
import {
JsonRpcProvider,
TransactionBlock,

@@ -19,14 +19,6 @@ console.info = function (x: string) {
import yargs from "yargs";
import { hideBin } from "yargs/helpers";
import { isOutdated } from "./cmds/update";
if (isOutdated()) {
console.error(
"\x1b[33m%s\x1b[0m",
"WARNING: 'worm' is out of date. Run 'worm update' to update."
);
}
yargs(hideBin(process.argv))
.commandDir("cmds", { recurse: true })

@@ -1,6 +1,8 @@
import { ChainName } from "@certusone/wormhole-sdk/lib/cjs/utils/consts";
require("dotenv").config({ path: `${process.env.HOME}/.wormhole/.env` });
const os = require("os");
const dir = os.homedir();
require("dotenv").config({ path: `${dir}/.wormhole/.env` });
function get_env_var(env: string): string | undefined {
const v = process.env[env];

@@ -1,10 +1,10 @@
import { parseAttestMetaVaa } from "@certusone/wormhole-sdk/lib/cjs/vaa/tokenBridge";
import { getForeignAssetSui } from "@certusone/wormhole-sdk/lib/cjs/token_bridge/getForeignAsset";
import {
assertChain,
createWrappedOnSui,
createWrappedOnSuiPrepare,
getForeignAssetSui,
parseAttestMetaVaa,
} from "@certusone/wormhole-sdk";
} from "@certusone/wormhole-sdk/lib/cjs/token_bridge/createWrapped";
import { assertChain } from "@certusone/wormhole-sdk/lib/cjs/utils/consts";
import { getWrappedCoinType } from "@certusone/wormhole-sdk/lib/cjs/sui";
import {
CHAIN_ID_SUI,

@@ -1,6 +1,5 @@
import { spawnSync } from "child_process";
import { ethers } from "ethers";
import { config } from "./config";
export type Network = "MAINNET" | "TESTNET" | "DEVNET";
@@ -10,16 +9,14 @@ export function assertNetwork(n: string): asserts n is Network {
}
}
export const checkBinary = (binaryName: string, dirName?: string): void => {
export const checkBinary = (binaryName: string, readmeUrl?: string): void => {
const binary = spawnSync(binaryName, ["--version"]);
if (binary.status !== 0) {
console.error(
`${binaryName} is not installed. Please install ${binaryName} and try again.`
);
if (dirName) {
console.error(
`See ${config.wormholeDir}/${dirName}/README.md for instructions.`
);
if (readmeUrl) {
console.error(`See ${readmeUrl} for instructions.`);
}
process.exit(1);
}

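The `checkBinary` change above is the utility-level half of the same theme: its second parameter goes from a repo-relative directory name (previously used to build a `config.wormholeDir`-based README path) to a full README URL. A hedged sketch of a caller after this commit, mirroring how the Aptos and Sui command files above use it:

```typescript
// Illustrative caller; the constant matches README_URL as defined in the aptos command file above.
const README_URL =
  "https://github.com/wormhole-foundation/wormhole/blob/main/aptos/README.md";

// Runs `aptos --version`; if it fails, prints an install hint pointing at
// README_URL and exits the process with status 1.
checkBinary("aptos", README_URL);
```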
@@ -3,9 +3,9 @@
/* Visit https://aka.ms/tsconfig.json to read more about this file */
/* Basic Options */
"incremental": true, /* Enable incremental compilation */
"target": "es2019", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', 'ES2021', or 'ESNEXT'. */
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */
"incremental": true /* Enable incremental compilation */,
"target": "es2019" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', 'ES2021', or 'ESNEXT'. */,
"module": "commonjs" /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */,
// "lib": [], /* Specify library files to be included in the compilation. */
// "allowJs": true, /* Allow javascript files to be compiled. */
// "checkJs": true, /* Report errors in .js files. */
@@ -14,19 +14,19 @@
// "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */
// "sourceMap": true, /* Generates corresponding '.map' file. */
// "outFile": "./", /* Concatenate and emit output to single file. */
"outDir": "./build", /* Redirect output structure to the directory. */
"outDir": "./build" /* Redirect output structure to the directory. */,
// "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
// "composite": true, /* Enable project compilation */
// "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */
// "removeComments": true, /* Do not emit comments to output. */
// "noEmit": true, /* Do not emit outputs. */
// "importHelpers": true, /* Import emit helpers from 'tslib'. */
"downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
"downlevelIteration": true /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */,
// "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
/* Strict Type-Checking Options */
"strict": false, /* Enable all strict type-checking options. */
"noImplicitAny": false, /* Raise error on expressions and declarations with an implied 'any' type. */
"strict": false /* Enable all strict type-checking options. */,
"noImplicitAny": false /* Raise error on expressions and declarations with an implied 'any' type. */,
// "strictNullChecks": true, /* Enable strict null checks. */
// "strictFunctionTypes": true, /* Enable strict checking of function types. */
// "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */
@@ -44,14 +44,14 @@
// "noPropertyAccessFromIndexSignature": true, /* Require undeclared properties from index signatures to use element accesses. */
/* Module Resolution Options */
"moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
"moduleResolution": "node" /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */,
// "baseUrl": "./", /* Base directory to resolve non-absolute module names. */
// "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
// "typeRoots": [], /* List of folders to include type definitions from. */
// "types": [], /* Type declaration files to be included in compilation. */
// "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
"esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */,
// "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
@@ -66,7 +66,8 @@
// "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
/* Advanced Options */
"skipLibCheck": true, /* Skip type checking of declaration files. */
"forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. */
}
"skipLibCheck": true /* Skip type checking of declaration files. */,
"forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. */
},
"include": ["src"]
}

@@ -1,7 +0,0 @@
{
"indentSize": 2,
"tabSize": 2,
"insertSpaceAfterOpeningAndBeforeClosingTemplateStringBraces": false,
"placeOpenBraceOnNewLineForFunctions": false,
"placeOpenBraceOnNewLineForControlBlocks": false
}

@@ -1,5 +1,7 @@
#!/bin/bash
# TODO: move this into the client
usage="Usage:
$(basename "$0") [sequence]