base code for event-watcher (#625)

* first commit event-watcher in ts

* add docker
gipsh 2023-08-11 10:15:55 -03:00 committed by GitHub
parent 7426612aff
commit 01d935a17c
83 changed files with 15636 additions and 0 deletions

@ -0,0 +1,2 @@
node_modules/
dist/

event-watcher/.env.sample

@ -0,0 +1,14 @@
LOG_DIR=.
LOG_LEVEL=info
ETH_RPC=
DB_SOURCE=
JSON_DB_FILE=db.json
FIRESTORE_ACCOUNT_KEY_PATH=
FIRESTORE_COLLECTION=
FIRESTORE_LATEST_COLLECTION=
GOOGLE_APPLICATION_CREDENTIALS=
BIGTABLE_TABLE_ID=
BIGTABLE_INSTANCE_ID=
BIGTABLE_SIGNED_VAAS_TABLE_ID=
BIGTABLE_VAAS_BY_TX_HASH_TABLE_ID=
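
`DB_SOURCE` selects between the local JSON database and Bigtable (see `src/consts.ts` and the note in the NEAR backfill script further down). A minimal sketch, assuming this grouping of variables, of how a script could validate the configuration before starting; `assertEnvironmentVariable` is copied from `src/common`:

```ts
import * as dotenv from 'dotenv';
dotenv.config();

// copied from src/common/src/utils.ts
const assertEnvironmentVariable = (varName: string): string => {
  if (varName in process.env) return process.env[varName]!;
  throw new Error(`Missing required environment variable: ${varName}`);
};

// DB_SOURCE defaults to the local JSON database when unset (see src/consts.ts)
const dbSource = process.env.DB_SOURCE || 'local';
if (dbSource !== 'local') {
  // assumed minimum for a Bigtable-backed run, mirroring the variables listed above
  ['BIGTABLE_INSTANCE_ID', 'BIGTABLE_TABLE_ID', 'GOOGLE_APPLICATION_CREDENTIALS'].forEach(
    assertEnvironmentVariable
  );
}
console.log(`event-watcher will use the '${dbSource}' database`);
```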

event-watcher/Dockerfile

@ -0,0 +1,18 @@
FROM node:18.17-alpine AS build
RUN apk add --no-cache g++ make py3-pip
WORKDIR /app
# the destination must end with a slash when the wildcard matches more than one file
COPY package*.json ./
RUN npm install
COPY . .
RUN npm run build

############################
# STEP 2 build a small image
############################
FROM node:18.17-alpine AS run
WORKDIR /app
# the compiled output still needs its production dependencies at runtime
COPY --from=build /app/package*.json ./
RUN npm install --omit=dev
COPY --from=build /app/dist ./dist
CMD ["node", "dist/src/index.js"]
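
# Example usage (the image tag and env file name are illustrative):
#   docker build -t event-watcher .
#   docker run --env-file .env event-watcher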

event-watcher/README.md

@ -0,0 +1,16 @@
# Event-watcher

Observes contract logs on many blockchains and captures VAA creation events.

Code partially borrowed from wormhole-dashboard :)

## Build

```
npm install
```

## Run

Copy `.env.sample` to `.env` and fill in the RPC and database settings, then:

```
npm run dev
```

event-watcher/package-lock.json (generated)

File diff suppressed because it is too large

@ -0,0 +1,50 @@
{
"name": "@wormhole-foundation/wormhole-monitor-watcher",
"version": "0.0.1",
"private": true,
"main": "dist/index.js",
"types": "dist/index.d.ts",
"scripts": {
"build": "tsc",
"start": "node dist/src/index.js",
"dev": "ts-node src/index.ts",
"test": "jest",
"backfill": "ts-node scripts/backfill.ts",
"backfill-arbitrum": "ts-node scripts/backfillArbitrum.ts",
"backfill-near": "ts-node scripts/backfillNear.ts",
"backfill-signed-vaas": "ts-node scripts/backfillSignedVAAs",
"backfill-vaas-by-tx-hash": "ts-node scripts/backfillVAAsByTxHash.ts",
"locate-message-gaps": "ts-node scripts/locateMessageGaps.ts",
"fetch-missing-vaas": "ts-node scripts/fetchMissingVAAs.ts",
"update-found-vaas": "ts-node scripts/updateFoundVAAs.ts",
"read-bigtable": "ts-node scripts/readBigtable.ts",
"read-firestore": "ts-node scripts/readFirestore.ts"
},
"dependencies": {
"@certusone/wormhole-sdk": "^0.9.22",
"@celo-tools/celo-ethers-wrapper": "^0.3.0",
"@google-cloud/bigtable": "^4.1.0",
"@google-cloud/pubsub": "^3.4.1",
"@mysten/sui.js": "^0.33.0",
"@solana/web3.js": "^1.73.0",
"algosdk": "^2.4.0",
"aptos": "^1.4.0",
"axios": "^1.2.1",
"bs58": "^5.0.0",
"dotenv": "^16.0.3",
"firebase-admin": "^11.4.0",
"js-sha512": "^0.8.0",
"near-api-js": "^1.1.0",
"ora": "^5.4.1",
"winston": "^3.8.2",
"zod": "^3.20.2"
},
"devDependencies": {
"@jest/globals": "^29.3.1",
"jest": "^29.3.1",
"prettier": "2.8.1",
"ts-jest": "^29.0.3",
"ts-node": "^10.9.1",
"typescript": "^4.9.4"
}
}

@ -0,0 +1,39 @@
import * as dotenv from 'dotenv';
dotenv.config();
import { ChainId, coalesceChainName } from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
import { chunkArray, sleep } from '../src/common';
import { BigtableDatabase } from '../src/databases/BigtableDatabase';
import { JsonDatabase } from '../src/databases/JsonDatabase';
import { VaasByBlock } from '../src/databases/types';
// This script backfills the bigtable db from a json db
(async () => {
const localDb = new JsonDatabase();
const remoteDb = new BigtableDatabase();
const dbEntries = Object.entries(localDb.db);
for (const [chain, vaasByBlock] of dbEntries) {
console.log('backfilling', chain);
const chunkedKeys = chunkArray(Object.keys(vaasByBlock), 1000);
let chunk = 1;
for (const chunkeyKeys of chunkedKeys) {
console.log('chunk', chunk++, 'of', chunkedKeys.length);
const chunkedVaasByBlock = chunkeyKeys.reduce<VaasByBlock>((obj, curr) => {
obj[curr] = vaasByBlock[curr];
return obj;
}, {});
await remoteDb.storeVaasByBlock(
coalesceChainName(Number(chain) as ChainId),
chunkedVaasByBlock
);
await sleep(500);
}
}
const lastBlockEntries = Object.entries(localDb.lastBlockByChain);
for (const [chain, blockKey] of lastBlockEntries) {
console.log('backfilling last block for', chain, blockKey);
await remoteDb.storeLatestBlock(coalesceChainName(Number(chain) as ChainId), blockKey);
await sleep(500);
}
})();
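
For reference, `chunkArray` (defined in `src/common/src/arrays.ts` later in this commit) is a plain slice-based splitter; a quick standalone check of the batching behaviour used above:

```ts
// copied from src/common/src/arrays.ts
function chunkArray<T>(arr: T[], size: number): T[][] {
  const chunks: T[][] = [];
  for (let i = 0; i < arr.length; i += size) {
    chunks.push(arr.slice(i, i + size));
  }
  return chunks;
}

// illustrative block keys
const keys = ['100/1690000000', '101/1690000013', '102/1690000026', '103/1690000039', '104/1690000052'];
console.log(chunkArray(keys, 2)); // three chunks: two of size 2 and one of size 1
```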

@ -0,0 +1,33 @@
import * as dotenv from 'dotenv';
dotenv.config();
import { ChainName, CONTRACTS } from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
import axios from 'axios';
import ora from 'ora';
import { initDb } from '../src/databases/utils';
import { AXIOS_CONFIG_JSON } from '../src/consts';
import { ArbitrumWatcher } from '../src/watchers/ArbitrumWatcher';
import { LOG_MESSAGE_PUBLISHED_TOPIC } from '../src/watchers/EVMWatcher';
// This script exists because the Arbitrum RPC node only supports a 10-block range per request, which makes scanning the full history very slow
(async () => {
const db = initDb();
const chain: ChainName = 'arbitrum';
const endpoint = `https://api.arbiscan.io/api?module=logs&action=getLogs&address=${CONTRACTS.MAINNET.arbitrum.core}&topic0=${LOG_MESSAGE_PUBLISHED_TOPIC}&apikey=YourApiKeyToken`;
// fetch all message publish logs for core bridge contract from explorer
let log = ora('Fetching logs from Arbiscan...').start();
const blockNumbers = (await axios.get(endpoint, AXIOS_CONFIG_JSON)).data.result.map((x: any) =>
parseInt(x.blockNumber, 16)
);
log.succeed(`Fetched ${blockNumbers.length} logs from Arbiscan`);
// use the watcher to fetch corresponding blocks
log = ora('Fetching blocks...').start();
const watcher = new ArbitrumWatcher();
for (const blockNumber of blockNumbers) {
log.text = `Fetching block ${blockNumber}`;
const vaasByBlock = await watcher.getMessagesForBlocks(blockNumber, blockNumber);
await db.storeVaasByBlock(chain, vaasByBlock);
}
log.succeed('Uploaded messages to db successfully');
})();

@ -0,0 +1,55 @@
import * as dotenv from 'dotenv';
dotenv.config();
import { ChainName, CONTRACTS } from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
import { INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN } from '../src/common';
import { BlockResult } from 'near-api-js/lib/providers/provider';
import ora from 'ora';
import { initDb } from '../src/databases/utils';
import { getNearProvider, getTransactionsByAccountId, NEAR_ARCHIVE_RPC } from '../src/utils/near';
import { getMessagesFromBlockResults } from '../src/watchers/NearWatcher';
// This script exists because NEAR RPC nodes do not support querying blocks older than 5 epochs
// (~2.5 days): https://docs.near.org/api/rpc/setup#querying-historical-data. This script fetches
// all transactions for the core bridge contract from the NEAR Explorer backend API and then uses
// the archival RPC node to backfill messages in the given range.
//
// Ensure `DB_SOURCE` and Bigtable environment variables are set to backfill Bigtable database.
// Otherwise, the script will backfill the local JSON database.
const BATCH_SIZE = 1000;
(async () => {
const db = initDb();
const chain: ChainName = 'near';
const provider = await getNearProvider(NEAR_ARCHIVE_RPC);
const fromBlock = Number(
(await db.getLastBlockByChain(chain)) ?? INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN[chain] ?? 0
);
// fetch all transactions for core bridge contract from explorer
let log = ora('Fetching transactions from NEAR Explorer...').start();
const toBlock = await provider.block({ finality: 'final' });
const transactions = await getTransactionsByAccountId(
CONTRACTS.MAINNET.near.core,
BATCH_SIZE,
toBlock.header.timestamp.toString().padEnd(19, '9') // pad to nanoseconds
);
log.succeed(`Fetched ${transactions.length} transactions from NEAR Explorer`);
// keep only blocks newer than the last seen block and no newer than the final block
const blocks: BlockResult[] = [];
const blockHashes = [...new Set(transactions.map((tx) => tx.blockHash))]; // de-dup blocks
log = ora('Fetching blocks...').start();
for (let i = 0; i < blockHashes.length; i++) {
log.text = `Fetching blocks... ${i + 1}/${blockHashes.length}`;
const block = await provider.block({ blockId: blockHashes[i] });
if (block.header.height > fromBlock && block.header.height <= toBlock.header.height) {
blocks.push(block);
}
}
log.succeed(`Fetched ${blocks.length} blocks`);
const vaasByBlock = await getMessagesFromBlockResults(provider, blocks, true);
await db.storeVaasByBlock(chain, vaasByBlock);
log.succeed('Uploaded messages to db successfully');
})();

@ -0,0 +1,80 @@
import * as dotenv from 'dotenv';
dotenv.config();
import { createReadStream } from 'fs';
import { createInterface } from 'readline';
import { assertEnvironmentVariable } from '@wormhole-foundation/wormhole-monitor-common/src/utils';
import { BigtableDatabase } from '../src/databases/BigtableDatabase';
import ora from 'ora';
import { makeSignedVAAsRowKey } from '../src/databases/utils';
import { ChainId } from '@certusone/wormhole-sdk';
// This script writes all VAAs from a csv file compatible with the guardian `sign-existing-vaas-csv` admin command to bigtable
const CHUNK_SIZE = 10000;
interface SignedVAAsRow {
key: string;
data: {
info: {
bytes: { value: Buffer; timestamp: '0' };
};
};
}
(async () => {
try {
const vaaCsvFilename = assertEnvironmentVariable('VAA_CSV_FILE');
const bt = new BigtableDatabase();
if (!bt.bigtable) {
throw new Error('bigtable is undefined');
}
const vaaTableId = assertEnvironmentVariable('BIGTABLE_SIGNED_VAAS_TABLE_ID');
const instance = bt.bigtable.instance(bt.instanceId);
const vaaTable = instance.table(vaaTableId);
const fileStream = createReadStream(vaaCsvFilename, { encoding: 'utf8' });
const rl = createInterface({
input: fileStream,
crlfDelay: Infinity,
});
// Note: the crlfDelay option recognizes all instances of CR LF
// ('\r\n') in the input as a single line break.
let rows: SignedVAAsRow[] = [];
let numWritten = 0;
let log = ora('Writing VAAs to bigtable...').start();
for await (const line of rl) {
const split = line.split(',');
const key = split[0];
const vaa = split[1];
const splitKey = key.split(':');
const chain = Number(splitKey[0]);
const emitter = splitKey[1];
const sequence = splitKey[2];
const rowKey = makeSignedVAAsRowKey(chain as ChainId, emitter, sequence);
rows.push({
key: rowKey,
data: {
info: {
bytes: { value: Buffer.from(vaa, 'hex'), timestamp: '0' },
},
},
});
if (rows.length == CHUNK_SIZE) {
await vaaTable.insert(rows);
numWritten += rows.length;
log.text = `Wrote ${numWritten} VAAs`;
rows = [];
}
}
if (rows.length > 0) {
await vaaTable.insert(rows);
numWritten += rows.length;
}
log.succeed(`Wrote ${numWritten} VAAs`);
} catch (e) {
console.error(e);
}
})();
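
To make the expected CSV layout concrete, here is a sketch of the per-line transformation performed above; the helpers are copied from `src/common/src/utils.ts`, and the example line is illustrative:

```ts
// copied from src/common/src/utils.ts
const MAX_UINT_16 = '65535';
const padUint16 = (s: string) => s.padStart(MAX_UINT_16.length, '0');
const MAX_UINT_64 = '18446744073709551615';
const padUint64 = (s: string) => s.padStart(MAX_UINT_64.length, '0');
const makeSignedVAAsRowKey = (chain: number, emitter: string, sequence: string) =>
  `${padUint16(chain.toString())}/${emitter}/${padUint64(sequence)}`;

// illustrative CSV line: "<chain>:<emitter>:<sequence>,<hex vaa bytes>"
const line = '2:0000000000000000000000003ee18b2214aff97000d974cf647e7c347e8fa585:42,010000000001deadbeef';
const [key] = line.split(',');
const [chain, emitter, sequence] = key.split(':');
console.log(makeSignedVAAsRowKey(Number(chain), emitter, sequence));
// 00002/0000000000000000000000003ee18b2214aff97000d974cf647e7c347e8fa585/00000000000000000042
```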

@ -0,0 +1,56 @@
import * as dotenv from 'dotenv';
dotenv.config();
import { BigtableDatabase } from '../src/databases/BigtableDatabase';
import ora from 'ora';
import { BigtableVAAsByTxHashRow } from '../src/databases/types';
import {
makeSignedVAAsRowKey,
makeVAAsByTxHashRowKey,
parseMessageId,
} from '../src/databases/utils';
import { chunkArray } from '../src/common';
const CHUNK_SIZE = 10000;
(async () => {
try {
const bt = new BigtableDatabase();
if (!bt.bigtable) {
throw new Error('bigtable is undefined');
}
const instance = bt.bigtable.instance(bt.instanceId);
const messageTable = instance.table(bt.msgTableId);
const vaasByTxHashTable = instance.table(bt.vaasByTxHashTableId);
let log = ora(`Reading rows from ${bt.msgTableId}...`).start();
const observedMessages = await messageTable.getRows(); // TODO: pagination
const vaasByTxHash: { [key: string]: string[] } = {};
for (const msg of observedMessages[0]) {
const txHash = msg.data.info.txHash[0].value;
const { chain, emitter, sequence } = parseMessageId(msg.id);
const txHashRowKey = makeVAAsByTxHashRowKey(txHash, chain);
const vaaRowKey = makeSignedVAAsRowKey(chain, emitter, sequence.toString());
vaasByTxHash[txHashRowKey] = [...(vaasByTxHash[txHashRowKey] || []), vaaRowKey];
}
const rowsToInsert = Object.entries(vaasByTxHash).map<BigtableVAAsByTxHashRow>(
([txHashRowKey, vaaRowKeys]) => ({
key: txHashRowKey,
data: {
info: {
vaaKeys: { value: JSON.stringify(vaaRowKeys), timestamp: '0' },
},
},
})
);
const rowChunks = chunkArray(rowsToInsert, CHUNK_SIZE);
let numWritten = 0;
for (const rowChunk of rowChunks) {
await vaasByTxHashTable.insert(rowChunk);
numWritten += rowChunk.length;
log.text = `Wrote ${numWritten}/${rowsToInsert.length} rows to ${bt.vaasByTxHashTableId}`;
}
log.succeed(`Wrote ${numWritten} rows to ${bt.vaasByTxHashTableId}`);
} catch (e) {
console.error(e);
}
})();

@ -0,0 +1,22 @@
import * as dotenv from 'dotenv';
dotenv.config();
import { CHAIN_ID_SOLANA, coalesceChainId, coalesceChainName } from '@certusone/wormhole-sdk';
import { padUint16 } from '../src/common';
import { BigtableDatabase } from '../src/databases/BigtableDatabase';
// Script to delete all messages for the chain given by the CHAIN variable below
const CHAIN = CHAIN_ID_SOLANA;
(async () => {
const bt = new BigtableDatabase();
if (!bt.bigtable) {
throw new Error('bigtable is undefined');
}
const instance = bt.bigtable.instance(bt.instanceId);
const messageTable = instance.table(bt.msgTableId);
await messageTable.deleteRows(`${padUint16(coalesceChainId(CHAIN).toString())}/`);
console.log('Deleted all rows starting with', coalesceChainName(CHAIN));
})();
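
For reference, the prefix passed to `deleteRows` above is just the zero-padded chain ID followed by a slash; for Solana (chain ID 1) it is `00001/`:

```ts
// copied from src/common/src/utils.ts
const padUint16 = (s: string) => s.padStart('65535'.length, '0');

console.log(`${padUint16('1')}/`); // '00001/' (Solana)
console.log(`${padUint16('22')}/`); // '00022/' (Aptos)
```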

@ -0,0 +1,78 @@
import * as dotenv from 'dotenv';
dotenv.config();
import axios from 'axios';
import { writeFileSync } from 'fs';
import ora from 'ora';
import { BigtableDatabase } from '../src/databases/BigtableDatabase';
import { makeSignedVAAsRowKey, parseMessageId } from '../src/databases/utils';
import { AXIOS_CONFIG_JSON, GUARDIAN_RPC_HOSTS } from '../src/consts';
import { parseVaa } from '@certusone/wormhole-sdk';
// This script checks for messages which don't have VAAs and attempts to fetch the VAAs from the guardians.
// This is useful for cases where the VAA doesn't exist in bigtable (perhaps due to an outage) but is still available from the guardians.
// Found VAAs should be backfilled with https://github.com/wormhole-foundation/bigtable-backfill-guardian-rpc for completeness' sake.
// Missing messages should be re-observed by the guardians.
// TODO: At some point this all should be automated in the watcher to self-heal the db.
const foundVaas: { [id: string]: string } = {};
const missingVaas: { [id: string]: string | undefined } = {};
(async () => {
const bt = new BigtableDatabase();
if (!bt.bigtable) {
throw new Error('bigtable is undefined');
}
const now = Math.floor(Date.now() / 1000);
try {
let log = ora('Fetching messages without a signed VAA...').start();
const missingVaaMessages = await bt.fetchMissingVaaMessages();
log.succeed();
const total = missingVaaMessages.length;
let found = 0;
let search = 0;
let tooNew = 0;
log = ora(`Searching for VAA...`).start();
for (const observedMessage of missingVaaMessages) {
log.text = `Searching for VAA ${++search}/${total}...`;
const { chain, emitter, sequence } = parseMessageId(observedMessage.id);
const id = makeSignedVAAsRowKey(chain, emitter, sequence.toString());
let vaaBytes: string | null = null;
for (const host of GUARDIAN_RPC_HOSTS) {
log.text = `Searching for VAA ${search}/${total} (${host})...`;
try {
const result = await axios.get(
`${host}/v1/signed_vaa/${chain}/${emitter}/${sequence.toString()}`,
AXIOS_CONFIG_JSON
);
if (result.data.vaaBytes) {
vaaBytes = result.data.vaaBytes;
break;
}
} catch (e) {}
}
if (vaaBytes) {
found++;
const signedVAA = Buffer.from(vaaBytes, 'base64');
const vaa = parseVaa(signedVAA);
const vaaTime = vaa.timestamp;
if (now - vaaTime > 3600) {
// More than one hour old.
foundVaas[id] = Buffer.from(vaaBytes, 'base64').toString('hex');
} else {
tooNew++;
}
} else {
missingVaas[id] = observedMessage.data.info.txHash?.[0].value;
}
}
log.succeed();
console.log('Total:', total);
console.log('Found:', found);
console.log('Too New:', tooNew);
console.log('Missing:', total - found);
writeFileSync('./found.json', JSON.stringify(foundVaas, undefined, 2));
writeFileSync('./missing.json', JSON.stringify(missingVaas, undefined, 2));
} catch (e) {
console.error(e);
}
})();

@ -0,0 +1,116 @@
import * as dotenv from 'dotenv';
dotenv.config();
import { ChainId, coalesceChainName } from '@certusone/wormhole-sdk';
import { sleep } from '../src/common';
import { TIMEOUT } from '../src/consts';
import { BigtableDatabase } from '../src/databases/BigtableDatabase';
import { parseMessageId } from '../src/databases/utils';
import { makeFinalizedWatcher } from '../src/watchers/utils';
import { Watcher } from '../src/watchers/Watcher';
// This script checks for gaps in the message sequences for an emitter.
// Ideally this shouldn't happen, but there seems to be an issue with Oasis, Karura, and Celo
(async () => {
const bt = new BigtableDatabase();
if (!bt.bigtable) {
throw new Error('bigtable is undefined');
}
const instance = bt.bigtable.instance(bt.instanceId);
const messageTable = instance.table(bt.msgTableId);
try {
// Find gaps in sequence numbers with the same chain and emitter
// Sort by ascending sequence number
const observedMessages = (await messageTable.getRows())[0].sort((a, b) =>
Number(parseMessageId(a.id).sequence - parseMessageId(b.id).sequence)
);
const total = observedMessages.length;
console.log(`processing ${total} messages`);
const gaps = [];
const latestEmission: { [emitter: string]: { sequence: bigint; block: number } } = {};
for (const observedMessage of observedMessages) {
const {
chain: emitterChain,
block,
emitter: emitterAddress,
sequence,
} = parseMessageId(observedMessage.id);
const emitter = `${emitterChain}/${emitterAddress}`;
if (!latestEmission[emitter]) {
latestEmission[emitter] = { sequence: 0n, block: 0 };
}
while (sequence > latestEmission[emitter].sequence + 1n) {
latestEmission[emitter].sequence += 1n;
gaps.push(
[
emitterChain,
`${latestEmission[emitter].block}-${block}`,
emitterAddress,
latestEmission[emitter].sequence.toString(),
].join('/')
);
}
latestEmission[emitter] = { sequence, block };
}
// console.log(latestEmission);
// Sort by chain, emitter, sequence
gaps.sort((a, b) => {
const [aChain, _aBlocks, aEmitter, aSequence] = a.split('/');
const [bChain, _bBlocks, bEmitter, bSequence] = b.split('/');
return (
aChain.localeCompare(bChain) ||
aEmitter.localeCompare(bEmitter) ||
Number(BigInt(aSequence) - BigInt(bSequence))
);
});
console.log(gaps);
// Search misses and submit them to the db
let prevChain = '0';
let fromBlock = -1;
for (const gap of gaps) {
const [chain, blockRange, emitter, sequence] = gap.split('/');
const chainName = coalesceChainName(Number(chain) as ChainId);
let watcher: Watcher;
try {
watcher = makeFinalizedWatcher(chainName);
} catch (e) {
console.error('skipping gap for unsupported chain', chainName);
continue;
}
const range = blockRange.split('-');
const rangeStart = parseInt(range[0]);
const rangeEnd = parseInt(range[1]);
if (prevChain === chain && rangeStart < fromBlock) {
// don't reset on consecutive ranges of missing sequence numbers
console.log('resuming at', fromBlock, 'on', chain);
} else {
fromBlock = rangeStart;
prevChain = chain;
console.log('starting at', fromBlock, 'on', chain);
}
let found = false;
while (fromBlock <= rangeEnd && !found) {
const toBlock = Math.min(fromBlock + watcher.maximumBatchSize - 1, rangeEnd);
const messages = await watcher.getMessagesForBlocks(fromBlock, toBlock);
for (const message of Object.entries(messages).filter(([key, value]) => value.length > 0)) {
const locatedMessages = message[1].filter((msgKey) => {
const [_transaction, vaaKey] = msgKey.split(':');
const [_chain, msgEmitter, msgSeq] = vaaKey.split('/');
return emitter === msgEmitter && sequence === msgSeq;
});
if (locatedMessages.length > 0) {
await bt.storeVaasByBlock(chainName, { [message[0]]: locatedMessages }, false);
console.log('located', message[0], locatedMessages);
found = true;
}
}
if (!found) {
fromBlock = toBlock + 1;
await sleep(TIMEOUT);
}
}
}
} catch (e) {
console.error(e);
}
})();
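
The core of the gap detection above is the per-emitter sequence walk: sequences are expected to be contiguous, so every jump produces one gap entry per missing sequence number. A minimal standalone illustration:

```ts
// illustrative sequence numbers observed for a single emitter
const seen = [1n, 2n, 5n];
let last = 0n;
const missing: bigint[] = [];
for (const seq of seen) {
  // everything between the last seen sequence and the current one is a gap
  while (seq > last + 1n) {
    last += 1n;
    missing.push(last);
  }
  last = seq;
}
console.log(missing); // [ 3n, 4n ]
```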

@ -0,0 +1,53 @@
import * as dotenv from 'dotenv';
dotenv.config();
import { ChainId, CHAINS, coalesceChainName } from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
import { MAX_UINT_64, padUint16 } from '../src/common';
import { BigtableDatabase } from '../src/databases/BigtableDatabase';
// This script provides a summary of the message db
(async () => {
const bt = new BigtableDatabase();
if (!bt.bigtable) {
throw new Error('bigtable is undefined');
}
const mainnetInstance = bt.bigtable.instance(bt.instanceId);
const messageTable = mainnetInstance.table(bt.msgTableId);
try {
const chain: ChainId = 22;
const prefix = `${padUint16(chain.toString())}/`;
const observedMessages = await messageTable.getRows({ prefix, limit: 100 });
console.log(
coalesceChainName(chain).padEnd(12),
observedMessages[0].length.toString().padStart(6)
);
if (observedMessages[0][0]) {
console.log(' id ', observedMessages[0][0]?.id);
console.log(' chain ', parseInt(observedMessages[0][0]?.id.split('/')[0]));
console.log(
' block ',
BigInt(MAX_UINT_64) - BigInt(observedMessages[0][0]?.id.split('/')[1] || 0)
);
console.log(' emitter ', observedMessages[0][0]?.id.split('/')[2]);
console.log(' seq ', parseInt(observedMessages[0][0]?.id.split('/')[3]));
console.log(' timestamp ', observedMessages[0][0]?.data.info.timestamp[0].value);
console.log(' txHash ', observedMessages[0][0]?.data.info.txHash[0].value);
console.log(' hasSignedVaa ', observedMessages[0][0]?.data.info.hasSignedVaa[0].value);
}
if (observedMessages[0][1]) {
console.log(' id ', observedMessages[0][1]?.id);
console.log(' chain ', parseInt(observedMessages[0][1]?.id.split('/')[0]));
console.log(
' block ',
BigInt(MAX_UINT_64) - BigInt(observedMessages[0][1]?.id.split('/')[1] || 0)
);
console.log(' emitter ', observedMessages[0][1]?.id.split('/')[2]);
console.log(' seq ', parseInt(observedMessages[0][1]?.id.split('/')[3]));
console.log(' timestamp ', observedMessages[0][1]?.data.info.timestamp[0].value);
console.log(' txHash ', observedMessages[0][1]?.data.info.txHash[0].value);
console.log(' hasSignedVaa ', observedMessages[0][1]?.data.info.hasSignedVaa[0].value);
}
} catch (e) {
console.error(e);
}
})();
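
The subtraction above recovers the block number because the block component of a message row key is stored as `MAX_UINT_64` minus the block (presumably so that newer blocks sort first under Bigtable's lexicographic row ordering). A quick check of the arithmetic, with an illustrative block number:

```ts
const MAX_UINT_64 = 18446744073709551615n;
const block = 17000000n; // illustrative
const stored = MAX_UINT_64 - block; // what the row key would contain (assumed, per makeMessageId)
console.log(MAX_UINT_64 - stored); // 17000000n
```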

@ -0,0 +1,21 @@
import * as dotenv from 'dotenv';
dotenv.config();
import { BigtableDatabase } from '../src/databases/BigtableDatabase';
// This script provides a summary of the latest block db
(async () => {
const bt = new BigtableDatabase();
try {
const collectionRef = bt.firestoreDb.collection(bt.latestCollectionName);
const snapshot = await collectionRef.get();
snapshot.docs
.sort((a, b) => Number(a.id) - Number(b.id))
.forEach((doc) => {
const [block, timestamp] = doc.data().lastBlockKey.split('/');
console.log(doc.id.padEnd(2), '=>', timestamp, block.padStart(10));
});
} catch (e) {
console.error(e);
}
})();

@ -0,0 +1,30 @@
import * as dotenv from 'dotenv';
dotenv.config();
import { BigtableDatabase } from '../src/databases/BigtableDatabase';
// This script takes the output of fetchMissingVAAs and writes the found records back to the VAA big table
(async () => {
const found: { [id: string]: string } = require('../found.json');
const bt = new BigtableDatabase();
if (!bt.bigtable) {
throw new Error('bigtable is undefined');
}
try {
await bt.storeSignedVAAs(
Object.entries(found).map(([id, vaaBytes]) => {
const vaa = Buffer.from(vaaBytes, 'hex');
return {
key: id,
data: {
info: {
bytes: { value: vaa, timestamp: '0' },
},
},
};
})
);
} catch (e) {
console.error(e);
}
})();
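
For reference, `found.json` is produced by `fetchMissingVAAs.ts` above and maps signed-VAA row keys to hex-encoded VAA bytes, which is why this script decodes with `Buffer.from(vaaBytes, 'hex')`. An illustrative entry:

```ts
// the key and bytes below are illustrative, not real data
const found: { [rowKey: string]: string } = {
  '00002/0000000000000000000000003ee18b2214aff97000d974cf647e7c347e8fa585/00000000000000000042':
    '010000000001deadbeef',
};
```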

@ -0,0 +1,58 @@
import { padUint16, sleep } from '../src/common';
import * as dotenv from 'dotenv';
dotenv.config();
import { BigtableDatabase } from '../src/databases/BigtableDatabase';
import { parseMessageId } from '../src/databases/utils';
// This script updates the hasSignedVaa status for a given list of VAA row keys
export function parseVaaId(vaaRowKey: string) {
// note: all three components are returned as strings (chain, emitter, sequence)
const [chain, emitter, sequence] = vaaRowKey.split(':');
return [chain, emitter, sequence];
}
(async () => {
const bt = new BigtableDatabase();
if (!bt.bigtable) {
throw new Error('bigtable is undefined');
}
const instance = bt.bigtable.instance(bt.instanceId);
const messageTable = instance.table(bt.msgTableId);
const rowKeysToUpdate: string[] = [
'5:0000000000000000000000005a58505a96d1dbf8df91cb21b54419fc36e93fde:0000000000006840',
'7:00000000000000000000000004952d522ff217f40b5ef3cbf659eca7b952a6c1:0000000000000002',
'7:0000000000000000000000005848c791e09901b40a9ef749f2a6735b418d7564:0000000000006971',
'15:148410499d3fcda4dcfd68a1ebfcdddda16ab28326448d4aae4d2f0465cdfcb7:0000000000000001',
];
try {
// STEP 1
console.log(`processing ${rowKeysToUpdate.length} rowKeys`);
for (const rowKey of rowKeysToUpdate) {
let [chain, targetEmitter, targetSequence] = parseVaaId(rowKey);
const formattedChainId = padUint16(chain);
const [rowsByChain] = await messageTable.getRows({ prefix: formattedChainId });
let messageRowKey = '';
//filter to find sequence numbers:
rowsByChain.forEach((row) => {
const { chain, block, emitter, sequence } = parseMessageId(row.id);
if (targetEmitter === emitter && BigInt(targetSequence) === sequence) {
console.log(`found ${row.id} for rowKey=${rowKey}`);
//update rowKey
messageRowKey = row.id;
}
});
if (messageRowKey !== '') {
console.log(`updating ${messageRowKey} to value=${2}`);
await bt.updateMessageStatuses([messageRowKey], 2);
}
}
} catch (e) {
console.error(e);
}
})();

@ -0,0 +1,262 @@
[
{
"inputs": [
{ "internalType": "address", "name": "_libAddressManager", "type": "address" },
{ "internalType": "uint256", "name": "_maxTransactionGasLimit", "type": "uint256" },
{ "internalType": "uint256", "name": "_l2GasDiscountDivisor", "type": "uint256" },
{ "internalType": "uint256", "name": "_enqueueGasCost", "type": "uint256" }
],
"stateMutability": "nonpayable",
"type": "constructor"
},
{
"anonymous": false,
"inputs": [
{
"indexed": false,
"internalType": "uint256",
"name": "l2GasDiscountDivisor",
"type": "uint256"
},
{ "indexed": false, "internalType": "uint256", "name": "enqueueGasCost", "type": "uint256" },
{
"indexed": false,
"internalType": "uint256",
"name": "enqueueL2GasPrepaid",
"type": "uint256"
}
],
"name": "L2GasParamsUpdated",
"type": "event"
},
{
"anonymous": false,
"inputs": [
{
"indexed": false,
"internalType": "uint256",
"name": "_startingQueueIndex",
"type": "uint256"
},
{
"indexed": false,
"internalType": "uint256",
"name": "_numQueueElements",
"type": "uint256"
},
{ "indexed": false, "internalType": "uint256", "name": "_totalElements", "type": "uint256" }
],
"name": "QueueBatchAppended",
"type": "event"
},
{
"anonymous": false,
"inputs": [
{
"indexed": false,
"internalType": "uint256",
"name": "_startingQueueIndex",
"type": "uint256"
},
{
"indexed": false,
"internalType": "uint256",
"name": "_numQueueElements",
"type": "uint256"
},
{ "indexed": false, "internalType": "uint256", "name": "_totalElements", "type": "uint256" }
],
"name": "SequencerBatchAppended",
"type": "event"
},
{
"anonymous": false,
"inputs": [
{ "indexed": true, "internalType": "uint256", "name": "_batchIndex", "type": "uint256" },
{ "indexed": false, "internalType": "bytes32", "name": "_batchRoot", "type": "bytes32" },
{ "indexed": false, "internalType": "uint256", "name": "_batchSize", "type": "uint256" },
{
"indexed": false,
"internalType": "uint256",
"name": "_prevTotalElements",
"type": "uint256"
},
{ "indexed": false, "internalType": "bytes", "name": "_extraData", "type": "bytes" }
],
"name": "TransactionBatchAppended",
"type": "event"
},
{
"anonymous": false,
"inputs": [
{ "indexed": true, "internalType": "address", "name": "_l1TxOrigin", "type": "address" },
{ "indexed": true, "internalType": "address", "name": "_target", "type": "address" },
{ "indexed": false, "internalType": "uint256", "name": "_gasLimit", "type": "uint256" },
{ "indexed": false, "internalType": "bytes", "name": "_data", "type": "bytes" },
{ "indexed": true, "internalType": "uint256", "name": "_queueIndex", "type": "uint256" },
{ "indexed": false, "internalType": "uint256", "name": "_timestamp", "type": "uint256" }
],
"name": "TransactionEnqueued",
"type": "event"
},
{
"inputs": [],
"name": "MAX_ROLLUP_TX_SIZE",
"outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "MIN_ROLLUP_TX_GAS",
"outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "appendSequencerBatch",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [],
"name": "batches",
"outputs": [
{ "internalType": "contract IChainStorageContainer", "name": "", "type": "address" }
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{ "internalType": "address", "name": "_target", "type": "address" },
{ "internalType": "uint256", "name": "_gasLimit", "type": "uint256" },
{ "internalType": "bytes", "name": "_data", "type": "bytes" }
],
"name": "enqueue",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [],
"name": "enqueueGasCost",
"outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "enqueueL2GasPrepaid",
"outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "getLastBlockNumber",
"outputs": [{ "internalType": "uint40", "name": "", "type": "uint40" }],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "getLastTimestamp",
"outputs": [{ "internalType": "uint40", "name": "", "type": "uint40" }],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "getNextQueueIndex",
"outputs": [{ "internalType": "uint40", "name": "", "type": "uint40" }],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "getNumPendingQueueElements",
"outputs": [{ "internalType": "uint40", "name": "", "type": "uint40" }],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [{ "internalType": "uint256", "name": "_index", "type": "uint256" }],
"name": "getQueueElement",
"outputs": [
{
"components": [
{ "internalType": "bytes32", "name": "transactionHash", "type": "bytes32" },
{ "internalType": "uint40", "name": "timestamp", "type": "uint40" },
{ "internalType": "uint40", "name": "blockNumber", "type": "uint40" }
],
"internalType": "struct Lib_OVMCodec.QueueElement",
"name": "_element",
"type": "tuple"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "getQueueLength",
"outputs": [{ "internalType": "uint40", "name": "", "type": "uint40" }],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "getTotalBatches",
"outputs": [{ "internalType": "uint256", "name": "_totalBatches", "type": "uint256" }],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "getTotalElements",
"outputs": [{ "internalType": "uint256", "name": "_totalElements", "type": "uint256" }],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "l2GasDiscountDivisor",
"outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "libAddressManager",
"outputs": [{ "internalType": "contract Lib_AddressManager", "name": "", "type": "address" }],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "maxTransactionGasLimit",
"outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [{ "internalType": "string", "name": "_name", "type": "string" }],
"name": "resolve",
"outputs": [{ "internalType": "address", "name": "", "type": "address" }],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{ "internalType": "uint256", "name": "_l2GasDiscountDivisor", "type": "uint256" },
{ "internalType": "uint256", "name": "_enqueueGasCost", "type": "uint256" }
],
"name": "setGasParams",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
}
]
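
This ABI appears to be the Optimism CanonicalTransactionChain contract referenced via `OPTIMISM_CTC_CHAIN_ADDRESS` in `src/consts.ts`. A hypothetical sketch of reading from it with ethers v5 (which the Wormhole SDK already pulls in transitively); the JSON file name and the use of `ETH_RPC` are assumptions:

```ts
import { Contract, providers } from 'ethers';

// path and file name are illustrative
const ctcAbi = require('./CanonicalTransactionChain.json');
const CTC_ADDRESS = '0x5E4e65926BA27467555EB562121fac00D24E9dD2';

(async () => {
  // ETH_RPC mirrors OPTIMISM_CTC_CHAIN_RPC in src/consts.ts
  const provider = new providers.JsonRpcProvider(process.env.ETH_RPC);
  const ctc = new Contract(CTC_ADDRESS, ctcAbi, provider);
  console.log('getLastBlockNumber:', (await ctc.getLastBlockNumber()).toString());
})();
```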

@ -0,0 +1 @@
export declare function chunkArray<T>(arr: T[], size: number): T[][];

event-watcher/src/common/dist/arrays.js (vendored)

@ -0,0 +1,12 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.chunkArray = void 0;
function chunkArray(arr, size) {
const chunks = [];
for (let i = 0; i < arr.length; i += size) {
chunks.push(arr.slice(i, i + size));
}
return chunks;
}
exports.chunkArray = chunkArray;
//# sourceMappingURL=arrays.js.map

@ -0,0 +1 @@
{"version":3,"file":"arrays.js","sourceRoot":"","sources":["../src/arrays.ts"],"names":[],"mappings":";;;AAAA,SAAgB,UAAU,CAAI,GAAQ,EAAE,IAAY;IAClD,MAAM,MAAM,GAAG,EAAE,CAAC;IAClB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,CAAC,MAAM,EAAE,CAAC,IAAI,IAAI,EAAE;QACzC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC;KACrC;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAND,gCAMC"}

@ -0,0 +1,12 @@
import { ChainId, ChainName } from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
export declare const INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN: {
[key in ChainName]?: string;
};
export declare const TOKEN_BRIDGE_EMITTERS: {
[key in ChainName]?: string;
};
export declare const isTokenBridgeEmitter: (chain: ChainId | ChainName, emitter: string) => boolean;
export declare const NFT_BRIDGE_EMITTERS: {
[key in ChainName]?: string;
};
export declare const isNFTBridgeEmitter: (chain: ChainId | ChainName, emitter: string) => boolean;

event-watcher/src/common/dist/consts.js (vendored)

@ -0,0 +1,79 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.isNFTBridgeEmitter = exports.NFT_BRIDGE_EMITTERS = exports.isTokenBridgeEmitter = exports.TOKEN_BRIDGE_EMITTERS = exports.INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN = void 0;
const consts_1 = require("@certusone/wormhole-sdk/lib/cjs/utils/consts");
exports.INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN = {
ethereum: '12959638',
terra: '4810000',
bsc: '9745450',
polygon: '20629146',
avalanche: '8237163',
oasis: '1757',
algorand: '22931277',
fantom: '31817467',
karura: '1824665',
acala: '1144161',
klaytn: '90563824',
celo: '12947144',
moonbeam: '1486591',
terra2: '399813',
injective: '20908376',
arbitrum: '18128584',
optimism: '69401779',
aptos: '0',
near: '72767136',
xpla: '777549',
solana: '94401321',
sui: '1485552',
base: '1422314',
};
exports.TOKEN_BRIDGE_EMITTERS = {
solana: 'ec7372995d5cc8732397fb0ad35c0121e0eaa90d26f828a534cab54391b3a4f5',
ethereum: '0000000000000000000000003ee18b2214aff97000d974cf647e7c347e8fa585',
terra: '0000000000000000000000007cf7b764e38a0a5e967972c1df77d432510564e2',
terra2: 'a463ad028fb79679cfc8ce1efba35ac0e77b35080a1abe9bebe83461f176b0a3',
bsc: '000000000000000000000000b6f6d86a8f9879a9c87f643768d9efc38c1da6e7',
polygon: '0000000000000000000000005a58505a96d1dbf8df91cb21b54419fc36e93fde',
avalanche: '0000000000000000000000000e082f06ff657d94310cb8ce8b0d9a04541d8052',
oasis: '0000000000000000000000005848c791e09901b40a9ef749f2a6735b418d7564',
algorand: '67e93fa6c8ac5c819990aa7340c0c16b508abb1178be9b30d024b8ac25193d45',
aptos: '0000000000000000000000000000000000000000000000000000000000000001',
aurora: '00000000000000000000000051b5123a7b0f9b2ba265f9c4c8de7d78d52f510f',
fantom: '0000000000000000000000007c9fc5741288cdfdd83ceb07f3ea7e22618d79d2',
karura: '000000000000000000000000ae9d7fe007b3327aa64a32824aaac52c42a6e624',
acala: '000000000000000000000000ae9d7fe007b3327aa64a32824aaac52c42a6e624',
klaytn: '0000000000000000000000005b08ac39eaed75c0439fc750d9fe7e1f9dd0193f',
celo: '000000000000000000000000796dff6d74f3e27060b71255fe517bfb23c93eed',
near: '148410499d3fcda4dcfd68a1ebfcdddda16ab28326448d4aae4d2f0465cdfcb7',
moonbeam: '000000000000000000000000b1731c586ca89a23809861c6103f0b96b3f57d92',
arbitrum: '0000000000000000000000000b2402144bb366a632d14b83f244d2e0e21bd39c',
optimism: '0000000000000000000000001d68124e65fafc907325e3edbf8c4d84499daa8b',
xpla: '8f9cf727175353b17a5f574270e370776123d90fd74956ae4277962b4fdee24c',
injective: '00000000000000000000000045dbea4617971d93188eda21530bc6503d153313',
sui: 'ccceeb29348f71bdd22ffef43a2a19c1f5b5e17c5cca5411529120182672ade5',
base: '0000000000000000000000008d2de8d2f73F1F4cAB472AC9A881C9b123C79627',
};
const isTokenBridgeEmitter = (chain, emitter) => exports.TOKEN_BRIDGE_EMITTERS[(0, consts_1.coalesceChainName)(chain)] === emitter;
exports.isTokenBridgeEmitter = isTokenBridgeEmitter;
exports.NFT_BRIDGE_EMITTERS = {
solana: '0def15a24423e1edd1a5ab16f557b9060303ddbab8c803d2ee48f4b78a1cfd6b',
ethereum: '0000000000000000000000006ffd7ede62328b3af38fcd61461bbfc52f5651fe',
bsc: '0000000000000000000000005a58505a96d1dbf8df91cb21b54419fc36e93fde',
polygon: '00000000000000000000000090bbd86a6fe93d3bc3ed6335935447e75fab7fcf',
avalanche: '000000000000000000000000f7b6737ca9c4e08ae573f75a97b73d7a813f5de5',
oasis: '00000000000000000000000004952d522ff217f40b5ef3cbf659eca7b952a6c1',
aurora: '0000000000000000000000006dcc0484472523ed9cdc017f711bcbf909789284',
fantom: '000000000000000000000000a9c7119abda80d4a4e0c06c8f4d8cf5893234535',
karura: '000000000000000000000000b91e3638f82a1facb28690b37e3aae45d2c33808',
acala: '000000000000000000000000b91e3638f82a1facb28690b37e3aae45d2c33808',
klaytn: '0000000000000000000000003c3c561757baa0b78c5c025cdeaa4ee24c1dffef',
celo: '000000000000000000000000a6a377d75ca5c9052c9a77ed1e865cc25bd97bf3',
moonbeam: '000000000000000000000000453cfbe096c0f8d763e8c5f24b441097d577bde2',
arbitrum: '0000000000000000000000003dd14d553cfd986eac8e3bddf629d82073e188c8',
optimism: '000000000000000000000000fe8cd454b4a1ca468b57d79c0cc77ef5b6f64585',
aptos: '0000000000000000000000000000000000000000000000000000000000000005',
base: '000000000000000000000000DA3adC6621B2677BEf9aD26598e6939CF0D92f88',
};
const isNFTBridgeEmitter = (chain, emitter) => exports.NFT_BRIDGE_EMITTERS[(0, consts_1.coalesceChainName)(chain)] === emitter;
exports.isNFTBridgeEmitter = isNFTBridgeEmitter;
//# sourceMappingURL=consts.js.map

@ -0,0 +1 @@
{"version":3,"file":"consts.js","sourceRoot":"","sources":["../src/consts.ts"],"names":[],"mappings":";;;AAAA,yEAIsD;AAEzC,QAAA,iCAAiC,GAE1C;IACF,QAAQ,EAAE,UAAU;IACpB,KAAK,EAAE,SAAS;IAChB,GAAG,EAAE,SAAS;IACd,OAAO,EAAE,UAAU;IACnB,SAAS,EAAE,SAAS;IACpB,KAAK,EAAE,MAAM;IACb,QAAQ,EAAE,UAAU;IACpB,MAAM,EAAE,UAAU;IAClB,MAAM,EAAE,SAAS;IACjB,KAAK,EAAE,SAAS;IAChB,MAAM,EAAE,UAAU;IAClB,IAAI,EAAE,UAAU;IAChB,QAAQ,EAAE,SAAS;IACnB,MAAM,EAAE,QAAQ;IAChB,SAAS,EAAE,UAAU;IACrB,QAAQ,EAAE,UAAU;IACpB,QAAQ,EAAE,UAAU;IACpB,KAAK,EAAE,GAAG;IACV,IAAI,EAAE,UAAU;IAChB,IAAI,EAAE,QAAQ;IACd,MAAM,EAAE,UAAU;IAClB,GAAG,EAAE,SAAS;IACd,IAAI,EAAE,SAAS;CAChB,CAAC;AAEW,QAAA,qBAAqB,GAAoC;IACpE,MAAM,EAAE,kEAAkE;IAC1E,QAAQ,EAAE,kEAAkE;IAC5E,KAAK,EAAE,kEAAkE;IACzE,MAAM,EAAE,kEAAkE;IAC1E,GAAG,EAAE,kEAAkE;IACvE,OAAO,EAAE,kEAAkE;IAC3E,SAAS,EAAE,kEAAkE;IAC7E,KAAK,EAAE,kEAAkE;IACzE,QAAQ,EAAE,kEAAkE;IAC5E,KAAK,EAAE,kEAAkE;IACzE,MAAM,EAAE,kEAAkE;IAC1E,MAAM,EAAE,kEAAkE;IAC1E,MAAM,EAAE,kEAAkE;IAC1E,KAAK,EAAE,kEAAkE;IACzE,MAAM,EAAE,kEAAkE;IAC1E,IAAI,EAAE,kEAAkE;IACxE,IAAI,EAAE,kEAAkE;IACxE,QAAQ,EAAE,kEAAkE;IAC5E,QAAQ,EAAE,kEAAkE;IAC5E,QAAQ,EAAE,kEAAkE;IAC5E,IAAI,EAAE,kEAAkE;IACxE,SAAS,EAAE,kEAAkE;IAC7E,GAAG,EAAE,kEAAkE;IACvE,IAAI,EAAE,kEAAkE;CACzE,CAAC;AAEK,MAAM,oBAAoB,GAAG,CAAC,KAA0B,EAAE,OAAe,EAAE,EAAE,CAClF,6BAAqB,CAAC,IAAA,0BAAiB,EAAC,KAAK,CAAC,CAAC,KAAK,OAAO,CAAC;AADjD,QAAA,oBAAoB,wBAC6B;AAEjD,QAAA,mBAAmB,GAAoC;IAClE,MAAM,EAAE,kEAAkE;IAC1E,QAAQ,EAAE,kEAAkE;IAC5E,GAAG,EAAE,kEAAkE;IACvE,OAAO,EAAE,kEAAkE;IAC3E,SAAS,EAAE,kEAAkE;IAC7E,KAAK,EAAE,kEAAkE;IACzE,MAAM,EAAE,kEAAkE;IAC1E,MAAM,EAAE,kEAAkE;IAC1E,MAAM,EAAE,kEAAkE;IAC1E,KAAK,EAAE,kEAAkE;IACzE,MAAM,EAAE,kEAAkE;IAC1E,IAAI,EAAE,kEAAkE;IACxE,QAAQ,EAAE,kEAAkE;IAC5E,QAAQ,EAAE,kEAAkE;IAC5E,QAAQ,EAAE,kEAAkE;IAC5E,KAAK,EAAE,kEAAkE;IACzE,IAAI,EAAE,kEAAkE;CACzE,CAAC;AAEK,MAAM,kBAAkB,GAAG,CAAC,KAA0B,EAAE,OAAe,EAAE,EAAE,CAChF,2BAAmB,CAAC,IAAA,0BAAiB,EAAC,KAAK,CAAC,CAAC,KAAK,OAAO,CAAC;AAD/C,QAAA,kBAAkB,sBAC6B"}

@ -0,0 +1,3 @@
export * from './arrays';
export * from './consts';
export * from './utils';

event-watcher/src/common/dist/index.js (vendored)

@ -0,0 +1,20 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __exportStar = (this && this.__exportStar) || function(m, exports) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
__exportStar(require("./arrays"), exports);
__exportStar(require("./consts"), exports);
__exportStar(require("./utils"), exports);
//# sourceMappingURL=index.js.map

@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA,2CAAyB;AACzB,2CAAyB;AACzB,0CAAwB"}

@ -0,0 +1,7 @@
export declare function sleep(timeout: number): Promise<unknown>;
export declare const assertEnvironmentVariable: (varName: string) => string;
export declare const MAX_UINT_16 = "65535";
export declare const padUint16: (s: string) => string;
export declare const MAX_UINT_64 = "18446744073709551615";
export declare const padUint64: (s: string) => string;
export declare const makeSignedVAAsRowKey: (chain: number, emitter: string, sequence: string) => string;

event-watcher/src/common/dist/utils.js (vendored)

@ -0,0 +1,23 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.makeSignedVAAsRowKey = exports.padUint64 = exports.MAX_UINT_64 = exports.padUint16 = exports.MAX_UINT_16 = exports.assertEnvironmentVariable = exports.sleep = void 0;
async function sleep(timeout) {
return new Promise((resolve) => setTimeout(resolve, timeout));
}
exports.sleep = sleep;
const assertEnvironmentVariable = (varName) => {
if (varName in process.env)
return process.env[varName];
throw new Error(`Missing required environment variable: ${varName}`);
};
exports.assertEnvironmentVariable = assertEnvironmentVariable;
exports.MAX_UINT_16 = '65535';
const padUint16 = (s) => s.padStart(exports.MAX_UINT_16.length, '0');
exports.padUint16 = padUint16;
exports.MAX_UINT_64 = '18446744073709551615';
const padUint64 = (s) => s.padStart(exports.MAX_UINT_64.length, '0');
exports.padUint64 = padUint64;
// make a bigtable row key for the `signedVAAs` table
const makeSignedVAAsRowKey = (chain, emitter, sequence) => `${(0, exports.padUint16)(chain.toString())}/${emitter}/${(0, exports.padUint64)(sequence)}`;
exports.makeSignedVAAsRowKey = makeSignedVAAsRowKey;
//# sourceMappingURL=utils.js.map

@ -0,0 +1 @@
{"version":3,"file":"utils.js","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":";;;AAAO,KAAK,UAAU,KAAK,CAAC,OAAe;IACzC,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,CAAC;AAChE,CAAC;AAFD,sBAEC;AACM,MAAM,yBAAyB,GAAG,CAAC,OAAe,EAAE,EAAE;IAC3D,IAAI,OAAO,IAAI,OAAO,CAAC,GAAG;QAAE,OAAO,OAAO,CAAC,GAAG,CAAC,OAAO,CAAE,CAAC;IACzD,MAAM,IAAI,KAAK,CAAC,0CAA0C,OAAO,EAAE,CAAC,CAAC;AACvE,CAAC,CAAC;AAHW,QAAA,yBAAyB,6BAGpC;AACW,QAAA,WAAW,GAAG,OAAO,CAAC;AAC5B,MAAM,SAAS,GAAG,CAAC,CAAS,EAAU,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,mBAAW,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;AAAvE,QAAA,SAAS,aAA8D;AACvE,QAAA,WAAW,GAAG,sBAAsB,CAAC;AAC3C,MAAM,SAAS,GAAG,CAAC,CAAS,EAAU,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,mBAAW,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;AAAvE,QAAA,SAAS,aAA8D;AAEpF,qDAAqD;AAC9C,MAAM,oBAAoB,GAAG,CAAC,KAAa,EAAE,OAAe,EAAE,QAAgB,EAAU,EAAE,CAC/F,GAAG,IAAA,iBAAS,EAAC,KAAK,CAAC,QAAQ,EAAE,CAAC,IAAI,OAAO,IAAI,IAAA,iBAAS,EAAC,QAAQ,CAAC,EAAE,CAAC;AADxD,QAAA,oBAAoB,wBACoC"}

@ -0,0 +1,7 @@
{
"name": "@wormhole-foundation/wormhole-monitor-common",
"version": "0.0.1",
"private": true,
"main": "dist/index.js",
"types": "dist/index.d.ts"
}

@ -0,0 +1,7 @@
export function chunkArray<T>(arr: T[], size: number): T[][] {
const chunks = [];
for (let i = 0; i < arr.length; i += size) {
chunks.push(arr.slice(i, i + size));
}
return chunks;
}

@ -0,0 +1,86 @@
import {
ChainId,
ChainName,
coalesceChainName,
} from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
export const INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN: {
[key in ChainName]?: string;
} = {
ethereum: '12959638',
terra: '4810000', // not sure exactly but this should be before the first known message
bsc: '9745450',
polygon: '20629146',
avalanche: '8237163',
oasis: '1757',
algorand: '22931277',
fantom: '31817467',
karura: '1824665',
acala: '1144161',
klaytn: '90563824',
celo: '12947144',
moonbeam: '1486591',
terra2: '399813',
injective: '20908376',
arbitrum: '18128584',
optimism: '69401779',
aptos: '0', // block is 1094390 but AptosWatcher uses sequence number instead
near: '72767136',
xpla: '777549',
solana: '94401321', // https://explorer.solana.com/tx/KhLy688yDxbP7xbXVXK7TGpZU5DAFHbYiaoX16zZArxvVySz8i8g7N7Ss2noQYoq9XRbg6HDzrQBjUfmNcSWwhe
sui: '1485552', // https://explorer.sui.io/txblock/671SoTvVUvBZQWKXeameDvAwzHQvnr8Nj7dR9MUwm3CV?network=https%3A%2F%2Frpc.mainnet.sui.io
base: '1422314',
};
export const TOKEN_BRIDGE_EMITTERS: { [key in ChainName]?: string } = {
solana: 'ec7372995d5cc8732397fb0ad35c0121e0eaa90d26f828a534cab54391b3a4f5',
ethereum: '0000000000000000000000003ee18b2214aff97000d974cf647e7c347e8fa585',
terra: '0000000000000000000000007cf7b764e38a0a5e967972c1df77d432510564e2',
terra2: 'a463ad028fb79679cfc8ce1efba35ac0e77b35080a1abe9bebe83461f176b0a3',
bsc: '000000000000000000000000b6f6d86a8f9879a9c87f643768d9efc38c1da6e7',
polygon: '0000000000000000000000005a58505a96d1dbf8df91cb21b54419fc36e93fde',
avalanche: '0000000000000000000000000e082f06ff657d94310cb8ce8b0d9a04541d8052',
oasis: '0000000000000000000000005848c791e09901b40a9ef749f2a6735b418d7564',
algorand: '67e93fa6c8ac5c819990aa7340c0c16b508abb1178be9b30d024b8ac25193d45',
aptos: '0000000000000000000000000000000000000000000000000000000000000001',
aurora: '00000000000000000000000051b5123a7b0f9b2ba265f9c4c8de7d78d52f510f',
fantom: '0000000000000000000000007c9fc5741288cdfdd83ceb07f3ea7e22618d79d2',
karura: '000000000000000000000000ae9d7fe007b3327aa64a32824aaac52c42a6e624',
acala: '000000000000000000000000ae9d7fe007b3327aa64a32824aaac52c42a6e624',
klaytn: '0000000000000000000000005b08ac39eaed75c0439fc750d9fe7e1f9dd0193f',
celo: '000000000000000000000000796dff6d74f3e27060b71255fe517bfb23c93eed',
near: '148410499d3fcda4dcfd68a1ebfcdddda16ab28326448d4aae4d2f0465cdfcb7',
moonbeam: '000000000000000000000000b1731c586ca89a23809861c6103f0b96b3f57d92',
arbitrum: '0000000000000000000000000b2402144bb366a632d14b83f244d2e0e21bd39c',
optimism: '0000000000000000000000001d68124e65fafc907325e3edbf8c4d84499daa8b',
xpla: '8f9cf727175353b17a5f574270e370776123d90fd74956ae4277962b4fdee24c',
injective: '00000000000000000000000045dbea4617971d93188eda21530bc6503d153313',
sui: 'ccceeb29348f71bdd22ffef43a2a19c1f5b5e17c5cca5411529120182672ade5',
base: '0000000000000000000000008d2de8d2f73F1F4cAB472AC9A881C9b123C79627',
};
export const isTokenBridgeEmitter = (chain: ChainId | ChainName, emitter: string) =>
TOKEN_BRIDGE_EMITTERS[coalesceChainName(chain)] === emitter;
export const NFT_BRIDGE_EMITTERS: { [key in ChainName]?: string } = {
solana: '0def15a24423e1edd1a5ab16f557b9060303ddbab8c803d2ee48f4b78a1cfd6b',
ethereum: '0000000000000000000000006ffd7ede62328b3af38fcd61461bbfc52f5651fe',
bsc: '0000000000000000000000005a58505a96d1dbf8df91cb21b54419fc36e93fde',
polygon: '00000000000000000000000090bbd86a6fe93d3bc3ed6335935447e75fab7fcf',
avalanche: '000000000000000000000000f7b6737ca9c4e08ae573f75a97b73d7a813f5de5',
oasis: '00000000000000000000000004952d522ff217f40b5ef3cbf659eca7b952a6c1',
aurora: '0000000000000000000000006dcc0484472523ed9cdc017f711bcbf909789284',
fantom: '000000000000000000000000a9c7119abda80d4a4e0c06c8f4d8cf5893234535',
karura: '000000000000000000000000b91e3638f82a1facb28690b37e3aae45d2c33808',
acala: '000000000000000000000000b91e3638f82a1facb28690b37e3aae45d2c33808',
klaytn: '0000000000000000000000003c3c561757baa0b78c5c025cdeaa4ee24c1dffef',
celo: '000000000000000000000000a6a377d75ca5c9052c9a77ed1e865cc25bd97bf3',
moonbeam: '000000000000000000000000453cfbe096c0f8d763e8c5f24b441097d577bde2',
arbitrum: '0000000000000000000000003dd14d553cfd986eac8e3bddf629d82073e188c8',
optimism: '000000000000000000000000fe8cd454b4a1ca468b57d79c0cc77ef5b6f64585',
aptos: '0000000000000000000000000000000000000000000000000000000000000005',
base: '000000000000000000000000DA3adC6621B2677BEf9aD26598e6939CF0D92f88',
};
export const isNFTBridgeEmitter = (chain: ChainId | ChainName, emitter: string) =>
NFT_BRIDGE_EMITTERS[coalesceChainName(chain)] === emitter;
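
A quick usage check of the helpers above (the import path is illustrative):

```ts
import { isTokenBridgeEmitter, isNFTBridgeEmitter, TOKEN_BRIDGE_EMITTERS } from './consts';

const ethTokenBridge = TOKEN_BRIDGE_EMITTERS.ethereum!;
console.log(isTokenBridgeEmitter('ethereum', ethTokenBridge)); // true
console.log(isNFTBridgeEmitter('ethereum', ethTokenBridge)); // false: different emitter address
console.log(isTokenBridgeEmitter(2, ethTokenBridge)); // true: chain ID 2 coalesces to 'ethereum'
```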

@ -0,0 +1,3 @@
export * from './arrays';
export * from './consts';
export * from './utils';

@ -0,0 +1,15 @@
export async function sleep(timeout: number) {
return new Promise((resolve) => setTimeout(resolve, timeout));
}
export const assertEnvironmentVariable = (varName: string) => {
if (varName in process.env) return process.env[varName]!;
throw new Error(`Missing required environment variable: ${varName}`);
};
export const MAX_UINT_16 = '65535';
export const padUint16 = (s: string): string => s.padStart(MAX_UINT_16.length, '0');
export const MAX_UINT_64 = '18446744073709551615';
export const padUint64 = (s: string): string => s.padStart(MAX_UINT_64.length, '0');
// make a bigtable row key for the `signedVAAs` table
export const makeSignedVAAsRowKey = (chain: number, emitter: string, sequence: string): string =>
`${padUint16(chain.toString())}/${emitter}/${padUint64(sequence)}`;
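
The zero padding matters because Bigtable row keys are compared lexicographically; padded numbers keep their numeric order. A quick check (the import path is illustrative):

```ts
import { makeSignedVAAsRowKey, padUint64 } from './utils';

console.log('9' < '10'); // false: unpadded strings sort in the wrong order
console.log(padUint64('9') < padUint64('10')); // true: padding restores numeric order
console.log(makeSignedVAAsRowKey(2, 'a1b2', '42')); // 'a1b2' is an illustrative emitter
// '00002/a1b2/00000000000000000042'
```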

@ -0,0 +1,8 @@
{
"extends": "../tsconfig.base.json",
"compilerOptions": {
"rootDir": "src",
"outDir": "dist"
},
"include": ["src"]
}

File diff suppressed because one or more lines are too long

@ -0,0 +1,89 @@
import { ChainName, CONTRACTS } from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
import { AxiosRequestConfig } from 'axios';
export const TIMEOUT = 0.5 * 1000;
// Notes about RPCs
// Ethereum
// ethereum: "https://rpc.ankr.com/eth", // "finalized" does not work on Ankr as of 2022-12-16
// BSC
// https://docs.bscscan.com/misc-tools-and-utilities/public-rpc-nodes
// bsc: "https://bsc-dataseed1.binance.org", // Cannot read properties of undefined (reading 'error')
// 'https://rpc.ankr.com/bsc' has been very slow, trying a diff rpc
// Avalanche
// https://docs.avax.network/apis/avalanchego/public-api-server
// avalanche: "https://api.avax.network/ext/bc/C/rpc", // 500 error on batch request
// Fantom
// fantom: "https://rpc.ftm.tools", // Cannot read properties of null (reading 'timestamp')"
// Klaytn
// this one immediately 429s
// klaytn: 'https://public-node-api.klaytnapi.com/v1/cypress',
// Near
// archive node
// https://archival-rpc.mainnet.near.org
// Arbitrum
// This node didn't work: 'https://arb1.arbitrum.io/rpc',
export const RPCS_BY_CHAIN: { [key in ChainName]?: string } = {
ethereum: process.env.ETH_RPC || 'https://svc.blockdaemon.com/ethereum/mainnet/native',
bsc: process.env.BSC_RPC || 'https://bsc-dataseed2.defibit.io',
polygon: 'https://rpc.ankr.com/polygon',
avalanche: 'https://rpc.ankr.com/avalanche',
oasis: 'https://emerald.oasis.dev',
algorand: 'https://mainnet-api.algonode.cloud',
fantom: 'https://rpc.ankr.com/fantom',
karura: 'https://eth-rpc-karura.aca-api.network',
acala: 'https://eth-rpc-acala.aca-api.network',
klaytn: 'https://klaytn-mainnet-rpc.allthatnode.com:8551',
celo: 'https://forno.celo.org',
moonbeam: 'https://rpc.ankr.com/moonbeam',
arbitrum: 'https://arb1.arbitrum.io/rpc',
optimism: 'https://rpc.ankr.com/optimism',
aptos: 'https://fullnode.mainnet.aptoslabs.com/',
near: 'https://rpc.mainnet.near.org',
xpla: 'https://dimension-lcd.xpla.dev',
terra2: 'https://phoenix-lcd.terra.dev',
// terra: 'https://columbus-fcd.terra.dev',
terra: 'https://terra-classic-fcd.publicnode.com',
injective: 'https://api.injective.network',
solana: process.env.SOLANA_RPC ?? 'https://api.mainnet-beta.solana.com',
sui: 'https://rpc.mainnet.sui.io',
base: 'https://developer-access-mainnet.base.org',
};
// Separating for now so if we max out infura we can keep Polygon going
export const POLYGON_ROOT_CHAIN_RPC = 'https://rpc.ankr.com/eth';
export const POLYGON_ROOT_CHAIN_ADDRESS = '0x86E4Dc95c7FBdBf52e33D563BbDB00823894C287';
// Optimism watcher relies on finalized calls which don't work right on Ankr
export const OPTIMISM_CTC_CHAIN_RPC = process.env.ETH_RPC;
export const OPTIMISM_CTC_CHAIN_ADDRESS = '0x5E4e65926BA27467555EB562121fac00D24E9dD2';
export const ALGORAND_INFO = {
appid: Number(CONTRACTS.MAINNET.algorand.core),
algodToken: '',
algodServer: RPCS_BY_CHAIN.algorand,
algodPort: 443,
server: 'https://mainnet-idx.algonode.cloud',
port: 443,
token: '',
};
export const DB_SOURCE =
process.env.NODE_ENV === 'test' ? 'local' : process.env.DB_SOURCE || 'local';
export const JSON_DB_FILE = process.env.JSON_DB_FILE || './db.json';
export const DB_LAST_BLOCK_FILE = process.env.DB_LAST_BLOCK_FILE || './lastBlockByChain.json';
// without this, axios request will error `Z_BUF_ERROR`: https://github.com/axios/axios/issues/5346
export const AXIOS_CONFIG_JSON: AxiosRequestConfig = {
headers: {
'Accept-Encoding': 'application/json',
Authorization: 'Bearer zpka_213d294a9a5a44619cd6a02e55a20417_5f43e4d0',
},
};
export const GUARDIAN_RPC_HOSTS = [
'https://wormhole-v2-mainnet-api.certus.one',
'https://wormhole.inotel.ro',
'https://wormhole-v2-mainnet-api.mcf.rocks',
'https://wormhole-v2-mainnet-api.chainlayer.network',
'https://wormhole-v2-mainnet-api.staking.fund',
];

@ -0,0 +1,309 @@
import { ChainName, coalesceChainId } from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
import { parseVaa } from '@certusone/wormhole-sdk/lib/cjs/vaa/wormhole';
import { Bigtable } from '@google-cloud/bigtable';
import {
assertEnvironmentVariable,
chunkArray,
sleep,
} from '../common';
import { cert, initializeApp } from 'firebase-admin/app';
import { getFirestore } from 'firebase-admin/firestore';
import { Database } from './Database';
import {
BigtableMessagesResultRow,
BigtableMessagesRow,
BigtableSignedVAAsResultRow,
BigtableSignedVAAsRow,
BigtableVAAsByTxHashRow,
VaasByBlock,
} from './types';
import {
makeMessageId,
makeVAAsByTxHashRowKey,
makeSignedVAAsRowKey,
parseMessageId,
} from './utils';
import { getSignedVAA } from '../utils/getSignedVAA';
import { PubSub } from '@google-cloud/pubsub';
const WATCH_MISSING_TIMEOUT = 5 * 60 * 1000;
export class BigtableDatabase extends Database {
msgTableId: string;
signedVAAsTableId: string;
vaasByTxHashTableId: string;
instanceId: string;
bigtable: Bigtable;
firestoreDb: FirebaseFirestore.Firestore;
latestCollectionName: string;
pubsubSignedVAATopic: string;
pubsub: PubSub;
constructor() {
super();
this.msgTableId = assertEnvironmentVariable('BIGTABLE_TABLE_ID');
this.signedVAAsTableId = assertEnvironmentVariable('BIGTABLE_SIGNED_VAAS_TABLE_ID');
this.vaasByTxHashTableId = assertEnvironmentVariable('BIGTABLE_VAAS_BY_TX_HASH_TABLE_ID');
this.instanceId = assertEnvironmentVariable('BIGTABLE_INSTANCE_ID');
this.latestCollectionName = assertEnvironmentVariable('FIRESTORE_LATEST_COLLECTION');
this.pubsubSignedVAATopic = assertEnvironmentVariable('PUBSUB_SIGNED_VAA_TOPIC');
try {
this.bigtable = new Bigtable();
const serviceAccount = require(assertEnvironmentVariable('FIRESTORE_ACCOUNT_KEY_PATH'));
initializeApp({
credential: cert(serviceAccount),
});
this.firestoreDb = getFirestore();
this.pubsub = new PubSub();
} catch (e) {
throw new Error('Could not load bigtable db');
}
}
async getLastBlockByChain(chain: ChainName): Promise<string | null> {
const chainId = coalesceChainId(chain);
const lastObservedBlock = this.firestoreDb
.collection(this.latestCollectionName)
.doc(chainId.toString());
const lastObservedBlockByChain = await lastObservedBlock.get();
const blockKeyData = lastObservedBlockByChain.data();
const lastBlockKey = blockKeyData?.lastBlockKey;
if (lastBlockKey) {
this.logger.info(`for chain=${chain}, found most recent firestore block=${lastBlockKey}`);
const tokens = lastBlockKey.split('/');
return chain === 'aptos' ? tokens.at(-1) : tokens[0];
}
return null;
}
async storeLatestBlock(chain: ChainName, lastBlockKey: string): Promise<void> {
if (this.firestoreDb === undefined) {
this.logger.error('no firestore db set');
return;
}
const chainId = coalesceChainId(chain);
this.logger.info(`storing last block=${lastBlockKey} for chain=${chainId}`);
const lastObservedBlock = this.firestoreDb
.collection(this.latestCollectionName)
.doc(`${chainId.toString()}`);
await lastObservedBlock.set({ lastBlockKey });
}
async storeVaasByBlock(
chain: ChainName,
vaasByBlock: VaasByBlock,
updateLatestBlock: boolean = true
): Promise<void> {
if (this.bigtable === undefined) {
this.logger.warn('no bigtable instance set');
return;
}
const chainId = coalesceChainId(chain);
const filteredBlocks = BigtableDatabase.filterEmptyBlocks(vaasByBlock);
const instance = this.bigtable.instance(this.instanceId);
const table = instance.table(this.msgTableId);
const vaasByTxHashTable = instance.table(this.vaasByTxHashTableId);
const rowsToInsert: BigtableMessagesRow[] = [];
const vaasByTxHash: { [key: string]: string[] } = {};
Object.keys(filteredBlocks).forEach((blockKey) => {
const [block, timestamp] = blockKey.split('/');
filteredBlocks[blockKey].forEach((msgKey) => {
const [txHash, vaaKey] = msgKey.split(':');
const [, emitter, seq] = vaaKey.split('/');
rowsToInsert.push({
key: makeMessageId(chainId, block, emitter, seq),
data: {
info: {
timestamp: {
value: timestamp,
// write 0 timestamp to only keep 1 cell each
// https://cloud.google.com/bigtable/docs/gc-latest-value
timestamp: '0',
},
txHash: {
value: txHash,
timestamp: '0',
},
hasSignedVaa: {
value: 0,
timestamp: '0',
},
},
},
});
const txHashRowKey = makeVAAsByTxHashRowKey(txHash, chainId);
const vaaRowKey = makeSignedVAAsRowKey(chainId, emitter, seq);
vaasByTxHash[txHashRowKey] = [...(vaasByTxHash[txHashRowKey] || []), vaaRowKey];
});
});
const txHashRowsToInsert = Object.entries(vaasByTxHash).map<BigtableVAAsByTxHashRow>(
([txHashRowKey, vaaRowKeys]) => ({
key: txHashRowKey,
data: {
info: {
vaaKeys: { value: JSON.stringify(vaaRowKeys), timestamp: '0' },
},
},
})
);
await Promise.all([table.insert(rowsToInsert), vaasByTxHashTable.insert(txHashRowsToInsert)]);
if (updateLatestBlock) {
// store latest vaasByBlock to firestore
const blockKeys = Object.keys(vaasByBlock).sort(
(bk1, bk2) => Number(bk1.split('/')[0]) - Number(bk2.split('/')[0])
);
if (blockKeys.length) {
const lastBlockKey = blockKeys[blockKeys.length - 1];
this.logger.info(`for chain=${chain}, storing last bigtable block=${lastBlockKey}`);
await this.storeLatestBlock(chain, lastBlockKey);
}
}
}
async updateMessageStatuses(messageKeys: string[], value: number = 1): Promise<void> {
const instance = this.bigtable.instance(this.instanceId);
const table = instance.table(this.msgTableId);
const chunkedMessageKeys = chunkArray(messageKeys, 1000);
for (const chunk of chunkedMessageKeys) {
const rowsToInsert: BigtableMessagesRow[] = chunk.map((id) => ({
key: id,
data: {
info: {
hasSignedVaa: {
value: value,
timestamp: '0',
},
},
},
}));
// console.log(rowsToInsert[0].data.info)
await table.insert(rowsToInsert);
}
}
async fetchMissingVaaMessages(): Promise<BigtableMessagesResultRow[]> {
const instance = this.bigtable.instance(this.instanceId);
const messageTable = instance.table(this.msgTableId);
// TODO: how to filter to only messages with hasSignedVaa === 0
const observedMessages = (await messageTable.getRows())[0] as BigtableMessagesResultRow[];
const missingVaaMessages = observedMessages.filter(
(x) => x.data.info.hasSignedVaa?.[0].value === 0
);
return missingVaaMessages;
}
async watchMissing(): Promise<void> {
const instance = this.bigtable.instance(this.instanceId);
const signedVAAsTable = instance.table(this.signedVAAsTableId);
while (true) {
try {
// this array first stores all of the messages which are missing VAAs
// messages which we find VAAs for are then pruned from the array
// lastly we try to fetch VAAs from the guardians for the messages still left in the array
const missingVaaMessages = await this.fetchMissingVaaMessages();
const total = missingVaaMessages.length;
this.logger.info(`locating ${total} messages with hasSignedVAA === 0`);
let found = 0;
const chunkedVAAIds = chunkArray(
missingVaaMessages.map((observedMessage) => {
const { chain, emitter, sequence } = parseMessageId(observedMessage.id);
return makeSignedVAAsRowKey(chain, emitter, sequence.toString());
}),
1000
);
let chunkNum = 0;
const foundKeys: string[] = [];
for (const chunk of chunkedVAAIds) {
this.logger.info(`processing chunk ${++chunkNum} of ${chunkedVAAIds.length}`);
const vaaRows = (
await signedVAAsTable.getRows({
keys: chunk,
decode: false,
})
)[0] as BigtableSignedVAAsResultRow[];
for (const row of vaaRows) {
try {
const vaaBytes = row.data.info.bytes[0].value;
const parsed = parseVaa(vaaBytes);
const matchingIndex = missingVaaMessages.findIndex((observedMessage) => {
const { chain, emitter, sequence } = parseMessageId(observedMessage.id);
return (
parsed.emitterChain === chain &&
parsed.emitterAddress.toString('hex') === emitter &&
parsed.sequence === sequence
);
});
if (matchingIndex !== -1) {
found++;
// remove matches to keep array lean
// messages with missing VAAs will be kept in the array
const [matching] = missingVaaMessages.splice(matchingIndex, 1);
foundKeys.push(matching.id);
}
} catch (e) {}
}
}
this.logger.info(`processed ${total} messages, found ${found}, missing ${total - found}`);
await this.updateMessageStatuses(foundKeys);
// attempt to fetch the still-missing VAAs from the guardians and store them
// this is useful for cases where the VAA doesn't exist in the `signedVAAsTable` (perhaps due to an outage) but is available from the guardians
const missingSignedVAARows: BigtableSignedVAAsRow[] = [];
for (const msg of missingVaaMessages) {
const { chain, emitter, sequence } = parseMessageId(msg.id);
const seq = sequence.toString();
const vaaBytes = await getSignedVAA(chain, emitter, seq);
if (vaaBytes) {
const key = makeSignedVAAsRowKey(chain, emitter, seq);
missingSignedVAARows.push({
key,
data: {
info: {
bytes: { value: vaaBytes, timestamp: '0' },
},
},
});
}
}
await this.storeSignedVAAs(missingSignedVAARows);
await this.publishSignedVAAs(missingSignedVAARows.map((r) => r.key));
// TODO: add slack message alerts
} catch (e) {
this.logger.error(e);
}
await sleep(WATCH_MISSING_TIMEOUT);
}
}
async storeSignedVAAs(rows: BigtableSignedVAAsRow[]): Promise<void> {
const instance = this.bigtable.instance(this.instanceId);
const table = instance.table(this.signedVAAsTableId);
const chunks = chunkArray(rows, 1000);
for (const chunk of chunks) {
await table.insert(chunk);
this.logger.info(`wrote ${chunk.length} signed VAAs to the ${this.signedVAAsTableId} table`);
}
}
async publishSignedVAAs(keys: string[]): Promise<void> {
if (keys.length === 0) {
return;
}
try {
const topic = this.pubsub.topic(this.pubsubSignedVAATopic);
const [topicExists] = await topic.exists(); // exists() resolves to a [boolean] tuple
if (!topicExists) {
this.logger.error(`pubsub topic doesn't exist: ${this.pubsubSignedVAATopic}`);
return;
}
for (const key of keys) {
await topic.publishMessage({ data: Buffer.from(key) });
}
this.logger.info(
`published ${keys.length} signed VAAs to pubsub topic: ${this.pubsubSignedVAATopic}`
);
} catch (e) {
this.logger.error(`pubsub error - ${e}`);
}
}
}
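// Minimal usage sketch (illustrative only, not called anywhere): constructing this
// class requires the BIGTABLE_*, FIRESTORE_* and PUBSUB_SIGNED_VAA_TOPIC env vars,
// and the block key / VAA key below are made-up values in the documented
// `block/timestamp` and `txHash:chain/emitter/sequence` formats.
//
// const db = new BigtableDatabase();
// await db.storeVaasByBlock('ethereum', {
//   '16098500/2023-01-01T00:00:00.000Z': [
//     '0xabc123:2/0000000000000000000000008ea8874192c8c715e620845f833f48f39b24e222/1',
//   ],
// });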

View File

@ -0,0 +1,23 @@
import { ChainName } from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
import { getLogger, WormholeLogger } from '../utils/logger';
import { VaasByBlock } from './types';
export class Database {
logger: WormholeLogger;
constructor() {
this.logger = getLogger('db');
}
static filterEmptyBlocks(vaasByBlock: VaasByBlock): VaasByBlock {
const filteredVaasByBlock: VaasByBlock = {};
for (const [block, vaas] of Object.entries(vaasByBlock)) {
if (vaas.length > 0) filteredVaasByBlock[block] = [...vaas];
}
return filteredVaasByBlock;
}
async getLastBlockByChain(chain: ChainName): Promise<string | null> {
throw new Error('Not Implemented');
}
async storeVaasByBlock(chain: ChainName, vaasByBlock: VaasByBlock): Promise<void> {
throw new Error('Not Implemented');
}
}
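// For reference, filterEmptyBlocks simply drops block keys that observed no VAAs
// (the keys and values below are made up):
//
// Database.filterEmptyBlocks({
//   '100/2023-01-01T00:00:00.000Z': [],
//   '101/2023-01-01T00:00:05.000Z': ['0xabc123:2/<emitter>/1'],
// })
// // => { '101/2023-01-01T00:00:05.000Z': ['0xabc123:2/<emitter>/1'] }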

View File

@ -0,0 +1,63 @@
import { ChainName, coalesceChainId } from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
import { readFileSync, writeFileSync } from 'fs';
import { DB_LAST_BLOCK_FILE, JSON_DB_FILE } from '../consts';
import { Database } from './Database';
import { DB, LastBlockByChain, VaasByBlock } from './types';
const ENCODING = 'utf8';
export class JsonDatabase extends Database {
db: DB;
lastBlockByChain: LastBlockByChain;
dbFile: string;
dbLastBlockFile: string;
constructor() {
super();
this.db = {};
this.lastBlockByChain = {};
if (!process.env.JSON_DB_FILE) {
this.logger.info(`no db file set, using default path=${JSON_DB_FILE}`);
}
if (!process.env.DB_LAST_BLOCK_FILE) {
this.logger.info(`no last block file set, using default path=${DB_LAST_BLOCK_FILE}`);
}
this.dbFile = JSON_DB_FILE;
this.dbLastBlockFile = DB_LAST_BLOCK_FILE;
try {
const rawDb = readFileSync(this.dbFile, ENCODING);
this.db = JSON.parse(rawDb);
const rawLast = readFileSync(this.dbLastBlockFile, ENCODING);
this.lastBlockByChain = JSON.parse(rawLast);
} catch (e) {
this.logger.warn('Failed to load DB, initializing a fresh one.');
this.db = {};
}
}
async getLastBlockByChain(chain: ChainName): Promise<string | null> {
const chainId = coalesceChainId(chain);
const blockInfo = this.lastBlockByChain[chainId];
if (blockInfo) {
const tokens = blockInfo.split('/');
return chain === 'aptos' ? tokens.at(-1)! : tokens[0];
}
return null;
}
async storeVaasByBlock(chain: ChainName, vaasByBlock: VaasByBlock): Promise<void> {
const chainId = coalesceChainId(chain);
const filteredVaasByBlock = Database.filterEmptyBlocks(vaasByBlock);
if (Object.keys(filteredVaasByBlock).length) {
this.db[chainId] = { ...(this.db[chainId] || {}), ...filteredVaasByBlock };
writeFileSync(this.dbFile, JSON.stringify(this.db), ENCODING);
}
// this will always overwrite the "last" block, so take caution if manually backfilling gaps
const blockKeys = Object.keys(vaasByBlock).sort(
(bk1, bk2) => Number(bk1.split('/')[0]) - Number(bk2.split('/')[0])
);
if (blockKeys.length) {
this.lastBlockByChain[chainId] = blockKeys[blockKeys.length - 1];
writeFileSync(this.dbLastBlockFile, JSON.stringify(this.lastBlockByChain), ENCODING);
}
}
}
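// For reference, the two files written above end up with these shapes
// (chain id 2 and all keys/values are illustrative):
//
// db.json:               { "2": { "16098500/2023-01-01T00:00:00.000Z": ["0xabc123:2/<emitter>/1"] } }
// lastBlockByChain.json: { "2": "16098500/2023-01-01T00:00:00.000Z" }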

View File

@ -0,0 +1,23 @@
import { CHAIN_ID_SOLANA } from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
import { expect, test } from '@jest/globals';
import { INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN } from '../../common';
import { JsonDatabase } from '../JsonDatabase';
import { getResumeBlockByChain, initDb, makeBlockKey } from '../utils';
test('getResumeBlockByChain', async () => {
const db = initDb() as JsonDatabase;
const fauxBlock = '98765';
const blockKey = makeBlockKey(fauxBlock, new Date().toISOString());
db.lastBlockByChain = { [CHAIN_ID_SOLANA]: blockKey };
// if a chain is in the database, that number should be returned
expect(await db.getLastBlockByChain('solana')).toEqual(fauxBlock);
expect(await getResumeBlockByChain('solana')).toEqual(Number(fauxBlock) + 1);
// if a chain is not in the database, the initial deployment block should be returned
expect(INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN.moonbeam).toBeDefined();
expect(await getResumeBlockByChain('moonbeam')).toEqual(
Number(INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN.moonbeam)
);
// if neither, null should be returned
expect(INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN.unset).toBeUndefined();
expect(await getResumeBlockByChain('unset')).toEqual(null);
});

View File

@ -0,0 +1,60 @@
import { ChainId } from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
import { Row } from '@google-cloud/bigtable';
export type VaasByBlock = { [blockInfo: string]: string[] };
export type DB = { [chain in ChainId]?: VaasByBlock };
export type LastBlockByChain = { [chain in ChainId]?: string };
export type JSONArray = string;
export type BigtableMessagesRow = {
key: string;
data: {
// column family
info: {
// columns
timestamp?: { value: string; timestamp: string };
txHash?: { value: string; timestamp: string };
hasSignedVaa?: { value: number; timestamp: string };
};
};
};
export interface BigtableSignedVAAsRow {
key: string;
data: {
// column family
info: {
// columns
bytes: { value: Buffer; timestamp: string };
};
};
}
export interface BigtableVAAsByTxHashRow {
key: string;
data: {
// column family
info: {
// columns
vaaKeys: { value: JSONArray; timestamp: string };
};
};
}
export interface BigtableMessagesResultRow extends Row {
key: string;
data: {
// column family
info: {
// columns
timestamp?: [{ value: string; timestamp: string }];
txHash?: [{ value: string; timestamp: string }];
hasSignedVaa?: [{ value: number; timestamp: string }];
};
};
}
export interface BigtableSignedVAAsResultRow extends Row {
key: string;
data: {
// column family
info: {
// columns
bytes: [{ value: Buffer; timestamp: string }];
};
};
}

View File

@ -0,0 +1,107 @@
import { ChainId, ChainName, coalesceChainId } from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
import {
INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN,
MAX_UINT_64,
padUint16,
padUint64,
} from '../common';
import { DB_SOURCE } from '../consts';
import { BigtableDatabase } from './BigtableDatabase';
import { Database } from './Database';
import { JsonDatabase } from './JsonDatabase';
import { VaasByBlock } from './types';
// Bigtable Message ID format
// chain/MAX_UINT64-block/emitter/sequence
// 00002/00000000000013140651/0000000000000000000000008ea8874192c8c715e620845f833f48f39b24e222/00000000000000000000
export function makeMessageId(
chainId: number,
block: string,
emitter: string,
sequence: string
): string {
return `${padUint16(chainId.toString())}/${padUint64(
(BigInt(MAX_UINT_64) - BigInt(block)).toString()
)}/${emitter}/${padUint64(sequence)}`;
}
export function parseMessageId(id: string): {
chain: number;
block: number;
emitter: string;
sequence: bigint;
} {
const [chain, inverseBlock, emitter, sequence] = id.split('/');
return {
chain: parseInt(chain),
block: Number(BigInt(MAX_UINT_64) - BigInt(inverseBlock)),
emitter,
sequence: BigInt(sequence),
};
}
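// Sketch only: an illustrative round trip of the ID format above (all values are
// made up). Because the block component is stored as MAX_UINT64 - block, newer
// messages sort first when Bigtable rows are scanned lexicographically.
export function exampleMessageIdRoundTrip() {
const id = makeMessageId(
2,
'16098500',
'0000000000000000000000008ea8874192c8c715e620845f833f48f39b24e222',
'42'
);
// -> { chain: 2, block: 16098500, emitter: <as passed in>, sequence: 42n }
return parseMessageId(id);
}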
// TODO: should this be a composite key or should the value become more complex
export const makeBlockKey = (block: string, timestamp: string): string => `${block}/${timestamp}`;
export const makeVaaKey = (
transactionHash: string,
chain: ChainId | ChainName,
emitter: string,
seq: string
): string => `${transactionHash}:${coalesceChainId(chain)}/${emitter}/${seq}`;
// make a bigtable row key for the `vaasByTxHash` table
export const makeVAAsByTxHashRowKey = (txHash: string, chain: number): string =>
`${txHash}/${padUint16(chain.toString())}`;
// make a bigtable row key for the `signedVAAs` table
export const makeSignedVAAsRowKey = (chain: number, emitter: string, sequence: string): string =>
`${padUint16(chain.toString())}/${emitter}/${padUint64(sequence)}`;
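// e.g. (illustrative values, assuming padUint16/padUint64 pad to 5 and 20 digits
// as in the message ID example above):
//   makeVAAsByTxHashRowKey('0xabc123', 2)     -> '0xabc123/00002'
//   makeSignedVAAsRowKey(2, '<emitter>', '1') -> '00002/<emitter>/00000000000000000001'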
let database: Database = new Database();
export const initDb = (): Database => {
if (DB_SOURCE === 'bigtable') {
database = new BigtableDatabase();
(database as BigtableDatabase).watchMissing();
} else {
database = new JsonDatabase();
}
return database;
};
export const getResumeBlockByChain = async (chain: ChainName): Promise<number | null> => {
const lastBlock = await database.getLastBlockByChain(chain);
const initialBlock = INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN[chain];
return lastBlock !== null
? Number(lastBlock) + 1
: initialBlock !== undefined
? Number(initialBlock)
: null;
};
export const storeVaasByBlock = async (
chain: ChainName,
vaasByBlock: VaasByBlock
): Promise<void> => {
return database.storeVaasByBlock(chain, vaasByBlock);
};
export function printRow(rowkey: string, rowData: { [x: string]: any }) {
console.log(`Reading data for ${rowkey}:`);
for (const columnFamily of Object.keys(rowData)) {
const columnFamilyData = rowData[columnFamily];
console.log(`Column Family ${columnFamily}`);
for (const columnQualifier of Object.keys(columnFamilyData)) {
const col = columnFamilyData[columnQualifier];
for (const cell of col) {
const labels = cell.labels.length ? ` [${cell.labels.join(',')}]` : '';
console.log(`\t${columnQualifier}: ${cell.value} @${cell.timestamp}${labels}`);
}
}
}
console.log();
}

View File

@ -0,0 +1,38 @@
import * as dotenv from 'dotenv';
dotenv.config();
import { ChainName } from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
import { initDb } from './databases/utils';
import { makeFinalizedWatcher } from './watchers/utils';
initDb();
const supportedChains: ChainName[] = [
// 'solana',
// 'ethereum',
//'bsc',
'polygon',
// 'avalanche',
// 'oasis',
// 'algorand',
// 'fantom',
// 'karura',
// 'acala',
// 'klaytn',
// 'celo',
// 'moonbeam',
// 'arbitrum',
// 'optimism',
// 'aptos',
// 'near',
// 'terra2',
// 'terra',
// 'xpla',
// 'injective',
// 'sui',
// 'base',
];
for (const chain of supportedChains) {
makeFinalizedWatcher(chain).watch();
}

View File

@ -0,0 +1,13 @@
import { Types } from 'aptos';
export type AptosEvent = Omit<Types.Event, 'data'> & {
version: string;
data: {
consistency_level: number;
nonce: string;
payload: string;
sender: string;
sequence: string;
timestamp: string;
};
};

View File

@ -0,0 +1,124 @@
// https://nomicon.io/Standards/EventsFormat
export type EventLog = {
event: string;
standard: string;
data?: unknown;
version?: string; // this is supposed to exist but is missing in WH logs
};
export type WormholePublishEventLog = {
standard: 'wormhole';
event: 'publish';
data: string;
nonce: number;
emitter: string;
seq: number;
block: number;
};
export type GetTransactionsByAccountIdResponse = [
| {
id: string | null;
result: {
type: string;
data: {
items: Transaction[];
};
};
}
| {
id: string | null;
error: {
message: string;
code: number;
data: {
code: string;
httpStatus: number;
path: string;
};
};
}
];
export type Transaction = {
hash: string;
signerId: string;
receiverId: string;
blockHash: string;
blockTimestamp: number;
actions: Action[];
status: 'unknown' | 'failure' | 'success';
};
export type GetTransactionsByAccountIdRequestParams = {
accountId: string;
limit: number;
cursor?: {
timestamp: string; // paginate with timestamp
indexInChunk: number;
};
};
type Action =
| {
kind: 'createAccount';
args: {};
}
| {
kind: 'deployContract';
args: {
code: string;
};
}
| {
kind: 'functionCall';
args: {
methodName: string;
args: string;
gas: number;
deposit: string;
};
}
| {
kind: 'transfer';
args: {
deposit: string;
};
}
| {
kind: 'stake';
args: {
stake: string;
publicKey: string;
};
}
| {
kind: 'addKey';
args: {
publicKey: string;
accessKey: {
nonce: number;
permission:
| {
type: 'fullAccess';
}
| {
type: 'functionCall';
contractId: string;
methodNames: string[];
};
};
};
}
| {
kind: 'deleteKey';
args: {
publicKey: string;
};
}
| {
kind: 'deleteAccount';
args: {
beneficiaryId: string;
};
};

View File

@ -0,0 +1,8 @@
/**
* Wrap input in array if it isn't already an array.
* @param input
*/
export function toArray<T>(input: T | T[]): T[] {
if (input == null) return []; // Catch undefined and null values
return input instanceof Array ? input : [input];
}

View File

@ -0,0 +1,18 @@
let loggingEnv: LoggingEnvironment | undefined = undefined;
export type LoggingEnvironment = {
logLevel: string;
logDir?: string;
};
export const getEnvironment = () => {
if (loggingEnv) {
return loggingEnv;
} else {
loggingEnv = {
logLevel: process.env.LOG_LEVEL || 'info',
logDir: process.env.LOG_DIR,
};
return loggingEnv;
}
};

View File

@ -0,0 +1,21 @@
import axios from 'axios';
import { AXIOS_CONFIG_JSON, GUARDIAN_RPC_HOSTS } from '../consts';
export const getSignedVAA = async (
chain: number,
emitter: string,
sequence: string
): Promise<Buffer | null> => {
for (const host of GUARDIAN_RPC_HOSTS) {
try {
const result = await axios.get(
`${host}/v1/signed_vaa/${chain}/${emitter}/${sequence.toString()}`,
AXIOS_CONFIG_JSON
);
if (result.data.vaaBytes) {
return Buffer.from(result.data.vaaBytes, 'base64');
}
} catch (e) {}
}
return null;
};

View File

@ -0,0 +1,80 @@
import { createLogger, format, Logger, LoggerOptions, transports } from 'winston';
import { toArray } from './array';
import { getEnvironment } from './environment';
const { combine, errors, printf, simple, timestamp } = format;
let logger: WormholeLogger | undefined = undefined;
export type WormholeLogger = Logger & { labels: string[] };
/**
* Get a logger that is scoped to the given labels. If a parent logger is
* provided, the parent's labels will be prepended to the given labels.
* TODO: add support for custom log levels for scoped loggers
*
* Assuming `LOG_LEVEL=info`, the loggers below will output the following logs.
* ```
* getLogger().info(1); // base logger
* const foo = getLogger('foo'); // implicitly uses base logger
* foo.error(2)
* getLogger('bar', foo).debug(3); // not logged because LOG_LEVEL=info
* getLogger('bar', foo).warn(4);
*
* [2022-12-20 05:04:34.168 +0000] [info] [main] 1
* [2022-12-20 05:04:34.170 +0000] [error] [foo] 2
* [2022-12-20 05:04:34.170 +0000] [warn] [foo | bar] 4
* ```
* @param labels
* @param parent
* @returns
*/
export const getLogger = (
labels: string | string[] = [],
parent?: WormholeLogger
): WormholeLogger => {
// base logger is parent if unspecified
if (!parent) parent = logger = logger ?? createBaseLogger();
// no labels, return parent logger
labels = toArray(labels);
if (labels.length === 0) return parent;
// create scoped logger
const child: WormholeLogger = parent.child({
labels: [...parent.labels, ...labels],
}) as WormholeLogger;
child.labels = labels;
return child;
};
const createBaseLogger = (): WormholeLogger => {
const { logLevel, logDir } = getEnvironment();
const logPath = !!logDir ? `${logDir}/watcher.${new Date().toISOString()}.log` : null;
console.log(`watcher is logging to ${logPath ?? 'the console'} at level ${logLevel}`);
const loggerConfig: LoggerOptions = {
level: logLevel,
format: combine(
simple(),
errors({ stack: true }),
timestamp({
format: 'YYYY-MM-DD HH:mm:ss.SSS ZZ',
}),
printf((info) => {
// log format: [YYYY-MM-DD HH:mm:ss.SSS A ZZ] [level] [labels] message
const labels = info.labels?.length > 0 ? info.labels.join(' | ') : 'main';
return `[${info.timestamp}] [${info.level}] [${labels}] ${info.message}`;
})
),
transports: [
logPath
? new transports.File({
filename: logPath,
})
: new transports.Console(),
],
};
const logger = createLogger(loggerConfig) as WormholeLogger;
logger.labels = [];
return logger;
};

View File

@ -0,0 +1,58 @@
import axios from 'axios';
import { connect } from 'near-api-js';
import { Provider } from 'near-api-js/lib/providers';
import { AXIOS_CONFIG_JSON } from '../consts';
import {
EventLog,
GetTransactionsByAccountIdRequestParams,
GetTransactionsByAccountIdResponse,
Transaction,
WormholePublishEventLog,
} from '../types/near';
// The following is obtained by going to: https://explorer.near.org/accounts/contract.wormhole_crypto.near
// and watching the network tab in the browser to see where the explorer is going.
const NEAR_EXPLORER_TRANSACTION_URL =
'https://explorer-backend-mainnet-prod-24ktefolwq-uc.a.run.app/trpc/transaction.listByAccountId';
export const NEAR_ARCHIVE_RPC = 'https://archival-rpc.mainnet.near.org';
export const getNearProvider = async (rpc: string): Promise<Provider> => {
const connection = await connect({ nodeUrl: rpc, networkId: 'mainnet' });
const provider = connection.connection.provider;
return provider;
};
export const getTransactionsByAccountId = async (
accountId: string,
batchSize: number,
timestamp: string
): Promise<Transaction[]> => {
const params: GetTransactionsByAccountIdRequestParams = {
accountId,
limit: batchSize,
cursor: {
timestamp,
indexInChunk: 0,
},
};
// using this api: https://github.com/near/near-explorer/blob/beead42ba2a91ad8d2ac3323c29b1148186eec98/backend/src/router/transaction/list.ts#L127
const res = (
(
await axios.get(
`${NEAR_EXPLORER_TRANSACTION_URL}?batch=1&input={"0":${JSON.stringify(params)}}`,
AXIOS_CONFIG_JSON
)
).data as GetTransactionsByAccountIdResponse
)[0];
if ('error' in res) throw new Error(res.error.message);
return res.result.data.items
.filter(
(tx) => tx.status === 'success' && tx.actions.some((a) => a.kind === 'functionCall') // other actions don't generate logs
)
.reverse(); // return chronological order
};
export const isWormholePublishEventLog = (log: EventLog): log is WormholePublishEventLog => {
return log.standard === 'wormhole' && log.event === 'publish';
};
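// Sketch only: NEP-297 events (the standard linked from types/near.ts) are emitted
// as log strings prefixed with `EVENT_JSON:`. This helper shows how such a raw log
// line could be narrowed to a Wormhole publish event; the prefix handling is an
// assumption based on that standard and is not used by the watcher itself.
export const parseWormholePublishLog = (rawLog: string): WormholePublishEventLog | null => {
const prefix = 'EVENT_JSON:';
if (!rawLog.startsWith(prefix)) return null;
try {
const event = JSON.parse(rawLog.slice(prefix.length)) as EventLog;
return isWormholePublishEventLog(event) ? event : null;
} catch (e) {
return null;
}
};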

View File

@ -0,0 +1,25 @@
import {
CompiledInstruction,
Message,
MessageCompiledInstruction,
MessageV0,
} from '@solana/web3.js';
import { decode } from 'bs58';
export const isLegacyMessage = (message: Message | MessageV0): message is Message => {
return message.version === 'legacy';
};
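// Legacy `CompiledInstruction`s carry a base58-encoded `data` string and an
// `accounts` index array, while v0 `MessageCompiledInstruction`s already use raw
// bytes and `accountKeyIndexes`; the helper below normalizes both shapes to the
// v0 form so callers can treat them uniformly.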
export const normalizeCompileInstruction = (
instruction: CompiledInstruction | MessageCompiledInstruction
): MessageCompiledInstruction => {
if ('accounts' in instruction) {
return {
accountKeyIndexes: instruction.accounts,
data: decode(instruction.data),
programIdIndex: instruction.programIdIndex,
};
} else {
return instruction;
}
};

View File

@ -0,0 +1,127 @@
import algosdk from 'algosdk';
import { Watcher } from './Watcher';
import { ALGORAND_INFO } from '../consts';
import { VaasByBlock } from '../databases/types';
import { makeBlockKey, makeVaaKey } from '../databases/utils';
type Message = {
blockKey: string;
vaaKey: string;
};
export class AlgorandWatcher extends Watcher {
// Arbitrarily large since the code here is capable of pulling all logs via indexer pagination
maximumBatchSize: number = 100000;
algodClient: algosdk.Algodv2;
indexerClient: algosdk.Indexer;
constructor() {
super('algorand');
if (!ALGORAND_INFO.algodServer) {
throw new Error('ALGORAND_INFO.algodServer is not defined!');
}
this.algodClient = new algosdk.Algodv2(
ALGORAND_INFO.algodToken,
ALGORAND_INFO.algodServer,
ALGORAND_INFO.algodPort
);
this.indexerClient = new algosdk.Indexer(
ALGORAND_INFO.token,
ALGORAND_INFO.server,
ALGORAND_INFO.port
);
}
async getFinalizedBlockNumber(): Promise<number> {
this.logger.info(`fetching final block for ${this.chain}`);
let status = await this.algodClient.status().do();
return status['last-round'];
}
async getApplicationLogTransactionIds(fromBlock: number, toBlock: number): Promise<string[]> {
// it is possible this may result in gaps if toBlock > response['current-round']
// perhaps to avoid this, getFinalizedBlockNumber could use the indexer?
let transactionIds: string[] = [];
let nextToken: string | undefined;
let numResults: number | undefined;
const maxResults = 225; // determined through testing
do {
const request = this.indexerClient
.lookupApplicationLogs(ALGORAND_INFO.appid)
.minRound(fromBlock)
.maxRound(toBlock);
if (nextToken) {
request.nextToken(nextToken);
}
const response = await request.do();
transactionIds = [
...transactionIds,
...(response?.['log-data']?.map((l: any) => l.txid) || []),
];
nextToken = response?.['next-token'];
numResults = response?.['log-data']?.length;
} while (nextToken && numResults && numResults >= maxResults);
return transactionIds;
}
processTransaction(transaction: any, parentId?: string): Message[] {
let messages: Message[] = [];
if (
transaction['tx-type'] !== 'pay' &&
transaction['application-transaction']?.['application-id'] === ALGORAND_INFO.appid &&
transaction.logs?.length === 1
) {
messages.push({
blockKey: makeBlockKey(
transaction['confirmed-round'].toString(),
new Date(transaction['round-time'] * 1000).toISOString()
),
vaaKey: makeVaaKey(
parentId || transaction.id,
this.chain,
Buffer.from(algosdk.decodeAddress(transaction.sender).publicKey).toString('hex'),
BigInt(`0x${Buffer.from(transaction.logs[0], 'base64').toString('hex')}`).toString()
),
});
}
if (transaction['inner-txns']) {
for (const innerTransaction of transaction['inner-txns']) {
messages = [...messages, ...this.processTransaction(innerTransaction, transaction.id)];
}
}
return messages;
}
async getMessagesForBlocks(fromBlock: number, toBlock: number): Promise<VaasByBlock> {
const txIds = await this.getApplicationLogTransactionIds(fromBlock, toBlock);
const transactions = [];
for (const txId of txIds) {
const response = await this.indexerClient.searchForTransactions().txid(txId).do();
if (response?.transactions?.[0]) {
transactions.push(response.transactions[0]);
}
}
let messages: Message[] = [];
for (const transaction of transactions) {
messages = [...messages, ...this.processTransaction(transaction)];
}
const vaasByBlock = messages.reduce((vaasByBlock, message) => {
if (!vaasByBlock[message.blockKey]) {
vaasByBlock[message.blockKey] = [];
}
vaasByBlock[message.blockKey].push(message.vaaKey);
return vaasByBlock;
}, {} as VaasByBlock);
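// always record an (empty) entry for toBlock so it becomes the resume point for
// the next batch, mirroring the other watchers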
const toBlockInfo = await this.indexerClient.lookupBlock(toBlock).do();
const toBlockTimestamp = new Date(toBlockInfo.timestamp * 1000).toISOString();
const toBlockKey = makeBlockKey(toBlock.toString(), toBlockTimestamp);
if (!vaasByBlock[toBlockKey]) {
vaasByBlock[toBlockKey] = [];
}
return vaasByBlock;
}
}

View File

@ -0,0 +1,96 @@
import { CONTRACTS } from '@certusone/wormhole-sdk/lib/cjs/utils';
import { INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN } from '../common';
import { AptosClient } from 'aptos';
import { z } from 'zod';
import { RPCS_BY_CHAIN } from '../consts';
import { VaasByBlock } from '../databases/types';
import { makeVaaKey } from '../databases/utils';
import { AptosEvent } from '../types/aptos';
import { Watcher } from './Watcher';
const APTOS_CORE_BRIDGE_ADDRESS = CONTRACTS.MAINNET.aptos.core;
const APTOS_EVENT_HANDLE = `${APTOS_CORE_BRIDGE_ADDRESS}::state::WormholeMessageHandle`;
const APTOS_FIELD_NAME = 'event';
/**
* NOTE: The Aptos watcher differs from other watchers in that it uses the event sequence number to
* fetch Wormhole messages and therefore also stores sequence numbers instead of block numbers.
*/
export class AptosWatcher extends Watcher {
client: AptosClient;
maximumBatchSize: number = 25;
constructor() {
super('aptos');
this.client = new AptosClient(RPCS_BY_CHAIN[this.chain]!);
}
async getFinalizedBlockNumber(): Promise<number> {
return Number(
(
await this.client.getEventsByEventHandle(
APTOS_CORE_BRIDGE_ADDRESS,
APTOS_EVENT_HANDLE,
APTOS_FIELD_NAME,
{ limit: 1 }
)
)[0].sequence_number
);
}
async getMessagesForBlocks(fromSequence: number, toSequence: number): Promise<VaasByBlock> {
const limit = toSequence - fromSequence + 1;
const events: AptosEvent[] = (await this.client.getEventsByEventHandle(
APTOS_CORE_BRIDGE_ADDRESS,
APTOS_EVENT_HANDLE,
APTOS_FIELD_NAME,
{ start: fromSequence, limit }
)) as AptosEvent[];
const vaasByBlock: VaasByBlock = {};
await Promise.all(
events.map(async ({ data, sequence_number, version }) => {
const [block, transaction] = await Promise.all([
this.client.getBlockByVersion(Number(version)),
this.client.getTransactionByVersion(Number(version)),
]);
const timestamp = new Date(Number(block.block_timestamp) / 1000).toISOString();
const blockKey = [block.block_height, timestamp, sequence_number].join('/'); // use custom block key for now so we can include sequence number
const emitter = data.sender.padStart(64, '0');
const vaaKey = makeVaaKey(transaction.hash, this.chain, emitter, data.sequence);
vaasByBlock[blockKey] = [...(vaasByBlock[blockKey] ?? []), vaaKey];
})
);
return vaasByBlock;
}
isValidBlockKey(key: string) {
try {
const [block, timestamp, sequence] = key.split('/');
const initialSequence = z
.number()
.int()
.parse(Number(INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN.aptos));
return (
z.number().int().parse(Number(block)) > 1094390 && // initial deployment block
Date.parse(z.string().datetime().parse(timestamp)) < Date.now() &&
z.number().int().parse(Number(sequence)) >= initialSequence // initial deployment sequence
);
} catch (e) {
return false;
}
}
isValidVaaKey(key: string) {
try {
const [txHash, vaaKey] = key.split(':');
const [_, emitter, sequence] = vaaKey.split('/');
return (
/^0x[0-9a-fA-F]{64}$/.test(z.string().parse(txHash)) &&
/^[0-9]{64}$/.test(z.string().parse(emitter)) &&
z.number().int().parse(Number(sequence)) >= 0
);
} catch (e) {
return false;
}
}
}

View File

@ -0,0 +1,125 @@
import axios from 'axios';
import { AXIOS_CONFIG_JSON, RPCS_BY_CHAIN } from '../consts';
import { EVMWatcher } from './EVMWatcher';
export class ArbitrumWatcher extends EVMWatcher {
rpc: string | undefined;
evmWatcher: EVMWatcher;
latestL2Finalized: number;
l1L2Map: Map<number, number>;
lastEthTime: number;
constructor() {
super('arbitrum');
this.rpc = RPCS_BY_CHAIN[this.chain];
if (!this.rpc) {
throw new Error(`${this.chain} RPC is not defined!`);
}
this.evmWatcher = new EVMWatcher('ethereum', 'finalized');
this.latestL2Finalized = 0;
this.l1L2Map = new Map<number, number>();
this.lastEthTime = 0;
this.maximumBatchSize = 25;
}
async getFinalizedBlockNumber(): Promise<number> {
if (!this.rpc) {
throw new Error(`${this.chain} RPC is not defined!`);
}
// This gets the latest L2 block so we can get the associated L1 block number
const l1Result: BlockByNumberResult = (
await axios.post(
this.rpc,
[
{
jsonrpc: '2.0',
id: 1,
method: 'eth_getBlockByNumber',
params: ['latest', false],
},
],
AXIOS_CONFIG_JSON
)
)?.data?.[0]?.result;
if (!l1Result || !l1Result.l1BlockNumber || !l1Result.number) {
throw new Error(
`Unable to parse result of ArbitrumWatcher::eth_getBlockByNumber for latest on ${this.rpc}`
);
}
const associatedL1: number = parseInt(l1Result.l1BlockNumber, 16);
const l2BlkNum: number = parseInt(l1Result.number, 16);
this.logger.debug(
'getFinalizedBlockNumber() checking map L1Block: ' + associatedL1 + ' => L2Block: ' + l2BlkNum
);
// Only update the map, if the L2 block number is newer
const inMapL2 = this.l1L2Map.get(associatedL1);
if (!inMapL2 || inMapL2 < l2BlkNum) {
this.logger.debug(`Updating map with ${associatedL1} => ${l2BlkNum}`);
this.l1L2Map.set(associatedL1, l2BlkNum);
}
// Only check every 30 seconds
const now = Date.now();
if (now - this.lastEthTime < 30_000) {
return this.latestL2Finalized;
}
this.lastEthTime = now;
// Get the latest finalized L1 block number
const evmFinal = await this.evmWatcher.getFinalizedBlockNumber();
this.logger.debug(`Finalized EVM block number = ${evmFinal}`);
this.logger.debug('Size of map = ' + this.l1L2Map.size);
// Walk the map looking for finalized L2 block number
for (let [l1, l2] of this.l1L2Map) {
if (l1 <= evmFinal) {
this.latestL2Finalized = l2;
this.logger.debug(`Removing key ${l1} from map`);
this.l1L2Map.delete(l1);
}
}
this.logger.debug(`LatestL2Finalized = ${this.latestL2Finalized}`);
return this.latestL2Finalized;
}
// This function is only used in test code.
getFirstMapEntry(): number[] {
if (this.l1L2Map.size > 0) {
for (let [l1, l2] of this.l1L2Map) {
return [l1, l2];
}
}
return [0, 0];
}
}
type BlockByNumberResult = {
baseFeePerGas: string;
difficulty: string;
extraData: string;
gasLimit: string;
gasUsed: string;
hash: string;
l1BlockNumber: string;
logsBloom: string;
miner: string;
mixHash: string;
nonce: string;
number: string;
parentHash: string;
receiptsRoot: string;
sendCount: string;
sendRoot: string;
sha3Uncles: string;
size: string;
stateRoot: string;
timestamp: string;
totalDifficulty: string;
transactions: string[];
transactionsRoot: string;
uncles: string[];
};

View File

@ -0,0 +1,11 @@
import { EVMWatcher } from './EVMWatcher';
export class BSCWatcher extends EVMWatcher {
constructor() {
super('bsc');
}
async getFinalizedBlockNumber(): Promise<number> {
const latestBlock = await super.getFinalizedBlockNumber();
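// back off 15 blocks from the 'latest' tag, presumably as a finality buffer for BSC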
return Math.max(latestBlock - 15, 0);
}
}

View File

@ -0,0 +1,274 @@
import { CONTRACTS, CosmWasmChainName } from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
import axios from 'axios';
import { AXIOS_CONFIG_JSON, RPCS_BY_CHAIN } from '../consts';
import { VaasByBlock } from '../databases/types';
import { makeBlockKey, makeVaaKey } from '../databases/utils';
import { Watcher } from './Watcher';
import { SHA256 } from 'jscrypto/SHA256';
import { Base64 } from 'jscrypto/Base64';
export class CosmwasmWatcher extends Watcher {
latestBlockTag: string;
getBlockTag: string;
hashTag: string;
rpc: string | undefined;
latestBlockHeight: number;
constructor(chain: CosmWasmChainName) {
super(chain);
if (chain === 'injective') {
throw new Error('Please use InjectiveExplorerWatcher for injective');
}
this.rpc = RPCS_BY_CHAIN[this.chain];
if (!this.rpc) {
throw new Error(`${this.chain} RPC is not defined!`);
}
this.latestBlockTag = 'blocks/latest';
this.getBlockTag = 'blocks/';
this.hashTag = 'cosmos/tx/v1beta1/txs/';
this.latestBlockHeight = 0;
}
/**
* Calculates the transaction hash from Amino-encoded string.
* @param data Amino-encoded string (base64)
* Taken from https://github.com/terra-money/terra.js/blob/9e5f553de3ff3e975eaaf91b1f06e45658b1a5e0/src/util/hash.ts
*/
hexToHash(data: string): string {
return SHA256.hash(Base64.parse(data)).toString().toUpperCase();
}
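// e.g. each base64 entry of block.data.txs below is passed through hexToHash() and
// the resulting uppercase hex digest is used as the tx hash in the `${hashTag}` lookup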
async getFinalizedBlockNumber(): Promise<number> {
const result = (await axios.get(`${this.rpc}/${this.latestBlockTag}`)).data;
if (result && result.block.header.height) {
let blockHeight: number = parseInt(result.block.header.height);
if (blockHeight !== this.latestBlockHeight) {
this.latestBlockHeight = blockHeight;
this.logger.debug('blockHeight = ' + blockHeight);
}
return blockHeight;
}
throw new Error(`Unable to parse result of ${this.latestBlockTag} on ${this.rpc}`);
}
async getMessagesForBlocks(fromBlock: number, toBlock: number): Promise<VaasByBlock> {
const address = CONTRACTS.MAINNET[this.chain].core;
if (!address) {
throw new Error(`Core contract not defined for ${this.chain}`);
}
this.logger.debug(`core contract for ${this.chain} is ${address}`);
let vaasByBlock: VaasByBlock = {};
this.logger.info(`fetching info for blocks ${fromBlock} to ${toBlock}`);
// For each block number, call {RPC}/{getBlockTag}/{block_number}
// For each entry in block.data.txs[], call hexToHash() to get the txHash
// Then call {RPC}/{hashTag}/{hash} to get the logs/events
// Walk the logs/events
for (let blockNumber = fromBlock; blockNumber <= toBlock; blockNumber++) {
this.logger.debug('Getting block number ' + blockNumber);
const blockResult: CosmwasmBlockResult = (
await axios.get(`${this.rpc}/${this.getBlockTag}${blockNumber}`)
).data;
if (!blockResult || !blockResult.block.data) {
throw new Error(`bad result for block ${blockNumber}`);
}
const blockKey = makeBlockKey(
blockNumber.toString(),
new Date(blockResult.block.header.time).toISOString()
);
vaasByBlock[blockKey] = [];
let vaaKey: string = '';
let numTxs: number = 0;
if (blockResult.block.data.txs) {
numTxs = blockResult.block.data.txs.length;
}
for (let i = 0; i < numTxs; i++) {
// The following check is redundant given the numTxs check above,
// but TypeScript needs it to narrow the type.
if (!blockResult.block.data.txs) {
continue;
}
let hash: string = this.hexToHash(blockResult.block.data.txs[i]);
this.logger.debug('blockNumber = ' + blockNumber + ', txHash[' + i + '] = ' + hash);
// console.log('Attempting to get hash', `${this.rpc}/${this.hashTag}${hash}`);
try {
const hashResult: CosmwasmHashResult = (
await axios.get(`${this.rpc}/${this.hashTag}${hash}`, AXIOS_CONFIG_JSON)
).data;
if (hashResult && hashResult.tx_response.events) {
const numEvents = hashResult.tx_response.events.length;
for (let j = 0; j < numEvents; j++) {
let type: string = hashResult.tx_response.events[j].type;
if (type === 'wasm') {
if (hashResult.tx_response.events[j].attributes) {
let attrs = hashResult.tx_response.events[j].attributes;
let emitter: string = '';
let sequence: string = '';
let coreContract: boolean = false;
// only care about _contract_address, message.sender and message.sequence
const numAttrs = attrs.length;
for (let k = 0; k < numAttrs; k++) {
const key = Buffer.from(attrs[k].key, 'base64').toString().toLowerCase();
this.logger.debug('Encoded Key = ' + attrs[k].key + ', decoded = ' + key);
if (key === 'message.sender') {
emitter = Buffer.from(attrs[k].value, 'base64').toString();
} else if (key === 'message.sequence') {
sequence = Buffer.from(attrs[k].value, 'base64').toString();
} else if (key === '_contract_address' || key === 'contract_address') {
let addr = Buffer.from(attrs[k].value, 'base64').toString();
if (addr === address) {
coreContract = true;
}
}
}
if (coreContract && emitter !== '' && sequence !== '') {
vaaKey = makeVaaKey(hash, this.chain, emitter, sequence);
this.logger.debug('blockKey: ' + blockKey);
this.logger.debug('Making vaaKey: ' + vaaKey);
vaasByBlock[blockKey] = [...(vaasByBlock[blockKey] || []), vaaKey];
}
}
}
}
} else {
this.logger.error('There were no hashResults');
}
} catch (e: any) {
// console.error(e);
if (
e?.response?.status === 500 &&
e?.response?.data?.code === 2 &&
e?.response?.data?.message.startsWith('json: error calling MarshalJSON')
) {
// Just skip this one...
} else {
// Rethrow the error because we only want to catch the above error
throw e;
}
}
}
}
return vaasByBlock;
}
}
type CosmwasmBlockResult = {
block_id: {
hash: string;
parts: {
total: number;
hash: string;
};
};
block: {
header: {
version: { block: string };
chain_id: string;
height: string;
time: string; // eg. '2023-01-03T12:13:00.849094631Z'
last_block_id: { hash: string; parts: { total: number; hash: string } };
last_commit_hash: string;
data_hash: string;
validators_hash: string;
next_validators_hash: string;
consensus_hash: string;
app_hash: string;
last_results_hash: string;
evidence_hash: string;
proposer_address: string;
};
data: { txs: string[] | null };
evidence: { evidence: null };
last_commit: {
height: string;
round: number;
block_id: { hash: string; parts: { total: number; hash: string } };
signatures: string[];
};
};
};
type CosmwasmHashResult = {
tx: {
body: {
messages: string[];
memo: string;
timeout_height: string;
extension_options: [];
non_critical_extension_options: [];
};
auth_info: {
signer_infos: string[];
fee: {
amount: [{ denom: string; amount: string }];
gas_limit: string;
payer: string;
granter: string;
};
};
signatures: string[];
};
tx_response: {
height: string;
txhash: string;
codespace: string;
code: 0;
data: string;
raw_log: string;
logs: [{ msg_index: number; log: string; events: EventsType }];
info: string;
gas_wanted: string;
gas_used: string;
tx: {
'@type': '/cosmos.tx.v1beta1.Tx';
body: {
messages: [
{
'@type': '/cosmos.staking.v1beta1.MsgBeginRedelegate';
delegator_address: string;
validator_src_address: string;
validator_dst_address: string;
amount: { denom: string; amount: string };
}
];
memo: '';
timeout_height: '0';
extension_options: [];
non_critical_extension_options: [];
};
auth_info: {
signer_infos: [
{
public_key: {
'@type': '/cosmos.crypto.secp256k1.PubKey';
key: string;
};
mode_info: { single: { mode: string } };
sequence: string;
}
];
fee: {
amount: [{ denom: string; amount: string }];
gas_limit: string;
payer: string;
granter: string;
};
};
signatures: string[];
};
timestamp: string; // eg. '2023-01-03T12:12:54Z'
events: EventsType[];
};
};
type EventsType = {
type: string;
attributes: [
{
key: string;
value: string;
index: boolean;
}
];
};

View File

@ -0,0 +1,242 @@
import { Implementation__factory } from '@certusone/wormhole-sdk/lib/cjs/ethers-contracts/factories/Implementation__factory';
import { CONTRACTS, EVMChainName } from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
import { Log } from '@ethersproject/abstract-provider';
import axios from 'axios';
import { BigNumber } from 'ethers';
import { AXIOS_CONFIG_JSON, RPCS_BY_CHAIN } from '../consts';
import { VaasByBlock } from '../databases/types';
import { makeBlockKey, makeVaaKey } from '../databases/utils';
import { Watcher } from './Watcher';
// This is the hash for topic[0] of the core contract event LogMessagePublished
// https://github.com/wormhole-foundation/wormhole/blob/main/ethereum/contracts/Implementation.sol#L12
export const LOG_MESSAGE_PUBLISHED_TOPIC =
'0x6eb224fb001ed210e379b335e35efe88672a8ce935d981a6896b27ffdf52a3b2';
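// For reference, this value should equal ethers' utils.id('LogMessagePublished(address,uint64,uint32,bytes,uint8)'),
// assuming that event signature from the Implementation.sol link above.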
export const wormholeInterface = Implementation__factory.createInterface();
export type BlockTag = 'finalized' | 'safe' | 'latest';
export type Block = {
hash: string;
number: number;
timestamp: number;
};
export type ErrorBlock = {
code: number; //6969,
message: string; //'Error: No response received from RPC endpoint in 60s'
};
export class EVMWatcher extends Watcher {
finalizedBlockTag: BlockTag;
lastTimestamp: number;
latestFinalizedBlockNumber: number;
constructor(chain: EVMChainName, finalizedBlockTag: BlockTag = 'latest') {
super(chain);
this.lastTimestamp = 0;
this.latestFinalizedBlockNumber = 0;
this.finalizedBlockTag = finalizedBlockTag;
if (chain === 'acala' || chain === 'karura') {
this.maximumBatchSize = 50;
}
}
async getBlock(blockNumberOrTag: number | BlockTag): Promise<Block> {
const rpc = RPCS_BY_CHAIN[this.chain];
if (!rpc) {
throw new Error(`${this.chain} RPC is not defined!`);
}
let result = (
await axios.post(
rpc,
[
{
jsonrpc: '2.0',
id: 1,
method: 'eth_getBlockByNumber',
params: [
typeof blockNumberOrTag === 'number'
? `0x${blockNumberOrTag.toString(16)}`
: blockNumberOrTag,
false,
],
},
],
AXIOS_CONFIG_JSON
)
)?.data?.[0];
if (result && result.result === null) {
// Found null block
if (
typeof blockNumberOrTag === 'number' &&
blockNumberOrTag < this.latestFinalizedBlockNumber - 1000
) {
return {
hash: '',
number: BigNumber.from(blockNumberOrTag).toNumber(),
timestamp: BigNumber.from(this.lastTimestamp).toNumber(),
};
}
} else if (result && result.error && result.error.code === 6969) {
return {
hash: '',
number: BigNumber.from(blockNumberOrTag).toNumber(),
timestamp: BigNumber.from(this.lastTimestamp).toNumber(),
};
}
result = result?.result;
if (result && result.hash && result.number && result.timestamp) {
// Convert to Ethers compatible type
this.lastTimestamp = result.timestamp;
return {
hash: result.hash,
number: BigNumber.from(result.number).toNumber(),
timestamp: BigNumber.from(result.timestamp).toNumber(),
};
}
throw new Error(
`Unable to parse result of eth_getBlockByNumber for ${blockNumberOrTag} on ${rpc}`
);
}
async getBlocks(fromBlock: number, toBlock: number): Promise<Block[]> {
const rpc = RPCS_BY_CHAIN[this.chain];
if (!rpc) {
throw new Error(`${this.chain} RPC is not defined!`);
}
const reqs: any[] = [];
for (let blockNumber = fromBlock; blockNumber <= toBlock; blockNumber++) {
reqs.push({
jsonrpc: '2.0',
id: (blockNumber - fromBlock).toString(),
method: 'eth_getBlockByNumber',
params: [`0x${blockNumber.toString(16)}`, false],
});
}
const results = (await axios.post(rpc, reqs, AXIOS_CONFIG_JSON))?.data;
if (results && results.length) {
// Convert to Ethers compatible type
return results.map(
(response: undefined | { result?: Block; error?: ErrorBlock }, idx: number) => {
// Karura is getting 6969 errors for some blocks, so we'll just return empty blocks for those instead of throwing an error.
// We take the timestamp from the previous block, which is not ideal but should be fine.
if (
(response &&
response.result === null &&
fromBlock + idx < this.latestFinalizedBlockNumber - 1000) ||
(response?.error && response.error?.code && response.error.code === 6969)
) {
return {
hash: '',
number: BigNumber.from(fromBlock + idx).toNumber(),
timestamp: BigNumber.from(this.lastTimestamp).toNumber(),
};
}
if (
response?.result &&
response.result?.hash &&
response.result.number &&
response.result.timestamp
) {
this.lastTimestamp = response.result.timestamp;
return {
hash: response.result.hash,
number: BigNumber.from(response.result.number).toNumber(),
timestamp: BigNumber.from(response.result.timestamp).toNumber(),
};
}
console.error(reqs[idx], response, idx);
throw new Error(
`Unable to parse result of eth_getBlockByNumber for ${fromBlock + idx} on ${rpc}`
);
}
);
}
throw new Error(
`Unable to parse result of eth_getBlockByNumber for range ${fromBlock}-${toBlock} on ${rpc}`
);
}
async getLogs(
fromBlock: number,
toBlock: number,
address: string,
topics: string[]
): Promise<Array<Log>> {
const rpc = RPCS_BY_CHAIN[this.chain];
if (!rpc) {
throw new Error(`${this.chain} RPC is not defined!`);
}
const result = (
await axios.post(
rpc,
[
{
jsonrpc: '2.0',
id: 1,
method: 'eth_getLogs',
params: [
{
fromBlock: `0x${fromBlock.toString(16)}`,
toBlock: `0x${toBlock.toString(16)}`,
address,
topics,
},
],
},
],
AXIOS_CONFIG_JSON
)
)?.data?.[0]?.result;
if (result) {
// Convert to Ethers compatible type
return result.map((l: Log) => ({
...l,
blockNumber: BigNumber.from(l.blockNumber).toNumber(),
transactionIndex: BigNumber.from(l.transactionIndex).toNumber(),
logIndex: BigNumber.from(l.logIndex).toNumber(),
}));
}
throw new Error(`Unable to parse result of eth_getLogs for ${fromBlock}-${toBlock} on ${rpc}`);
}
async getFinalizedBlockNumber(): Promise<number> {
this.logger.info(`fetching block ${this.finalizedBlockTag}`);
const block: Block = await this.getBlock(this.finalizedBlockTag);
this.latestFinalizedBlockNumber = block.number;
return block.number;
}
async getMessagesForBlocks(fromBlock: number, toBlock: number): Promise<VaasByBlock> {
const address = CONTRACTS.MAINNET[this.chain].core;
if (!address) {
throw new Error(`Core contract not defined for ${this.chain}`);
}
const logs = await this.getLogs(fromBlock, toBlock, address, [LOG_MESSAGE_PUBLISHED_TOPIC]);
const timestampsByBlock: { [block: number]: string } = {};
// fetch timestamps for each block
const vaasByBlock: VaasByBlock = {};
this.logger.info(`fetching info for blocks ${fromBlock} to ${toBlock}`);
const blocks = await this.getBlocks(fromBlock, toBlock);
for (const block of blocks) {
const timestamp = new Date(block.timestamp * 1000).toISOString();
timestampsByBlock[block.number] = timestamp;
vaasByBlock[makeBlockKey(block.number.toString(), timestamp)] = [];
}
this.logger.info(`processing ${logs.length} logs`);
for (const log of logs) {
const blockNumber = log.blockNumber;
const emitter = log.topics[1].slice(2);
const {
args: { sequence },
} = wormholeInterface.parseLog(log);
const vaaKey = makeVaaKey(log.transactionHash, this.chain, emitter, sequence.toString());
const blockKey = makeBlockKey(blockNumber.toString(), timestampsByBlock[blockNumber]);
vaasByBlock[blockKey] = [...(vaasByBlock[blockKey] || []), vaaKey];
}
return vaasByBlock;
}
}

View File

@ -0,0 +1,236 @@
import { CONTRACTS } from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
import axios from 'axios';
import { RPCS_BY_CHAIN } from '../consts';
import { VaasByBlock } from '../databases/types';
import { makeBlockKey, makeVaaKey } from '../databases/utils';
import { EventObjectsTypes, RawLogEvents } from './TerraExplorerWatcher';
import { Watcher } from './Watcher';
export class InjectiveExplorerWatcher extends Watcher {
// Arbitrarily large since the code here is capable of pulling all logs via indexer pagination
maximumBatchSize: number = 1_000_000;
latestBlockTag: string;
getBlockTag: string;
hashTag: string;
contractTag: string;
rpc: string | undefined;
latestBlockHeight: number;
constructor() {
super('injective');
this.rpc = RPCS_BY_CHAIN[this.chain];
if (!this.rpc) {
throw new Error(`${this.chain} RPC is not defined!`);
}
this.latestBlockHeight = 0;
this.latestBlockTag = 'api/explorer/v1/blocks'; // This returns a page of the latest blocks
this.getBlockTag = 'api/explorer/v1/blocks/';
this.hashTag = 'api/explorer/v1/txs/';
this.contractTag = 'api/explorer/v1/contractTxs/';
}
async getFinalizedBlockNumber(): Promise<number> {
const result: ExplorerBlocks = (await axios.get(`${this.rpc}/${this.latestBlockTag}`)).data;
if (result && result.paging.total) {
let blockHeight: number = result.paging.total;
if (blockHeight !== this.latestBlockHeight) {
this.latestBlockHeight = blockHeight;
this.logger.info('blockHeight = ' + blockHeight);
}
return blockHeight;
}
throw new Error(`Unable to parse result of ${this.latestBlockTag} on ${this.rpc}`);
}
// retrieve blocks for token bridge contract.
// should be core, but the explorer doesn't support it yet
// use "to": as the pagination key
// compare block height ("block_number":) with what is passed in.
async getMessagesForBlocks(fromBlock: number, toBlock: number): Promise<VaasByBlock> {
const coreAddress = CONTRACTS.MAINNET[this.chain].core;
const address = CONTRACTS.MAINNET[this.chain].token_bridge;
if (!address) {
throw new Error(`Token Bridge contract not defined for ${this.chain}`);
}
this.logger.debug(`Token Bridge contract for ${this.chain} is ${address}`);
let vaasByBlock: VaasByBlock = {};
this.logger.info(`fetching info for blocks ${fromBlock} to ${toBlock}`);
const limit: number = 50;
let done: boolean = false;
let skip: number = 0;
let lastBlockInserted: number = 0;
while (!done) {
// This URL gets the paginated list of transactions for the token bridge contract
let url: string = `${this.rpc}/${this.contractTag}${address}?skip=${skip}&limit=${limit}`;
this.logger.debug(`Query string = ${url}`);
const bulkTxnResult = (
await axios.get<ContractTxnResult>(url, {
headers: {
'User-Agent': 'Mozilla/5.0',
},
})
).data;
if (!bulkTxnResult) {
throw new Error('bad bulkTxnResult');
}
skip = bulkTxnResult.paging.to;
const bulkTxns: ContractTxnData[] = bulkTxnResult.data;
if (!bulkTxns) {
throw new Error('No transactions');
}
for (let i: number = 0; i < bulkTxns.length; ++i) {
// Walk the transactions
const txn: ContractTxnData = bulkTxns[i];
const height: number = txn.block_number;
if (height >= fromBlock && height <= toBlock) {
// We only care about the transactions in the given block range
this.logger.debug(`Found one: ${fromBlock}, ${height}, ${toBlock}`);
const blockKey = makeBlockKey(
txn.block_number.toString(),
new Date(txn.block_unix_timestamp).toISOString()
);
vaasByBlock[blockKey] = [];
lastBlockInserted = height;
this.logger.debug(`lastBlockInserted = ${lastBlockInserted}`);
let vaaKey: string = '';
// Each txn has an array of raw_logs
if (txn.logs) {
const rawLogs: RawLogEvents[] = txn.logs;
for (let j: number = 0; j < rawLogs.length; ++j) {
const rawLog: RawLogEvents = rawLogs[j];
const events: EventObjectsTypes[] = rawLog.events;
if (!events) {
this.logger.debug(
`No events in rawLog${j} for block ${height}, hash = ${txn.hash}`
);
continue;
}
for (let k: number = 0; k < events.length; k++) {
const event: EventObjectsTypes = events[k];
if (event.type === 'wasm') {
if (event.attributes) {
let attrs = event.attributes;
let emitter: string = '';
let sequence: string = '';
let coreContract: boolean = false;
// only care about _contract_address, message.sender and message.sequence
const numAttrs = attrs.length;
for (let l = 0; l < numAttrs; l++) {
const key = attrs[l].key;
if (key === 'message.sender') {
emitter = attrs[l].value;
} else if (key === 'message.sequence') {
sequence = attrs[l].value;
} else if (key === '_contract_address' || key === 'contract_address') {
let addr = attrs[l].value;
if (addr === coreAddress) {
coreContract = true;
}
}
}
if (coreContract && emitter !== '' && sequence !== '') {
vaaKey = makeVaaKey(txn.hash, this.chain, emitter, sequence);
this.logger.debug('blockKey: ' + blockKey);
this.logger.debug('Making vaaKey: ' + vaaKey);
vaasByBlock[blockKey] = [...(vaasByBlock[blockKey] || []), vaaKey];
}
}
}
}
}
}
}
if (height < fromBlock) {
this.logger.debug('Breaking out due to height < fromBlock');
done = true;
break;
}
}
if (bulkTxns.length < limit) {
this.logger.debug('Breaking out because we ran out of txns.');
done = true;
}
}
if (lastBlockInserted < toBlock) {
// Need to create something for the last requested block because it will
// become the new starting point for subsequent calls.
this.logger.debug(`Adding filler for block ${toBlock}`);
const blkUrl = `${this.rpc}/${this.getBlockTag}${toBlock}`;
this.logger.debug(`Query string for block = ${blkUrl}`);
const result = (await axios.get<ExplorerBlock>(blkUrl)).data;
if (!result) {
throw new Error(`Unable to get block information for block ${toBlock}`);
}
const blockKey = makeBlockKey(
result.data.height.toString(),
new Date(result.data.timestamp).toISOString()
);
vaasByBlock[blockKey] = [];
}
return vaasByBlock;
}
}
type ExplorerBlocks = {
paging: { total: number; from: number; to: number };
data: ExplorerBlocksData[];
};
type ExplorerBlock = {
s: string;
data: ExplorerBlocksData;
};
type ExplorerBlocksData = {
height: number;
proposer: string;
moniker: string;
block_hash: string;
parent_hash: string;
num_pre_commits: number;
num_txs: number;
timestamp: string;
};
type ContractTxnResult = {
data: ContractTxnData[];
paging: {
from: number;
to: number;
total: number;
};
};
type ContractTxnData = {
block_number: number;
block_timestamp: string;
block_unix_timestamp: number;
code: number;
codespace: string;
data: string;
error_log: string;
gas_fee: {
amount: Coin[];
gas_limit: number;
granter: string;
payer: string;
};
gas_used: number;
gas_wanted: number;
hash: string;
id: string;
info: string;
logs?: RawLogEvents[];
memo: string;
// messages: [];
// signatures: [];
tx_number: number;
tx_type: string;
};
type Coin = {
denom: string;
amount: string;
};

View File

@ -0,0 +1,42 @@
import { sleep } from '../common';
import axios from 'axios';
import { AXIOS_CONFIG_JSON, RPCS_BY_CHAIN } from '../consts';
import { EVMWatcher } from './EVMWatcher';
export class MoonbeamWatcher extends EVMWatcher {
constructor() {
super('moonbeam');
}
async getFinalizedBlockNumber(): Promise<number> {
const latestBlock = await super.getFinalizedBlockNumber();
let isBlockFinalized = false;
while (!isBlockFinalized) {
if (!RPCS_BY_CHAIN.moonbeam) {
throw new Error('Moonbeam RPC is not defined!');
}
await sleep(100);
// refetch the block by number to get an up-to-date hash
try {
const blockFromNumber = await this.getBlock(latestBlock);
isBlockFinalized =
(
await axios.post(
RPCS_BY_CHAIN.moonbeam,
[
{
jsonrpc: '2.0',
id: 1,
method: 'moon_isBlockFinalized',
params: [blockFromNumber.hash],
},
],
AXIOS_CONFIG_JSON
)
)?.data?.[0]?.result || false;
} catch (e) {
this.logger.error(`error while trying to check for finality of block ${latestBlock}`);
}
}
return latestBlock;
}
}
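For reference, the finality probe above boils down to one batched JSON-RPC call. A minimal sketch; the RPC URL is an assumption standing in for RPCS_BY_CHAIN.moonbeam:

```ts
import axios from 'axios';

// Assumption: any Moonbeam JSON-RPC endpoint that exposes moon_isBlockFinalized.
const MOONBEAM_RPC = 'https://rpc.api.moonbeam.network';

// Sends the same batched JSON-RPC request the watcher above uses and returns
// whether the given block hash has been finalized.
async function isBlockFinalized(blockHash: string): Promise<boolean> {
  const response = await axios.post(MOONBEAM_RPC, [
    { jsonrpc: '2.0', id: 1, method: 'moon_isBlockFinalized', params: [blockHash] },
  ]);
  return response?.data?.[0]?.result || false;
}
```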

View File

@ -0,0 +1,122 @@
import { CONTRACTS } from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
import { decode } from 'bs58';
import { Provider, TypedError } from 'near-api-js/lib/providers';
import { BlockResult, ExecutionStatus } from 'near-api-js/lib/providers/provider';
import ora from 'ora';
import { z } from 'zod';
import { RPCS_BY_CHAIN } from '../consts';
import { VaasByBlock } from '../databases/types';
import { makeBlockKey, makeVaaKey } from '../databases/utils';
import { EventLog } from '../types/near';
import { getNearProvider, isWormholePublishEventLog } from '../utils/near';
import { Watcher } from './Watcher';
export class NearWatcher extends Watcher {
provider: Provider | null = null;
constructor() {
super('near');
}
async getFinalizedBlockNumber(): Promise<number> {
this.logger.info(`fetching final block for ${this.chain}`);
const provider = await this.getProvider();
const block = await provider.block({ finality: 'final' });
return block.header.height;
}
async getMessagesForBlocks(fromBlock: number, toBlock: number): Promise<VaasByBlock> {
// assume toBlock was retrieved from getFinalizedBlockNumber and is finalized
this.logger.info(`fetching info for blocks ${fromBlock} to ${toBlock}`);
const provider = await this.getProvider();
const blocks: BlockResult[] = [];
let block: BlockResult | null = null;
try {
block = await provider.block({ blockId: toBlock });
blocks.push(block);
while (true) {
// traverse backwards via block hashes: https://github.com/wormhole-foundation/wormhole-monitor/issues/35
block = await provider.block({ blockId: block.header.prev_hash });
if (block.header.height < fromBlock) break;
blocks.push(block);
}
} catch (e) {
if (e instanceof TypedError && e.type === 'HANDLER_ERROR') {
const error = block
? `block ${block.header.prev_hash} is too old, use backfillNear for blocks before height ${block.header.height}`
: `toBlock ${toBlock} is too old, use backfillNear for this range`; // starting block too old
this.logger.error(error);
} else {
throw e;
}
}
return getMessagesFromBlockResults(provider, blocks);
}
async getProvider(): Promise<Provider> {
return (this.provider = this.provider || (await getNearProvider(RPCS_BY_CHAIN.near!)));
}
isValidVaaKey(key: string) {
try {
const [txHash, vaaKey] = key.split(':');
const txHashDecoded = Buffer.from(decode(txHash)).toString('hex');
const [_, emitter, sequence] = vaaKey.split('/');
return (
/^[0-9a-fA-F]{64}$/.test(z.string().parse(txHashDecoded)) &&
/^[0-9a-fA-F]{64}$/.test(z.string().parse(emitter)) &&
z.number().int().parse(Number(sequence)) >= 0
);
} catch (e) {
return false;
}
}
}
export const getMessagesFromBlockResults = async (
provider: Provider,
blocks: BlockResult[],
debug: boolean = false
): Promise<VaasByBlock> => {
const vaasByBlock: VaasByBlock = {};
let log: ora.Ora;
if (debug) log = ora(`Fetching messages from ${blocks.length} blocks...`).start();
for (let i = 0; i < blocks.length; i++) {
if (debug) log!.text = `Fetching messages from block ${i + 1}/${blocks.length}...`;
const { height, timestamp } = blocks[i].header;
const blockKey = makeBlockKey(height.toString(), new Date(timestamp / 1_000_000).toISOString());
vaasByBlock[blockKey] = [];
const chunks = [];
for (const chunk of blocks[i].chunks) {
chunks.push(await provider.chunk(chunk.chunk_hash));
}
const transactions = chunks.flatMap(({ transactions }) => transactions);
for (const tx of transactions) {
const outcome = await provider.txStatus(tx.hash, CONTRACTS.MAINNET.near.core);
const logs = outcome.receipts_outcome
.filter(
({ outcome }) =>
(outcome as any).executor_id === CONTRACTS.MAINNET.near.core &&
(outcome.status as ExecutionStatus).SuccessValue
)
.flatMap(({ outcome }) => outcome.logs)
.filter((log) => log.startsWith('EVENT_JSON:')) // https://nomicon.io/Standards/EventsFormat
.map((log) => JSON.parse(log.slice(11)) as EventLog)
.filter(isWormholePublishEventLog);
for (const log of logs) {
const vaaKey = makeVaaKey(tx.hash, 'near', log.emitter, log.seq.toString());
vaasByBlock[blockKey] = [...vaasByBlock[blockKey], vaaKey];
}
}
}
if (debug) {
const numMessages = Object.values(vaasByBlock).flat().length;
log!.succeed(`Fetched ${numMessages} messages from ${blocks.length} blocks`);
}
return vaasByBlock;
};
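The NEP-297 log handling in getMessagesFromBlockResults can be sketched in isolation. The `standard`/`publish` check below is an assumption standing in for isWormholePublishEventLog, whose exact criteria live in ../utils/near:

```ts
// Shape mirrors only the EventLog fields actually used above (emitter, seq).
type WormholeEventLog = { standard: string; event: string; emitter: string; seq: number };

function parseWormholePublishLogs(logs: string[]): WormholeEventLog[] {
  return logs
    .filter((log) => log.startsWith('EVENT_JSON:')) // NEP-297: https://nomicon.io/Standards/EventsFormat
    .map((log) => JSON.parse(log.slice('EVENT_JSON:'.length)) as WormholeEventLog)
    .filter((log) => log.standard === 'wormhole' && log.event === 'publish'); // assumed criteria
}
```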

View File

@ -0,0 +1,37 @@
import axios from 'axios';
import { ethers } from 'ethers';
import { AXIOS_CONFIG_JSON, POLYGON_ROOT_CHAIN_ADDRESS, POLYGON_ROOT_CHAIN_RPC } from '../consts';
import { EVMWatcher } from './EVMWatcher';
export class PolygonWatcher extends EVMWatcher {
constructor() {
super('polygon');
}
async getFinalizedBlockNumber(): Promise<number> {
this.logger.info('fetching last child block from Ethereum');
const rootChain = new ethers.utils.Interface([
`function getLastChildBlock() external view returns (uint256)`,
]);
const callData = rootChain.encodeFunctionData('getLastChildBlock');
const callResult = (
await axios.post(
POLYGON_ROOT_CHAIN_RPC,
[
{
jsonrpc: '2.0',
id: 1,
method: 'eth_call',
params: [
{ to: POLYGON_ROOT_CHAIN_ADDRESS, data: callData },
'latest', // does the guardian use latest?
],
},
],
AXIOS_CONFIG_JSON
)
)?.data?.[0]?.result;
const block = rootChain.decodeFunctionResult('getLastChildBlock', callResult)[0].toNumber();
this.logger.info(`rooted child block ${block}`);
return block;
}
}
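The same finality check can be expressed with an ethers Contract wrapper instead of a raw eth_call. A sketch under the assumption that ETH_RPC and ROOT_CHAIN_ADDRESS stand in for POLYGON_ROOT_CHAIN_RPC and POLYGON_ROOT_CHAIN_ADDRESS from ../consts:

```ts
import { ethers } from 'ethers';

// Assumptions: any Ethereum mainnet RPC, and the RootChainProxy address on mainnet.
const ETH_RPC = 'https://ethereum.publicnode.com';
const ROOT_CHAIN_ADDRESS = '0x86e4dc95c7fbdbf52e33d563bbdb00823894c287';

async function getLastChildBlock(): Promise<number> {
  const provider = new ethers.providers.JsonRpcProvider(ETH_RPC);
  const rootChain = new ethers.Contract(
    ROOT_CHAIN_ADDRESS,
    ['function getLastChildBlock() external view returns (uint256)'],
    provider
  );
  // Same call as the raw eth_call above, but via a typed contract wrapper.
  return (await rootChain.getLastChildBlock()).toNumber();
}
```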

View File

@ -0,0 +1,193 @@
import { getPostedMessage } from '@certusone/wormhole-sdk/lib/cjs/solana/wormhole';
import { CONTRACTS } from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
import {
Commitment,
ConfirmedSignatureInfo,
Connection,
PublicKey,
SolanaJSONRPCError,
VersionedBlockResponse,
} from '@solana/web3.js';
import { decode } from 'bs58';
import { z } from 'zod';
import { RPCS_BY_CHAIN } from '../consts';
import { VaasByBlock } from '../databases/types';
import { makeBlockKey, makeVaaKey } from '../databases/utils';
import { isLegacyMessage, normalizeCompileInstruction } from '../utils/solana';
import { Watcher } from './Watcher';
const WORMHOLE_PROGRAM_ID = CONTRACTS.MAINNET.solana.core;
const COMMITMENT: Commitment = 'finalized';
const GET_SIGNATURES_LIMIT = 1000;
export class SolanaWatcher extends Watcher {
rpc: string;
// this is set as a class field so we can modify it in tests
getSignaturesLimit = GET_SIGNATURES_LIMIT;
// The Solana watcher uses the `getSignaturesForAddress` RPC endpoint to fetch all transactions
// containing Wormhole messages. This API takes in signatures and paginates based on number of
// transactions returned. Since we don't know the number of transactions in advance, we use
// a block range of 100K slots. Technically, batch size can be arbitrarily large since pagination
// of the WH transactions within that range is handled internally below.
maximumBatchSize = 100_000;
constructor() {
super('solana');
this.rpc = RPCS_BY_CHAIN.solana!;
}
async getFinalizedBlockNumber(): Promise<number> {
const connection = new Connection(this.rpc, COMMITMENT);
return connection.getSlot();
}
async getMessagesForBlocks(fromSlot: number, toSlot: number): Promise<VaasByBlock> {
const connection = new Connection(this.rpc, COMMITMENT);
// in the rare case of maximumBatchSize skipped slots in a row,
// the recursion below can narrow the range until fromSlot exceeds toSlot
if (fromSlot > toSlot) throw new Error('solana: invalid block range');
this.logger.info(`fetching info for blocks ${fromSlot} to ${toSlot}`);
const vaasByBlock: VaasByBlock = {};
// identify block range by fetching signatures of the first and last transactions
// getSignaturesForAddress walks backwards so fromSignature occurs after toSignature
let toBlock: VersionedBlockResponse | null = null;
try {
toBlock = await connection.getBlock(toSlot, { maxSupportedTransactionVersion: 0 });
} catch (e) {
if (e instanceof SolanaJSONRPCError && (e.code === -32007 || e.code === -32009)) {
// failed to get confirmed block: slot was skipped or missing in long-term storage
return this.getMessagesForBlocks(fromSlot, toSlot - 1);
} else {
throw e;
}
}
if (!toBlock || !toBlock.blockTime || toBlock.transactions.length === 0) {
return this.getMessagesForBlocks(fromSlot, toSlot - 1);
}
const fromSignature =
toBlock.transactions[toBlock.transactions.length - 1].transaction.signatures[0];
let fromBlock: VersionedBlockResponse | null = null;
try {
fromBlock = await connection.getBlock(fromSlot, { maxSupportedTransactionVersion: 0 });
} catch (e) {
if (e instanceof SolanaJSONRPCError && (e.code === -32007 || e.code === -32009)) {
// failed to get confirmed block: slot was skipped or missing in long-term storage
return this.getMessagesForBlocks(fromSlot + 1, toSlot);
} else {
throw e;
}
}
if (!fromBlock || !fromBlock.blockTime || fromBlock.transactions.length === 0) {
return this.getMessagesForBlocks(fromSlot + 1, toSlot);
}
const toSignature = fromBlock.transactions[0].transaction.signatures[0];
// get all core bridge signatures between fromTransaction and toTransaction
let numSignatures = this.getSignaturesLimit;
let currSignature: string | undefined = fromSignature;
while (numSignatures === this.getSignaturesLimit) {
const signatures: ConfirmedSignatureInfo[] = await connection.getSignaturesForAddress(
new PublicKey(WORMHOLE_PROGRAM_ID),
{
before: currSignature,
until: toSignature,
limit: this.getSignaturesLimit,
}
);
this.logger.info(`processing ${signatures.length} transactions`);
// In order to determine if a transaction has a Wormhole message, we normalize and iterate
// through all instructions in the transaction. Only PostMessage instructions are relevant
// when looking for messages. PostMessageUnreliable instructions are ignored because there
// are no data availability guarantees (ie the associated message accounts may have been
// reused, overwriting previous data). Then, the message account is the account given by
// the second index in the instruction's account key indices. From here, we can fetch the
// message data from the account and parse out the emitter and sequence.
const results = await connection.getTransactions(
signatures.map((s) => s.signature),
{
maxSupportedTransactionVersion: 0,
}
);
if (results.length !== signatures.length) {
throw new Error(`solana: failed to fetch tx for signatures`);
}
for (const res of results) {
if (res?.meta?.err) {
// skip errored txs
continue;
}
if (!res || !res.blockTime) {
throw new Error(
`solana: failed to fetch tx for signature ${
res?.transaction.signatures[0] || 'unknown'
}`
);
}
const message = res.transaction.message;
const accountKeys = isLegacyMessage(message)
? message.accountKeys
: message.staticAccountKeys;
const programIdIndex = accountKeys.findIndex((i) => i.toBase58() === WORMHOLE_PROGRAM_ID);
const instructions = message.compiledInstructions;
const innerInstructions =
res.meta?.innerInstructions?.flatMap((i) =>
i.instructions.map(normalizeCompileInstruction)
) || [];
const whInstructions = innerInstructions
.concat(instructions)
.filter((i) => i.programIdIndex === programIdIndex);
for (const instruction of whInstructions) {
// skip if not postMessage instruction
const instructionId = instruction.data;
if (instructionId[0] !== 0x01) continue;
const accountId = accountKeys[instruction.accountKeyIndexes[1]];
const {
message: { emitterAddress, sequence },
} = await getPostedMessage(connection, accountId.toBase58(), COMMITMENT);
const blockKey = makeBlockKey(
res.slot.toString(),
new Date(res.blockTime * 1000).toISOString()
);
const vaaKey = makeVaaKey(
res.transaction.signatures[0],
this.chain,
emitterAddress.toString('hex'),
sequence.toString()
);
vaasByBlock[blockKey] = [...(vaasByBlock[blockKey] || []), vaaKey];
}
}
numSignatures = signatures.length;
currSignature = signatures.at(-1)?.signature;
}
// add last block for storeVaasByBlock
const lastBlockKey = makeBlockKey(
toSlot.toString(),
new Date(toBlock.blockTime * 1000).toISOString()
);
return { [lastBlockKey]: [], ...vaasByBlock };
}
isValidVaaKey(key: string) {
try {
const [txHash, vaaKey] = key.split(':');
const txHashDecoded = Buffer.from(decode(txHash)).toString('hex');
const [_, emitter, sequence] = vaaKey.split('/');
return !!(
/^[0-9a-fA-F]{128}$/.test(z.string().parse(txHashDecoded)) &&
/^[0-9a-fA-F]{64}$/.test(z.string().parse(emitter)) &&
z.number().int().parse(Number(sequence)) >= 0
);
} catch (e) {
return false;
}
}
}
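The signature pagination described in the comments above, sketched as a standalone helper. This is illustrative, not part of the change; the program id and limit are parameters rather than the hard-coded core bridge constants:

```ts
import { Connection, PublicKey } from '@solana/web3.js';

// Walks getSignaturesForAddress backwards from `before` until `until`,
// collecting every page, exactly as the loop above does for the core bridge.
async function getAllSignatures(
  connection: Connection,
  programId: PublicKey,
  before: string,
  until: string,
  limit = 1000
): Promise<string[]> {
  const all: string[] = [];
  let cursor: string | undefined = before;
  let pageSize = limit;
  while (pageSize === limit) {
    const page = await connection.getSignaturesForAddress(programId, {
      before: cursor,
      until,
      limit,
    });
    all.push(...page.map((s) => s.signature));
    pageSize = page.length;
    cursor = page.at(-1)?.signature;
  }
  return all;
}
```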

View File

@ -0,0 +1,123 @@
import { CHAIN_ID_SUI } from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
import {
Checkpoint,
JsonRpcClient,
PaginatedEvents,
SuiTransactionBlockResponse,
} from '@mysten/sui.js';
import { array } from 'superstruct';
import { RPCS_BY_CHAIN } from '../consts';
import { VaasByBlock } from '../databases/types';
import { Watcher } from './Watcher';
import { makeBlockKey, makeVaaKey } from '../databases/utils';
const SUI_EVENT_HANDLE = `0x5306f64e312b581766351c07af79c72fcb1cd25147157fdc2f8ad76de9a3fb6a::publish_message::WormholeMessage`;
type PublishMessageEvent = {
consistency_level: number;
nonce: number;
payload: number[];
sender: string;
sequence: string;
timestamp: string;
};
export class SuiWatcher extends Watcher {
client: JsonRpcClient;
maximumBatchSize: number = 100000; // arbitrarily large as this pages back by events
constructor() {
super('sui');
this.client = new JsonRpcClient(RPCS_BY_CHAIN[this.chain]!);
}
// TODO: this might break using numbers, the whole service needs a refactor to use BigInt
async getFinalizedBlockNumber(): Promise<number> {
return Number(
(await this.client.request('sui_getLatestCheckpointSequenceNumber', undefined)).result
);
}
// TODO: this might break using numbers, the whole service needs a refactor to use BigInt
async getMessagesForBlocks(fromCheckpoint: number, toCheckpoint: number): Promise<VaasByBlock> {
this.logger.info(`fetching info for checkpoints ${fromCheckpoint} to ${toCheckpoint}`);
const vaasByBlock: VaasByBlock = {};
{
// record an empty entry for the initial checkpoint so the queried range is tracked even when it contains no events
const fromCheckpointTimestamp = new Date(
Number(
(
await this.client.requestWithType(
'sui_getCheckpoint',
{ id: fromCheckpoint.toString() },
Checkpoint
)
).timestampMs
)
).toISOString();
const fromBlockKey = makeBlockKey(fromCheckpoint.toString(), fromCheckpointTimestamp);
vaasByBlock[fromBlockKey] = [];
}
let lastCheckpoint: null | number = null;
let cursor: any = undefined;
let hasNextPage = false;
do {
const response = await this.client.requestWithType(
'suix_queryEvents',
{
query: { MoveEventType: SUI_EVENT_HANDLE },
cursor,
descending_order: true,
},
PaginatedEvents
);
const digest = response.data.length
? response.data[response.data.length - 1].id.txDigest
: null;
lastCheckpoint = digest
? Number(
(
await this.client.requestWithType(
'sui_getTransactionBlock',
{ digest },
SuiTransactionBlockResponse
)
).checkpoint!
)
: null;
cursor = response.nextCursor;
hasNextPage = response.hasNextPage;
const txBlocks = await this.client.requestWithType(
'sui_multiGetTransactionBlocks',
{ digests: response.data.map((e) => e.id.txDigest) },
array(SuiTransactionBlockResponse)
);
const checkpointByTxDigest = txBlocks.reduce<Record<string, string | undefined>>(
(value, { digest, checkpoint }) => {
value[digest] = checkpoint;
return value;
},
{}
);
for (const event of response.data) {
const checkpoint = checkpointByTxDigest[event.id.txDigest];
if (!checkpoint) continue;
const checkpointNum = Number(checkpoint);
if (checkpointNum < fromCheckpoint || checkpointNum > toCheckpoint) continue;
const msg = event.parsedJson as PublishMessageEvent;
const timestamp = new Date(Number(msg.timestamp) * 1000).toISOString();
const vaaKey = makeVaaKey(
event.id.txDigest,
CHAIN_ID_SUI,
msg.sender.slice(2),
msg.sequence
);
const blockKey = makeBlockKey(checkpoint, timestamp);
vaasByBlock[blockKey] = [...(vaasByBlock[blockKey] || []), vaaKey];
}
} while (hasNextPage && lastCheckpoint && fromCheckpoint < lastCheckpoint);
return vaasByBlock;
}
}
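A minimal usage sketch, not part of this change. It assumes RPCS_BY_CHAIN.sui is configured, as the constructor requires, and that the class lives at ./SuiWatcher:

```ts
import { SuiWatcher } from './SuiWatcher';

(async () => {
  const watcher = new SuiWatcher();
  const latest = await watcher.getFinalizedBlockNumber();
  // Fetch a small, recent checkpoint range and print the resulting block keys.
  const vaasByBlock = await watcher.getMessagesForBlocks(latest - 10, latest);
  console.log(Object.keys(vaasByBlock));
})();
```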

View File

@ -0,0 +1,234 @@
import { CONTRACTS, CosmWasmChainName } from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
import axios from 'axios';
import { AXIOS_CONFIG_JSON, RPCS_BY_CHAIN } from '../consts';
import { VaasByBlock } from '../databases/types';
import { makeBlockKey, makeVaaKey } from '../databases/utils';
import { Watcher } from './Watcher';
export class TerraExplorerWatcher extends Watcher {
// Arbitrarily large since the code here is capable of pulling all logs from all blocks via indexer pagination
maximumBatchSize: number = 100000;
latestBlockTag: string;
getBlockTag: string;
allTxsTag: string;
rpc: string | undefined;
latestBlockHeight: number;
constructor(chain: CosmWasmChainName) {
super(chain);
this.rpc = RPCS_BY_CHAIN[this.chain];
if (!this.rpc) {
throw new Error(`${this.chain} RPC is not defined!`);
}
this.latestBlockTag = 'blocks/latest';
this.getBlockTag = 'blocks/';
this.allTxsTag = 'v1/txs?';
this.latestBlockHeight = 0;
}
async getFinalizedBlockNumber(): Promise<number> {
const result = (await axios.get(`${this.rpc}/${this.latestBlockTag}`, AXIOS_CONFIG_JSON)).data;
if (result && result.block.header.height) {
let blockHeight: number = parseInt(result.block.header.height);
if (blockHeight !== this.latestBlockHeight) {
this.latestBlockHeight = blockHeight;
this.logger.debug('blockHeight = ' + blockHeight);
}
return blockHeight;
}
throw new Error(`Unable to parse result of ${this.latestBlockTag} on ${this.rpc}`);
}
// Retrieve transactions for the core contract.
// Use "next" as the pagination key.
// Compare the block height ("height") with the range passed in.
async getMessagesForBlocks(fromBlock: number, toBlock: number): Promise<VaasByBlock> {
const address = CONTRACTS.MAINNET[this.chain].core;
if (!address) {
throw new Error(`Core contract not defined for ${this.chain}`);
}
this.logger.debug(`core contract for ${this.chain} is ${address}`);
let vaasByBlock: VaasByBlock = {};
this.logger.info(`fetching info for blocks ${fromBlock} to ${toBlock}`);
const limit: number = 100;
let done: boolean = false;
let offset: number = 0;
let lastBlockInserted: number = 0;
while (!done) {
// This URL gets the paginated list of transactions for the core contract
let url: string = `${this.rpc}/${this.allTxsTag}offset=${offset}&limit=${limit}&account=${address}`;
this.logger.debug(`Query string = ${url}`);
const bulkTxnResult: BulkTxnResult = (
await axios.get(url, {
headers: {
'User-Agent': 'Mozilla/5.0',
'Accept-Encoding': 'application/json',
},
})
).data;
if (!bulkTxnResult) {
throw new Error('bad bulkTxnResult');
}
offset = bulkTxnResult.next;
const bulkTxns: BulkTxn[] = bulkTxnResult.txs;
if (!bulkTxns) {
throw new Error('No transactions');
}
for (let i: number = 0; i < bulkTxns.length; ++i) {
// Walk the transactions
const txn: BulkTxn = bulkTxns[i];
const height: number = parseInt(txn.height);
if (height >= fromBlock && height <= toBlock) {
// We only care about the transactions in the given block range
this.logger.debug(`Found one: ${fromBlock}, ${height}, ${toBlock}`);
const blockKey = makeBlockKey(txn.height, new Date(txn.timestamp).toISOString());
vaasByBlock[blockKey] = [];
lastBlockInserted = height;
this.logger.debug(`lastBlockInserted = ${lastBlockInserted}`);
let vaaKey: string = '';
// Each txn has an array of raw_logs
const rawLogs: RawLogEvents[] = JSON.parse(txn.raw_log);
for (let j: number = 0; j < rawLogs.length; ++j) {
const rawLog: RawLogEvents = rawLogs[j];
const events: EventObjectsTypes[] = rawLog.events;
if (!events) {
this.logger.debug(
`No events in rawLog${j} for block ${height}, hash = ${txn.txhash}`
);
continue;
}
for (let k: number = 0; k < events.length; k++) {
const event: EventObjectsTypes = events[k];
if (event.type === 'wasm') {
if (event.attributes) {
let attrs = event.attributes;
let emitter: string = '';
let sequence: string = '';
let coreContract: boolean = false;
// only care about _contract_address, message.sender and message.sequence
const numAttrs = attrs.length;
for (let l = 0; l < numAttrs; l++) {
const key = attrs[l].key;
if (key === 'message.sender') {
emitter = attrs[l].value;
} else if (key === 'message.sequence') {
sequence = attrs[l].value;
} else if (key === '_contract_address' || key === 'contract_address') {
let addr = attrs[l].value;
if (addr === address) {
coreContract = true;
}
}
}
if (coreContract && emitter !== '' && sequence !== '') {
vaaKey = makeVaaKey(txn.txhash, this.chain, emitter, sequence);
this.logger.debug('blockKey: ' + blockKey);
this.logger.debug('Making vaaKey: ' + vaaKey);
vaasByBlock[blockKey] = [...(vaasByBlock[blockKey] || []), vaaKey];
}
}
}
}
}
}
if (height < fromBlock) {
this.logger.debug('Breaking out due to height < fromBlock');
done = true;
break;
}
}
if (bulkTxns.length < limit) {
this.logger.debug('Breaking out because we ran out of txns.');
done = true;
}
}
if (lastBlockInserted < toBlock) {
// Need to create something for the last requested block because it will
// become the new starting point for subsequent calls.
this.logger.debug(`Adding filler for block ${toBlock}`);
const blkUrl = `${this.rpc}/${this.getBlockTag}${toBlock}`;
const result: CosmwasmBlockResult = (await axios.get(blkUrl, AXIOS_CONFIG_JSON)).data;
if (!result) {
throw new Error(`Unable to get block information for block ${toBlock}`);
}
const blockKey = makeBlockKey(
result.block.header.height.toString(),
new Date(result.block.header.time).toISOString()
);
vaasByBlock[blockKey] = [];
}
return vaasByBlock;
}
}
type BulkTxnResult = {
next: number; // e.g. 400123609
limit: number; // e.g. 10
txs: BulkTxn[];
};
type BulkTxn = {
id: number; // e.g. 400300689
chainId: string; // e.g. 'columbus-5'
tx: [Object];
logs: [];
height: string; // e.g. '11861053'
txhash: string; // e.g. '31C82DC3432B4824B5195AA572A8963BA6147CAFD3ADAC6C5250BF447FA5D206'
raw_log: string;
gas_used: string; // e.g. '510455'
timestamp: string; // e.g. '2023-03-10T12:18:05Z'
gas_wanted: string; // e.g. '869573'
};
export type RawLogEvents = {
msg_index?: number;
events: EventObjectsTypes[];
};
export type EventObjectsTypes = {
type: string;
attributes: Attribute[];
};
type Attribute = {
key: string;
value: string;
};
type CosmwasmBlockResult = {
block_id: {
hash: string;
parts: {
total: number;
hash: string;
};
};
block: {
header: {
version: { block: string };
chain_id: string;
height: string;
time: string; // eg. '2023-01-03T12:13:00.849094631Z'
last_block_id: { hash: string; parts: { total: number; hash: string } };
last_commit_hash: string;
data_hash: string;
validators_hash: string;
next_validators_hash: string;
consensus_hash: string;
app_hash: string;
last_results_hash: string;
evidence_hash: string;
proposer_address: string;
};
data: { txs: string[] | null };
evidence: { evidence: null };
last_commit: {
height: string;
round: number;
block_id: { hash: string; parts: { total: number; hash: string } };
signatures: string[];
};
};
};
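The offset/"next" pagination used by getMessagesForBlocks above, sketched as a standalone async generator. fcdUrl and coreAddress are caller-supplied assumptions, and only the fields the watcher actually reads are typed:

```ts
import axios from 'axios';

// Shapes mirror only the BulkTxnResult / BulkTxn fields the watcher above relies on.
type TxnPage = { next: number; txs: { height: string; txhash: string; raw_log: string }[] };

// Pages through the indexer's transaction list for `coreAddress`, following the
// "next" cursor until a short page signals the end, like getMessagesForBlocks above.
async function* pageContractTxns(fcdUrl: string, coreAddress: string, limit = 100) {
  let offset = 0;
  while (true) {
    const url = `${fcdUrl}/v1/txs?offset=${offset}&limit=${limit}&account=${coreAddress}`;
    const { data } = await axios.get<TxnPage>(url, {
      headers: { 'User-Agent': 'Mozilla/5.0', 'Accept-Encoding': 'application/json' },
    });
    yield data.txs;
    if (!data.txs || data.txs.length < limit) break; // ran out of transactions
    offset = data.next;
  }
}
```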

View File

@ -0,0 +1,88 @@
import { ChainName } from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
import {
INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN,
sleep,
} from '../common';
import { z } from 'zod';
import { TIMEOUT } from '../consts';
import { VaasByBlock } from '../databases/types';
import { getResumeBlockByChain, storeVaasByBlock } from '../databases/utils';
import { getLogger, WormholeLogger } from '../utils/logger';
export class Watcher {
chain: ChainName;
logger: WormholeLogger;
maximumBatchSize: number = 100;
constructor(chain: ChainName) {
this.chain = chain;
this.logger = getLogger(chain);
}
async getFinalizedBlockNumber(): Promise<number> {
throw new Error('Not Implemented');
}
async getMessagesForBlocks(fromBlock: number, toBlock: number): Promise<VaasByBlock> {
throw new Error('Not Implemented');
}
isValidBlockKey(key: string) {
try {
const [block, timestamp] = key.split('/');
const initialBlock = z
.number()
.int()
.parse(Number(INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN[this.chain]));
return (
z.number().int().parse(Number(block)) > initialBlock &&
Date.parse(z.string().datetime().parse(timestamp)) < Date.now()
);
} catch (e) {
return false;
}
}
isValidVaaKey(key: string): boolean {
throw new Error('Not Implemented');
}
async watch(): Promise<void> {
let toBlock: number | null = null;
let fromBlock: number | null = await getResumeBlockByChain(this.chain);
let retry = 0;
while (true) {
try {
if (fromBlock !== null && toBlock !== null && fromBlock <= toBlock) {
// fetch logs for the block range, inclusive of toBlock
toBlock = Math.min(fromBlock + this.maximumBatchSize - 1, toBlock);
this.logger.info(`fetching messages from ${fromBlock} to ${toBlock}`);
const vaasByBlock = await this.getMessagesForBlocks(fromBlock, toBlock);
await storeVaasByBlock(this.chain, vaasByBlock);
fromBlock = toBlock + 1;
}
try {
this.logger.info('fetching finalized block');
toBlock = await this.getFinalizedBlockNumber();
if (fromBlock === null) {
// handle first loop on a fresh chain without initial block set
fromBlock = toBlock;
}
retry = 0;
await sleep(TIMEOUT);
} catch (e) {
// skip attempting to fetch messages until getting the finalized block succeeds
toBlock = null;
this.logger.error(`error fetching finalized block`);
throw e;
}
} catch (e) {
retry++;
this.logger.error(e);
const expoBacko = TIMEOUT * 2 ** retry;
this.logger.warn(`backing off for ${expoBacko}ms`);
await sleep(expoBacko);
}
}
}
}
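A minimal sketch of how a concrete watcher plugs into this polling loop; the 'bsc' chain name and the stub return values are placeholders for illustration only. The base class supplies resume, batching, and backoff, so subclasses only provide chain-specific RPC calls:

```ts
import { Watcher } from './Watcher';
import { VaasByBlock } from '../databases/types';

class ExampleWatcher extends Watcher {
  constructor() {
    super('bsc'); // placeholder chain name
  }
  async getFinalizedBlockNumber(): Promise<number> {
    return 0; // placeholder: return the chain's latest finalized block
  }
  async getMessagesForBlocks(fromBlock: number, toBlock: number): Promise<VaasByBlock> {
    return {}; // placeholder: return { '<block>/<ISO timestamp>': [vaaKey, ...] }
  }
}

// watch() resumes from the database, fetches messages in batches, and backs off on errors.
new ExampleWatcher().watch();
```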

View File

@ -0,0 +1,71 @@
import { expect, jest, test } from '@jest/globals';
import { INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN } from '../../common';
import { AlgorandWatcher } from '../AlgorandWatcher';
jest.setTimeout(180000);
const initialAlgorandBlock = Number(INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN.algorand);
test('getFinalizedBlockNumber', async () => {
const watcher = new AlgorandWatcher();
const blockNumber = await watcher.getFinalizedBlockNumber();
expect(blockNumber).toBeGreaterThan(initialAlgorandBlock);
});
test('getMessagesForBlocks', async () => {
const watcher = new AlgorandWatcher();
const messages = await watcher.getMessagesForBlocks(25692450, 25692450);
expect(messages).toMatchObject({ '25692450/2022-12-21T02:00:40.000Z': [] });
});
test('getMessagesForBlocks initial block', async () => {
const watcher = new AlgorandWatcher();
const messages = await watcher.getMessagesForBlocks(initialAlgorandBlock, initialAlgorandBlock);
expect(messages).toMatchObject({
'22931277/2022-08-19T15:10:48.000Z': [
'2RBQLCETCLFV4F3PQ7IHEWVWQV3MCP4UM5S5OFZM23XMC2O2DJ6A:8/67e93fa6c8ac5c819990aa7340c0c16b508abb1178be9b30d024b8ac25193d45/1',
],
});
});
test('getMessagesForBlocks indexer pagination support', async () => {
const watcher = new AlgorandWatcher();
const messages = await watcher.getMessagesForBlocks(initialAlgorandBlock, 27069946);
expect(Object.keys(messages).length).toEqual(420);
});
test('getMessagesForBlocks seq < 192', async () => {
const watcher = new AlgorandWatcher();
const messages = await watcher.getMessagesForBlocks(25428873, 25428873);
expect(messages).toMatchObject({
'25428873/2022-12-09T18:10:08.000Z': [
'M6QPEZ42P5O23II7SCWZTNZ7MHBSOH6KUNAPMH5YL3XHGNTEFUSQ:8/67e93fa6c8ac5c819990aa7340c0c16b508abb1178be9b30d024b8ac25193d45/191',
],
});
});
test('getMessagesForBlocks seq = 192', async () => {
const watcher = new AlgorandWatcher();
const messages = await watcher.getMessagesForBlocks(25433218, 25433218);
expect(messages).toMatchObject({
'25433218/2022-12-09T22:40:55.000Z': [
'3PJPDBGTQK6JXAQEJNOYFE4NLLKFMCTKRY5FYNAXSEBDO25XUUJQ:8/67e93fa6c8ac5c819990aa7340c0c16b508abb1178be9b30d024b8ac25193d45/192',
],
});
});
test('getMessagesForBlocks seq > 383', async () => {
const watcher = new AlgorandWatcher();
const messages = await watcher.getMessagesForBlocks(26856742, 26856742);
expect(messages).toMatchObject({
'26856742/2023-02-09T09:05:04.000Z': [
'LJNYXPG5VLJNNTBLSZSHLZQ7XQWTSUPKGA7APVI53J3MAKHQN72Q:8/67e93fa6c8ac5c819990aa7340c0c16b508abb1178be9b30d024b8ac25193d45/384',
],
});
});
test('getMessagesForBlocks on known empty block', async () => {
const watcher = new AlgorandWatcher();
const messages = await watcher.getMessagesForBlocks(23761195, 23761195);
expect(messages).toMatchObject({ '23761195/2022-09-28T21:42:30.000Z': [] });
});

View File

@ -0,0 +1,53 @@
import { expect, jest, test } from '@jest/globals';
import { INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN } from '@wormhole-foundation/wormhole-monitor-common/dist/consts';
import { AptosWatcher } from '../AptosWatcher';
jest.setTimeout(60000);
const INITAL_SEQUENCE_NUMBER = Number(INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN.aptos ?? 0);
test('getFinalizedSequenceNumber', async () => {
const watcher = new AptosWatcher();
const blockNumber = await watcher.getFinalizedBlockNumber();
expect(blockNumber).toBeGreaterThan(INITAL_SEQUENCE_NUMBER);
});
test('getMessagesForSequenceNumbers', async () => {
const watcher = new AptosWatcher();
const messages = await watcher.getMessagesForBlocks(0, 1);
expect(messages).toMatchObject({
'1095891/2022-10-19T00:55:54.676Z/0': [
'0x27b5808a7cfdb688e02be192ed334da683975b7487e8be7a09670b3088b58908:22/0000000000000000000000000000000000000000000000000000000000000001/0',
],
'1099053/2022-10-19T01:08:28.554Z/1': [
'0x9c0d5200d61d20aa20c72f5785bee645dd7c526989443eed4140fb46e5207248:22/0000000000000000000000000000000000000000000000000000000000000001/1',
],
});
// validate keys
expect(watcher.isValidBlockKey(Object.keys(messages)[0])).toBe(true);
expect(watcher.isValidVaaKey(Object.values(messages).flat()[0])).toBe(true);
// test that block number and timestamp are non-decreasing and the event sequence number is strictly increasing
const latestSequenceNumber = await watcher.getFinalizedBlockNumber();
const messageKeys = Object.keys(
await watcher.getMessagesForBlocks(
latestSequenceNumber - watcher.maximumBatchSize + 1,
latestSequenceNumber
)
).sort();
console.log(messageKeys);
expect(messageKeys.length).toBe(watcher.maximumBatchSize);
expect(Date.parse(messageKeys.at(-1)!.split('/')[1])).toBeLessThan(Date.now());
let prevKey = messageKeys[0];
for (let i = 1; i < watcher.maximumBatchSize; i++) {
const currKey = messageKeys[i];
const [prevBlockNumber, prevTimestamp, prevEventSequenceNumber] = prevKey.split('/');
const [blockNumber, timestamp, eventSequenceNumber] = currKey.split('/');
// blocks may contain multiple wormhole messages
expect(Number(blockNumber)).toBeGreaterThanOrEqual(Number(prevBlockNumber));
expect(Date.parse(timestamp)).toBeGreaterThanOrEqual(Date.parse(prevTimestamp));
expect(Number(eventSequenceNumber)).toBeGreaterThan(Number(prevEventSequenceNumber));
prevKey = currKey;
}
});

View File

@ -0,0 +1,33 @@
import { expect, jest, test } from '@jest/globals';
import { INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN } from '../../common';
import { ArbitrumWatcher } from '../ArbitrumWatcher';
jest.setTimeout(60000);
const initialArbitrumBlock = Number(INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN.arbitrum);
const initialEthBlock = Number(INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN.ethereum);
test('getFinalizedBlockNumber', async () => {
const watcher = new ArbitrumWatcher();
const blockNumber = await watcher.getFinalizedBlockNumber();
expect(blockNumber).toEqual(0);
let retval: number[] = watcher.getFirstMapEntry();
expect(retval[0]).toBeGreaterThan(initialEthBlock);
expect(retval[1]).toBeGreaterThan(initialArbitrumBlock);
});
// The following test will be enabled once there is a block containing a Wormhole message to test against.
test.skip('getMessagesForBlocks', async () => {
const watcher = new ArbitrumWatcher();
const vaasByBlock = await watcher.getMessagesForBlocks(53473701, 53473701);
const entries = Object.entries(vaasByBlock);
expect(entries.length).toEqual(1);
expect(entries.filter(([block, vaas]) => vaas.length === 0).length).toEqual(1);
expect(entries.filter(([block, vaas]) => vaas.length === 1).length).toEqual(0);
expect(entries.filter(([block, vaas]) => vaas.length === 2).length).toEqual(0);
expect(vaasByBlock['10974196/2023-01-06T04:23:21.045Z']).toBeDefined();
expect(vaasByBlock['10974196/2023-01-06T04:23:21.045Z'].length).toEqual(1);
expect(vaasByBlock['10974196/2023-01-06T04:23:21.045Z'][0]).toEqual(
'8A31CDE56ED3ACB7239D705949BD6C164747210A6C4C69D98756E0CF6D22C9EB:3/0000000000000000000000007cf7b764e38a0a5e967972c1df77d432510564e2/256813'
);
});

View File

@ -0,0 +1,51 @@
import { expect, jest, test } from '@jest/globals';
import { INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN } from '../../common';
import { EVMWatcher } from '../EVMWatcher';
jest.setTimeout(60000);
const initialBaseBlock = Number(INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN.base);
test('getFinalizedBlockNumber', async () => {
const watcher = new EVMWatcher('base');
const blockNumber = await watcher.getFinalizedBlockNumber();
console.log('blockNumber', blockNumber);
expect(blockNumber).toBeGreaterThan(initialBaseBlock);
});
test('getMessagesForBlocks', async () => {
const watcher = new EVMWatcher('base');
const vaasByBlock = await watcher.getMessagesForBlocks(1544175, 1544185);
expect(vaasByBlock).toMatchObject({
'1544175/2023-07-20T18:28:17.000Z': [],
'1544176/2023-07-20T18:28:19.000Z': [],
'1544177/2023-07-20T18:28:21.000Z': [],
'1544178/2023-07-20T18:28:23.000Z': [],
'1544179/2023-07-20T18:28:25.000Z': [],
'1544180/2023-07-20T18:28:27.000Z': [],
'1544181/2023-07-20T18:28:29.000Z': [],
'1544182/2023-07-20T18:28:31.000Z': [],
'1544183/2023-07-20T18:28:33.000Z': [],
'1544184/2023-07-20T18:28:35.000Z': [],
'1544185/2023-07-20T18:28:37.000Z': [],
});
});
test('getMessagesForBlockWithWHMsg', async () => {
const watcher = new EVMWatcher('base');
const vaasByBlock = await watcher.getMessagesForBlocks(1557420, 1557429);
expect(vaasByBlock).toMatchObject({
'1557420/2023-07-21T01:49:47.000Z': [],
'1557421/2023-07-21T01:49:49.000Z': [],
'1557422/2023-07-21T01:49:51.000Z': [],
'1557423/2023-07-21T01:49:53.000Z': [],
'1557424/2023-07-21T01:49:55.000Z': [],
'1557425/2023-07-21T01:49:57.000Z': [
'0x9d217269dff740e74d21d32babbefe4bece7b88870b020f5505d3de3c6e59694:30/000000000000000000000000e2e2d9e31d7e1cc1178fe0d1c5950f6c809816a3/0',
],
'1557426/2023-07-21T01:49:59.000Z': [],
'1557427/2023-07-21T01:50:01.000Z': [],
'1557428/2023-07-21T01:50:03.000Z': [],
'1557429/2023-07-21T01:50:05.000Z': [],
});
});

View File

@ -0,0 +1,139 @@
import { expect, jest, test } from '@jest/globals';
import { CosmwasmWatcher } from '../CosmwasmWatcher';
import { TerraExplorerWatcher } from '../TerraExplorerWatcher';
import { InjectiveExplorerWatcher } from '../InjectiveExplorerWatcher';
jest.setTimeout(60000);
test('getFinalizedBlockNumber(terra2)', async () => {
const watcher = new CosmwasmWatcher('terra2');
const blockNumber = await watcher.getFinalizedBlockNumber();
expect(blockNumber).toBeGreaterThan(3181746);
});
test('getMessagesForBlocks(terra2)', async () => {
const watcher = new CosmwasmWatcher('terra2');
const vaasByBlock = await watcher.getMessagesForBlocks(3165191, 3165192);
const entries = Object.entries(vaasByBlock);
expect(entries.length).toEqual(2);
expect(entries.filter(([block, vaas]) => vaas.length === 0).length).toEqual(1);
expect(entries.filter(([block, vaas]) => vaas.length === 1).length).toEqual(1);
expect(entries.filter(([block, vaas]) => vaas.length === 2).length).toEqual(0);
expect(vaasByBlock['3165191/2023-01-03T12:12:54.922Z']).toBeDefined();
expect(vaasByBlock['3165191/2023-01-03T12:12:54.922Z'].length).toEqual(1);
expect(vaasByBlock['3165191/2023-01-03T12:12:54.922Z'][0]).toEqual(
'4FF15C860D78E65AA25DC41F634E158CC4D79BBD4EB5F72C0D09A1F6AC25810C:18/a463ad028fb79679cfc8ce1efba35ac0e77b35080a1abe9bebe83461f176b0a3/651'
);
});
test('getMessagesForBlocks(terra2) - empty blocks', async () => {
const watcher = new CosmwasmWatcher('terra2');
const vaasByBlock = await watcher.getMessagesForBlocks(5635710, 5635712);
const entries = Object.entries(vaasByBlock);
console.log(entries);
expect(entries.length).toEqual(3);
expect(entries.filter(([block, vaas]) => vaas.length === 0).length).toEqual(3);
expect(entries.filter(([block, vaas]) => vaas.length === 1).length).toEqual(0);
expect(entries.filter(([block, vaas]) => vaas.length === 2).length).toEqual(0);
expect(vaasByBlock['5635710/2023-06-23T12:54:10.949Z']).toBeDefined();
expect(vaasByBlock['5635711/2023-06-23T12:54:16.979Z']).toBeDefined();
expect(vaasByBlock['5635712/2023-06-23T12:54:23.010Z']).toBeDefined();
});
test.skip('getFinalizedBlockNumber(terra)', async () => {
const watcher = new CosmwasmWatcher('terra');
const blockNumber = await watcher.getFinalizedBlockNumber();
expect(blockNumber).toBeGreaterThan(10980872);
});
// flaky rpc, skip
test.skip('getMessagesForBlocks(terra)', async () => {
const watcher = new CosmwasmWatcher('terra');
const vaasByBlock = await watcher.getMessagesForBlocks(10974196, 10974197);
const entries = Object.entries(vaasByBlock);
expect(entries.length).toEqual(2);
expect(entries.filter(([block, vaas]) => vaas.length === 0).length).toEqual(1);
expect(entries.filter(([block, vaas]) => vaas.length === 1).length).toEqual(1);
expect(entries.filter(([block, vaas]) => vaas.length === 2).length).toEqual(0);
expect(vaasByBlock['10974196/2023-01-06T04:23:21.045Z']).toBeDefined();
expect(vaasByBlock['10974196/2023-01-06T04:23:21.045Z'].length).toEqual(1);
expect(vaasByBlock['10974196/2023-01-06T04:23:21.045Z'][0]).toEqual(
'8A31CDE56ED3ACB7239D705949BD6C164747210A6C4C69D98756E0CF6D22C9EB:3/0000000000000000000000007cf7b764e38a0a5e967972c1df77d432510564e2/256813'
);
});
test('getFinalizedBlockNumber(terra explorer)', async () => {
const watcher = new TerraExplorerWatcher('terra');
const blockNumber = await watcher.getFinalizedBlockNumber();
expect(blockNumber).toBeGreaterThan(10980872);
});
// flaky rpc, skip
test.skip('getMessagesForBlocks(terra explorer)', async () => {
const watcher = new TerraExplorerWatcher('terra');
const vaasByBlock = await watcher.getMessagesForBlocks(10974196, 10974197);
const entries = Object.entries(vaasByBlock);
expect(entries.length).toEqual(2);
expect(entries.filter(([block, vaas]) => vaas.length === 0).length).toEqual(1);
expect(entries.filter(([block, vaas]) => vaas.length === 1).length).toEqual(1);
expect(entries.filter(([block, vaas]) => vaas.length === 2).length).toEqual(0);
expect(vaasByBlock['10974196/2023-01-06T04:23:21.000Z']).toBeDefined();
expect(vaasByBlock['10974196/2023-01-06T04:23:21.000Z'].length).toEqual(1);
expect(vaasByBlock['10974196/2023-01-06T04:23:21.000Z'][0]).toEqual(
'8A31CDE56ED3ACB7239D705949BD6C164747210A6C4C69D98756E0CF6D22C9EB:3/0000000000000000000000007cf7b764e38a0a5e967972c1df77d432510564e2/256813'
);
});
// flaky rpc, skip
test.skip('getMessagesForBlocks(terra explorer, no useful info)', async () => {
const watcher = new TerraExplorerWatcher('terra');
const vaasByBlock = await watcher.getMessagesForBlocks(10975000, 10975010);
const entries = Object.entries(vaasByBlock);
expect(entries.length).toEqual(1);
expect(entries.filter(([block, vaas]) => vaas.length === 0).length).toEqual(1);
expect(entries.filter(([block, vaas]) => vaas.length === 1).length).toEqual(0);
expect(entries.filter(([block, vaas]) => vaas.length === 2).length).toEqual(0);
});
test('getFinalizedBlockNumber(xpla)', async () => {
const watcher = new CosmwasmWatcher('xpla');
const blockNumber = await watcher.getFinalizedBlockNumber();
expect(blockNumber).toBeGreaterThan(1980633);
});
test('getMessagesForBlocks(xpla)', async () => {
const watcher = new CosmwasmWatcher('xpla');
const vaasByBlock = await watcher.getMessagesForBlocks(1645812, 1645813);
const entries = Object.entries(vaasByBlock);
expect(entries.length).toEqual(2);
expect(entries.filter(([block, vaas]) => vaas.length === 0).length).toEqual(1);
expect(entries.filter(([block, vaas]) => vaas.length === 1).length).toEqual(1);
expect(entries.filter(([block, vaas]) => vaas.length === 2).length).toEqual(0);
expect(vaasByBlock['1645812/2022-12-13T22:02:58.413Z']).toBeDefined();
expect(vaasByBlock['1645812/2022-12-13T22:02:58.413Z'].length).toEqual(1);
expect(vaasByBlock['1645812/2022-12-13T22:02:58.413Z'][0]).toEqual(
'B01268B9A4A1F502E4278E203DBFF23AADEEFDDD91542880737845A5BDF9B3E4:28/8f9cf727175353b17a5f574270e370776123d90fd74956ae4277962b4fdee24c/19'
);
});
test('getFinalizedBlockNumber(injective)', async () => {
const watcher = new InjectiveExplorerWatcher();
const blockNumber = await watcher.getFinalizedBlockNumber();
expect(blockNumber).toBeGreaterThan(23333696);
});
test('getMessagesForBlocks(injective)', async () => {
const watcher = new InjectiveExplorerWatcher();
const vaasByBlock = await watcher.getMessagesForBlocks(24905509, 24905510);
// const vaasByBlock = await watcher.getMessagesForBlocks(4209642, 4209643); // Testnet
const entries = Object.entries(vaasByBlock);
expect(entries.length).toEqual(2);
expect(entries.filter(([block, vaas]) => vaas.length === 0).length).toEqual(1);
expect(entries.filter(([block, vaas]) => vaas.length === 1).length).toEqual(1);
expect(entries.filter(([block, vaas]) => vaas.length === 2).length).toEqual(0);
expect(vaasByBlock['24905509/2023-01-27T19:11:35.174Z']).toBeDefined();
expect(vaasByBlock['24905509/2023-01-27T19:11:35.174Z'].length).toEqual(1);
expect(vaasByBlock['24905509/2023-01-27T19:11:35.174Z'][0]).toEqual(
'0xab3f3f6ebd51c4776eeb5d0eef525207590daab24cf794434387747395a3e904:19/00000000000000000000000045dbea4617971d93188eda21530bc6503d153313/33'
);
});

View File

@ -0,0 +1,170 @@
import { CONTRACTS } from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
import { expect, test } from '@jest/globals';
import { INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN } from '../../common';
import { Block, EVMWatcher, LOG_MESSAGE_PUBLISHED_TOPIC } from '../EVMWatcher';
const initialAvalancheBlock = Number(INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN.avalanche);
const initialCeloBlock = Number(INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN.celo);
const initialOasisBlock = Number(INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN.oasis);
const initialKaruraBlock = Number(INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN.karura);
test('getBlock by tag', async () => {
const watcher = new EVMWatcher('avalanche');
const block = await watcher.getBlock('latest');
expect(block.number).toBeGreaterThan(initialAvalancheBlock);
expect(block.timestamp).toBeGreaterThan(1671672811);
expect(new Date(block.timestamp * 1000).toISOString() > '2022-12-21').toBeTruthy();
});
test('getBlock by number', async () => {
const watcher = new EVMWatcher('avalanche');
const block = await watcher.getBlock(initialAvalancheBlock);
expect(block.number).toEqual(initialAvalancheBlock);
expect(block.hash).toEqual('0x33b358fe68a2a11b6a5a5969f29f9223001e36a5d719734ba542b238d397f14e');
expect(block.timestamp).toEqual(1639504758);
expect(new Date(block.timestamp * 1000).toISOString()).toEqual('2021-12-14T17:59:18.000Z');
});
test('getBlocks', async () => {
const watcher = new EVMWatcher('avalanche');
const blocks = await watcher.getBlocks(
initialAvalancheBlock,
initialAvalancheBlock + watcher.maximumBatchSize - 1
);
expect(blocks.length).toEqual(watcher.maximumBatchSize);
expect(blocks[0].number).toEqual(initialAvalancheBlock);
expect(blocks[0].hash).toEqual(
'0x33b358fe68a2a11b6a5a5969f29f9223001e36a5d719734ba542b238d397f14e'
);
expect(blocks[0].timestamp).toEqual(1639504758);
expect(new Date(blocks[0].timestamp * 1000).toISOString()).toEqual('2021-12-14T17:59:18.000Z');
expect(blocks[99].number).toEqual(initialAvalancheBlock + 99);
expect(blocks[99].hash).toEqual(
'0x598080458a28e1241528d0d8c745425147179b86e353d5b0e5cc29e4154d13f6'
);
expect(blocks[99].timestamp).toEqual(1639504940);
});
test('getLogs', async () => {
const watcher = new EVMWatcher('avalanche');
const logs = await watcher.getLogs(9743300, 9743399, CONTRACTS.MAINNET.avalanche.core, [
LOG_MESSAGE_PUBLISHED_TOPIC,
]);
expect(logs.length).toEqual(2);
expect(logs[0].topics[0]).toEqual(LOG_MESSAGE_PUBLISHED_TOPIC);
expect(logs[0].blockNumber).toEqual(9743306);
expect(logs[0].transactionHash).toEqual(
'0x0ca26f28b454591e600ff03fcff60e35bf74f12ebe0c3ba2165a6b6d5a5e4da8'
);
});
test('getFinalizedBlockNumber', async () => {
const watcher = new EVMWatcher('avalanche');
const blockNumber = await watcher.getFinalizedBlockNumber();
expect(blockNumber).toBeGreaterThan(initialAvalancheBlock);
});
test('getMessagesForBlocks', async () => {
const watcher = new EVMWatcher('avalanche');
const vaasByBlock = await watcher.getMessagesForBlocks(9743300, 9743399);
const entries = Object.entries(vaasByBlock);
expect(entries.length).toEqual(100);
expect(entries.filter(([block, vaas]) => vaas.length === 0).length).toEqual(98);
expect(entries.filter(([block, vaas]) => vaas.length === 1).length).toEqual(2);
expect(entries.filter(([block, vaas]) => vaas.length === 2).length).toEqual(0);
expect(vaasByBlock['9743306/2022-01-18T17:59:33.000Z']).toBeDefined();
expect(vaasByBlock['9743306/2022-01-18T17:59:33.000Z'].length).toEqual(1);
expect(vaasByBlock['9743306/2022-01-18T17:59:33.000Z'][0]).toEqual(
'0x0ca26f28b454591e600ff03fcff60e35bf74f12ebe0c3ba2165a6b6d5a5e4da8:6/0000000000000000000000000e082f06ff657d94310cb8ce8b0d9a04541d8052/3683'
);
});
test('getBlock by tag (Oasis compatibility)', async () => {
const watcher = new EVMWatcher('oasis');
const block = await watcher.getBlock('latest');
expect(block.number).toBeGreaterThan(initialOasisBlock);
expect(block.timestamp).toBeGreaterThan(3895665);
expect(new Date(block.timestamp * 1000).toISOString() > '2022-12-21').toBeTruthy();
});
test('getBlock by tag (Celo compatibility)', async () => {
const watcher = new EVMWatcher('celo');
const block = await watcher.getBlock('latest');
expect(block.number).toBeGreaterThan(initialCeloBlock);
expect(block.timestamp).toBeGreaterThan(1671672811);
expect(new Date(block.timestamp * 1000).toISOString() > '2022-12-21').toBeTruthy();
});
test('getBlock by number (Celo compatibility)', async () => {
const watcher = new EVMWatcher('celo');
const block = await watcher.getBlock(initialCeloBlock);
expect(block.number).toEqual(initialCeloBlock);
expect(block.timestamp).toEqual(1652314820);
expect(new Date(block.timestamp * 1000).toISOString()).toEqual('2022-05-12T00:20:20.000Z');
});
test('getMessagesForBlocks (Celo compatibility)', async () => {
const watcher = new EVMWatcher('celo');
const vaasByBlock = await watcher.getMessagesForBlocks(13322450, 13322549);
const entries = Object.entries(vaasByBlock);
expect(entries.length).toEqual(100);
expect(entries.filter(([block, vaas]) => vaas.length === 0).length).toEqual(98);
expect(entries.filter(([block, vaas]) => vaas.length === 1).length).toEqual(2);
expect(entries.filter(([block, vaas]) => vaas.length === 2).length).toEqual(0);
expect(vaasByBlock['13322492/2022-06-02T17:40:22.000Z']).toBeDefined();
expect(vaasByBlock['13322492/2022-06-02T17:40:22.000Z'].length).toEqual(1);
expect(vaasByBlock['13322492/2022-06-02T17:40:22.000Z'][0]).toEqual(
'0xd73c03b0d59ecae473d50b61e8756bc19b54314869e9b11d0fda6f89dbcf3918:14/000000000000000000000000796dff6d74f3e27060b71255fe517bfb23c93eed/5'
);
});
test('getBlock by number (Karura compatibility)', async () => {
const watcher = new EVMWatcher('karura');
const latestBlock = await watcher.getFinalizedBlockNumber();
const moreRecentBlockNumber = 4646601;
// block {
// hash: '0xe370a794f27fc49d1e468c78e4f92f9aeefc949a62f919cea8d2bd81904840b5',
// number: 4646601,
// timestamp: 1687963290
// }
expect(latestBlock).toBeGreaterThan(moreRecentBlockNumber);
const block = await watcher.getBlock(moreRecentBlockNumber);
expect(block.number).toEqual(moreRecentBlockNumber);
expect(block.timestamp).toEqual(1687963290);
expect(new Date(block.timestamp * 1000).toISOString()).toEqual('2023-06-28T14:41:30.000Z');
});
test('getMessagesForBlocks (Karura compatibility)', async () => {
const watcher = new EVMWatcher('karura');
const vaasByBlock = await watcher.getMessagesForBlocks(4582511, 4582513);
const entries = Object.entries(vaasByBlock);
console.log('entries', entries);
expect(entries.length).toEqual(3);
expect(entries[0][0]).toEqual('4582511/2023-06-19T15:54:48.000Z');
// 4582512 was an error block. In that case, make sure it has the same timestamp as the previous block
// expect(entries[1][0]).toEqual('4582512/2023-06-19T15:54:48.000Z');
// As of July 15, 2023, the above block appears to have been fixed
expect(entries[1][0]).toEqual('4582512/2023-06-19T15:55:00.000Z');
});
test('getMessagesForBlocks (Karura compatibility 2)', async () => {
const watcher = new EVMWatcher('karura');
await watcher.getFinalizedBlockNumber(); // This has the side effect of initializing the latestFinalizedBlockNumber
const vaasByBlock = await watcher.getMessagesForBlocks(4595356, 4595358);
const entries = Object.entries(vaasByBlock);
console.log('entries', entries);
expect(entries.length).toEqual(3);
});
test('getBlock (Karura compatibility)', async () => {
const watcher = new EVMWatcher('karura');
await watcher.getFinalizedBlockNumber(); // This has the side effect of initializing the latestFinalizedBlockNumber
let block: Block = await watcher.getBlock(4582512); // 6969 block
console.log('block', block);
block = await watcher.getBlock(4595357); // Null block
console.log('block', block);
// block = await watcher.getBlock(4595358); // good block
// console.log('block', block);
// block = await watcher.getBlock(4619551); // good luck
// console.log('block', block);
});

View File

@ -0,0 +1,13 @@
import { expect, jest, test } from '@jest/globals';
import { INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN } from '../../common';
import { MoonbeamWatcher } from '../MoonbeamWatcher';
jest.setTimeout(60000);
const initialMoonbeamBlock = Number(INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN.moonbeam);
test('getFinalizedBlockNumber', async () => {
const watcher = new MoonbeamWatcher();
const blockNumber = await watcher.getFinalizedBlockNumber();
expect(blockNumber).toBeGreaterThan(initialMoonbeamBlock);
});

View File

@ -0,0 +1,93 @@
import { CONTRACTS } from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
import { describe, expect, jest, test } from '@jest/globals';
import { INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN } from '@wormhole-foundation/wormhole-monitor-common/dist/consts';
import { RPCS_BY_CHAIN } from '../../consts';
import { getNearProvider, getTransactionsByAccountId, NEAR_ARCHIVE_RPC } from '../../utils/near';
import { getMessagesFromBlockResults, NearWatcher } from '../NearWatcher';
jest.setTimeout(60000);
const INITIAL_NEAR_BLOCK = Number(INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN.near ?? 0);
test('getFinalizedBlockNumber', async () => {
const watcher = new NearWatcher();
const blockNumber = await watcher.getFinalizedBlockNumber();
expect(blockNumber).toBeGreaterThan(INITIAL_NEAR_BLOCK);
});
test('getMessagesForBlocks', async () => {
// requests that are too old for rpc node should error, be caught, and return an empty object
const watcher = new NearWatcher();
const messages = await watcher.getMessagesForBlocks(INITIAL_NEAR_BLOCK, INITIAL_NEAR_BLOCK);
expect(Object.keys(messages).length).toEqual(0);
});
describe('getNearProvider', () => {
test('with normal RPC', async () => {
const provider = await getNearProvider(RPCS_BY_CHAIN['near']!);
// grab last block from core contract
expect(await provider.block({ finality: 'final' })).toBeTruthy();
});
test('with archive RPC', async () => {
const provider = await getNearProvider(NEAR_ARCHIVE_RPC);
// grab first block with activity from core contract
expect(
await provider.block({ blockId: 'Asie8hpJFKaipvw8jh1wPfBwwbjP6JUfsQdCuQvwr3Sz' })
).toBeTruthy();
});
});
test('getTransactionsByAccountId', async () => {
let transactions = await getTransactionsByAccountId(
CONTRACTS.MAINNET.near.core,
10,
'1669732480649090392'
);
expect(transactions.length).toEqual(10);
expect(transactions[0].hash).toEqual('7jDrPnvErjbi3EHbQBcKT9wtiUPo77J9tpxXjE3KHcUp');
// test custom timestamp, filtering out non-function-call actions, and querying the last page
transactions = await getTransactionsByAccountId(
CONTRACTS.MAINNET.near.core,
15,
'1661429914932000000'
);
expect(transactions.length).toEqual(2);
expect(transactions[0].hash).toEqual('3VivTHp1W5ErWgsASUQvW1qwoTCsxYeke4498apDJsss');
});
describe('getMessagesFromBlockResults', () => {
test('with Provider', async () => {
const watcher = new NearWatcher();
const provider = await watcher.getProvider();
const messages = await getMessagesFromBlockResults(provider, [
await provider.block({ finality: 'final' }),
]);
expect(messages).toBeTruthy();
});
test.skip('with ArchiveProvider', async () => {
const provider = await getNearProvider(NEAR_ARCHIVE_RPC);
const messages = await getMessagesFromBlockResults(provider, [
await provider.block({ blockId: 'Bzjemj99zxe1h8kVp8H2hwVifmbQL8HT34LyPHzEK5qp' }),
await provider.block({ blockId: '4SHFxSo8DdP8DhMauS5iFqfmdLwLET3W3e8Lg9PFvBSn' }),
await provider.block({ blockId: 'GtQYaYMhrDHgLJJTroUaUzSR24E29twewpkqyudrCyVN' }),
]);
expect(messages).toMatchObject({
'72777217/2022-08-25T18:42:26.121Z': [],
'74616314/2022-09-21T18:48:05.392Z': [
'SYRSkE8pBWWLPZWJtHEGN5Hk7SPZ7kHgf4D1Q4viRcz:15/148410499d3fcda4dcfd68a1ebfcdddda16ab28326448d4aae4d2f0465cdfcb7/233',
],
'74714181/2022-09-23T05:15:53.722Z': [
'2xh2rLR3ehjRRjU1BbuHEhU6FbXiKp5rZ88niyKC6MBs:15/148410499d3fcda4dcfd68a1ebfcdddda16ab28326448d4aae4d2f0465cdfcb7/237',
],
});
// validate keys
const watcher = new NearWatcher();
const blockKey = Object.keys(messages).at(-1)!;
expect(watcher.isValidBlockKey(blockKey)).toBe(true);
expect(watcher.isValidVaaKey(messages[blockKey][0])).toBe(true);
});
});

View File

@ -0,0 +1,32 @@
import { expect, jest, test } from '@jest/globals';
import { INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN } from '../../common';
import { EVMWatcher } from '../EVMWatcher';
jest.setTimeout(60000);
const initialOptimismBlock = Number(INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN.optimism);
test('getFinalizedBlockNumber', async () => {
const watcher = new EVMWatcher('optimism');
const blockNumber = await watcher.getFinalizedBlockNumber();
console.log('blockNumber', blockNumber);
expect(blockNumber).toBeGreaterThan(105235062);
});
test('getMessagesForBlocks', async () => {
const watcher = new EVMWatcher('optimism');
const vaasByBlock = await watcher.getMessagesForBlocks(105235070, 105235080);
expect(vaasByBlock).toMatchObject({
'105235070/2023-06-06T16:28:37.000Z': [],
'105235071/2023-06-06T16:28:39.000Z': [],
'105235072/2023-06-06T16:28:41.000Z': [],
'105235073/2023-06-06T16:28:43.000Z': [],
'105235074/2023-06-06T16:28:45.000Z': [],
'105235075/2023-06-06T16:28:47.000Z': [],
'105235076/2023-06-06T16:28:49.000Z': [],
'105235077/2023-06-06T16:28:51.000Z': [],
'105235078/2023-06-06T16:28:53.000Z': [],
'105235079/2023-06-06T16:28:55.000Z': [],
'105235080/2023-06-06T16:28:57.000Z': [],
});
});

View File

@ -0,0 +1,13 @@
import { expect, jest, test } from '@jest/globals';
import { INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN } from '../../common';
import { PolygonWatcher } from '../PolygonWatcher';
jest.setTimeout(60000);
const initialPolygonBlock = Number(INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN.polygon);
test('getFinalizedBlockNumber', async () => {
const watcher = new PolygonWatcher();
const blockNumber = await watcher.getFinalizedBlockNumber();
expect(blockNumber).toBeGreaterThan(initialPolygonBlock);
});

View File

@ -0,0 +1,115 @@
import { expect, jest, test } from '@jest/globals';
import { INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN } from '@wormhole-foundation/wormhole-monitor-common/dist/consts';
import { SolanaWatcher } from '../SolanaWatcher';
jest.setTimeout(60000);
const INITIAL_SOLANA_BLOCK = Number(INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN.solana ?? 0);
test('getFinalizedBlockNumber', async () => {
const watcher = new SolanaWatcher();
const blockNumber = await watcher.getFinalizedBlockNumber();
expect(blockNumber).toBeGreaterThan(INITIAL_SOLANA_BLOCK);
});
test('getMessagesForBlocks - single block', async () => {
const watcher = new SolanaWatcher();
const messages = await watcher.getMessagesForBlocks(170799004, 170799004);
expect(Object.keys(messages).length).toBe(1);
expect(messages).toMatchObject({
'170799004/2023-01-04T16:43:43.000Z': [
'3zWJevhFB5XqUCdDmqoRLQUMgiNBmFZLaE5rZpSexH47Mx2268eimrj2FY23Z1mq1WXsRRkyhmMcsguXcSw7Rnh1:1/ec7372995d5cc8732397fb0ad35c0121e0eaa90d26f828a534cab54391b3a4f5/262100',
],
});
// validate keys
expect(watcher.isValidBlockKey(Object.keys(messages)[0])).toBe(true);
expect(watcher.isValidVaaKey(Object.values(messages).flat()[0])).toBe(true);
});
// temporary skip due to SolanaJSONRPCError: failed to get confirmed block: Block 171774030 cleaned up, does not exist on node. First available block: 176896202
test('getMessagesForBlocks - fromSlot is skipped slot', async () => {
const watcher = new SolanaWatcher();
const messages = await watcher.getMessagesForBlocks(171774030, 171774032); // 171774024 - 171774031 are skipped
expect(Object.keys(messages).length).toBe(1);
expect(messages).toMatchObject({ '171774032/2023-01-10T13:36:38.000Z': [] });
});
test('getMessagesForBlocks - toSlot is skipped slot', async () => {
const watcher = new SolanaWatcher();
const messages = await watcher.getMessagesForBlocks(171774023, 171774025);
expect(messages).toMatchObject({ '171774023/2023-01-10T13:36:34.000Z': [] });
});
test('getMessagesForBlocks - empty block', async () => {
// Even if there are no messages, last block should still be returned
const watcher = new SolanaWatcher();
const messages = await watcher.getMessagesForBlocks(170979766, 170979766);
expect(Object.keys(messages).length).toBe(1);
expect(messages).toMatchObject({ '170979766/2023-01-05T18:40:24.000Z': [] });
});
// temporary skip due to SolanaJSONRPCError: failed to get confirmed block: Block 174108865 cleaned up, does not exist on node. First available block: 176892532
test.skip('getMessagesForBlocks - block with no transactions', async () => {
const watcher = new SolanaWatcher();
  await expect(watcher.getMessagesForBlocks(174108861, 174108861)).rejects.toThrowError(
'solana: invalid block range'
);
let messages = await watcher.getMessagesForBlocks(174108661, 174108861);
expect(Object.keys(messages).length).toBe(1);
expect(Object.values(messages).flat().length).toBe(0);
messages = await watcher.getMessagesForBlocks(174108863, 174109061);
expect(Object.keys(messages).length).toBe(1);
expect(Object.values(messages).flat().length).toBe(0);
});
test('getMessagesForBlocks - multiple blocks', async () => {
const watcher = new SolanaWatcher();
const messages = await watcher.getMessagesForBlocks(171050470, 171050474);
expect(Object.keys(messages).length).toBe(2);
expect(Object.values(messages).flat().length).toBe(2);
});
test('getMessagesForBlocks - multiple blocks, last block empty', async () => {
const watcher = new SolanaWatcher();
const messages = await watcher.getMessagesForBlocks(170823000, 170825000);
expect(Object.keys(messages).length).toBe(3);
expect(Object.values(messages).flat().length).toBe(2); // 2 messages, last block has no message
});
test('getMessagesForBlocks - multiple blocks containing more than `getSignaturesLimit` WH transactions', async () => {
const watcher = new SolanaWatcher();
watcher.getSignaturesLimit = 10;
const messages = await watcher.getMessagesForBlocks(171582367, 171583452);
expect(Object.keys(messages).length).toBe(3);
expect(Object.values(messages).flat().length).toBe(3);
});
test('getMessagesForBlocks - multiple calls', async () => {
const watcher = new SolanaWatcher();
const messages1 = await watcher.getMessagesForBlocks(171773021, 171773211);
const messages2 = await watcher.getMessagesForBlocks(171773212, 171773250);
const messages3 = await watcher.getMessagesForBlocks(171773251, 171773500);
const allMessageKeys = [
...Object.keys(messages1),
...Object.keys(messages2),
...Object.keys(messages3),
];
const uniqueMessageKeys = [...new Set(allMessageKeys)];
expect(allMessageKeys.length).toBe(uniqueMessageKeys.length); // assert no duplicate keys
});
test('getMessagesForBlocks - handle failed transactions', async () => {
const watcher = new SolanaWatcher();
const messages = await watcher.getMessagesForBlocks(94401321, 94501321);
expect(Object.keys(messages).length).toBe(6);
expect(Object.values(messages).flat().length).toBe(5);
expect(
Object.values(messages)
.flat()
.map((m) => m.split('/')[2])
.join(',')
).toBe('4,3,2,1,0');
});
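
For reference, the keys asserted in these tests follow two shapes: block keys are `<blockNumber>/<ISO-8601 timestamp>` and VAA keys are `<txHash>:<chainId>/<emitterAddress>/<sequence>`, which is what `isValidBlockKey` and `isValidVaaKey` check. The sketch below only illustrates those shapes; the parser names are hypothetical and not part of the watcher API.

```ts
// Hypothetical helpers that mirror the key shapes asserted above; the real
// validation lives in the watcher's isValidBlockKey/isValidVaaKey methods.
interface ParsedBlockKey {
  blockNumber: number;
  timestamp: string; // ISO-8601
}

interface ParsedVaaKey {
  txHash: string;
  chainId: number;
  emitter: string;
  sequence: bigint;
}

function parseBlockKey(key: string): ParsedBlockKey {
  const [blockNumber, timestamp] = key.split('/');
  return { blockNumber: Number(blockNumber), timestamp };
}

function parseVaaKey(key: string): ParsedVaaKey {
  const [txHash, rest] = key.split(':');
  const [chainId, emitter, sequence] = rest.split('/');
  return { txHash, chainId: Number(chainId), emitter, sequence: BigInt(sequence) };
}

// Example using the single-block Solana test fixture above:
const vaa = parseVaaKey(
  '3zWJevhFB5XqUCdDmqoRLQUMgiNBmFZLaE5rZpSexH47Mx2268eimrj2FY23Z1mq1WXsRRkyhmMcsguXcSw7Rnh1:1/ec7372995d5cc8732397fb0ad35c0121e0eaa90d26f828a534cab54391b3a4f5/262100'
);
console.log(vaa.chainId, vaa.sequence.toString()); // 1 262100
```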

View File

@ -0,0 +1,32 @@
import { expect, jest, test } from '@jest/globals';
import { INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN } from '../../common';
import { SuiWatcher } from '../SuiWatcher';
jest.setTimeout(60000);
const INITIAL_SEQUENCE_NUMBER = Number(INITIAL_DEPLOYMENT_BLOCK_BY_CHAIN.sui ?? 1581000);
test('getFinalizedSequenceNumber', async () => {
const watcher = new SuiWatcher();
const blockNumber = await watcher.getFinalizedBlockNumber();
console.log('Received blockNumber:', blockNumber);
  expect(blockNumber).toBeGreaterThan(INITIAL_SEQUENCE_NUMBER);
});
// This test is skipped: getMessagesForBlocks() grabs the latest checkpoint and works backwards,
// so its expectations drift as time goes on and the paging triggers HTTP 429s until that is reworked.
test.skip('getMessagesForBlocks', async () => {
const watcher = new SuiWatcher();
const messages = await watcher.getMessagesForBlocks(1581997, 1581997);
console.log(messages);
const entries = Object.entries(messages);
expect(entries.length).toEqual(46);
expect(entries.filter(([block, vaas]) => vaas.length === 0).length).toEqual(1);
expect(entries.filter(([block, vaas]) => vaas.length === 1).length).toEqual(40);
expect(entries.filter(([block, vaas]) => vaas.length === 2).length).toEqual(5);
expect(messages['1584976/2023-05-03T17:15:00.000Z']).toBeDefined();
expect(messages['1584976/2023-05-03T17:15:00.000Z'].length).toEqual(1);
expect(messages['1584976/2023-05-03T17:15:00.000Z'][0]).toEqual(
'HydDe4yNBBu98ak46fPdw7qCZ4x7h8DsYdMfeWEBf5ge:21/ccceeb29348f71bdd22ffef43a2a19c1f5b5e17c5cca5411529120182672ade5/187'
);
});
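
As the skip comment above notes, the Sui watcher pages backwards from the latest checkpoint and can hit HTTP 429 rate limits. A generic retry-with-backoff wrapper, sketched below, is one way to soften that while the pagination is reworked; the helper is illustrative only and not part of this codebase.

```ts
// Hypothetical helper: retry an async call with exponential backoff on failure
// (e.g. rate-limit errors). Not part of the watcher code.
async function withBackoff<T>(
  fn: () => Promise<T>,
  retries = 5,
  baseDelayMs = 500
): Promise<T> {
  for (let attempt = 0; ; attempt++) {
    try {
      return await fn();
    } catch (e) {
      if (attempt >= retries) throw e;
      // Exponential backoff: 500ms, 1s, 2s, 4s, ...
      await new Promise((resolve) => setTimeout(resolve, baseDelayMs * 2 ** attempt));
    }
  }
}

// Example (illustrative): wrap a paged fetch that may be rate limited.
// const messages = await withBackoff(() => watcher.getMessagesForBlocks(1581997, 1581997));
```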

View File

@ -0,0 +1,7 @@
export { AptosWatcher } from './AptosWatcher';
export { BSCWatcher } from './BSCWatcher';
export { CosmwasmWatcher } from './CosmwasmWatcher';
export { EVMWatcher } from './EVMWatcher';
export { InjectiveExplorerWatcher } from './InjectiveExplorerWatcher';
export { MoonbeamWatcher } from './MoonbeamWatcher';
export { PolygonWatcher } from './PolygonWatcher';

View File

@ -0,0 +1,57 @@
import { ChainName } from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
import { AlgorandWatcher } from './AlgorandWatcher';
import { AptosWatcher } from './AptosWatcher';
import { ArbitrumWatcher } from './ArbitrumWatcher';
import { BSCWatcher } from './BSCWatcher';
import { CosmwasmWatcher } from './CosmwasmWatcher';
import { EVMWatcher } from './EVMWatcher';
import { InjectiveExplorerWatcher } from './InjectiveExplorerWatcher';
import { MoonbeamWatcher } from './MoonbeamWatcher';
import { NearWatcher } from './NearWatcher';
import { PolygonWatcher } from './PolygonWatcher';
import { SolanaWatcher } from './SolanaWatcher';
import { TerraExplorerWatcher } from './TerraExplorerWatcher';
import { Watcher } from './Watcher';
import { SuiWatcher } from './SuiWatcher';
export function makeFinalizedWatcher(chainName: ChainName): Watcher {
if (chainName === 'solana') {
return new SolanaWatcher();
} else if (chainName === 'ethereum' || chainName === 'karura' || chainName === 'acala') {
return new EVMWatcher(chainName, 'finalized');
} else if (chainName === 'bsc') {
return new BSCWatcher();
} else if (chainName === 'polygon') {
return new PolygonWatcher();
} else if (
chainName === 'avalanche' ||
chainName === 'oasis' ||
chainName === 'fantom' ||
chainName === 'klaytn' ||
chainName === 'celo' ||
chainName === 'optimism' ||
chainName === 'base'
) {
return new EVMWatcher(chainName);
} else if (chainName === 'algorand') {
return new AlgorandWatcher();
} else if (chainName === 'moonbeam') {
return new MoonbeamWatcher();
} else if (chainName === 'arbitrum') {
return new ArbitrumWatcher();
} else if (chainName === 'aptos') {
return new AptosWatcher();
} else if (chainName === 'near') {
return new NearWatcher();
} else if (chainName === 'injective') {
return new InjectiveExplorerWatcher();
} else if (chainName === 'terra') {
return new TerraExplorerWatcher('terra');
} else if (chainName === 'terra2' || chainName === 'xpla') {
return new CosmwasmWatcher(chainName);
} else if (chainName === 'sui') {
return new SuiWatcher();
} else {
throw new Error(`Attempted to create finalized watcher for unsupported chain ${chainName}`);
}
}
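
A minimal usage sketch for the factory above, assuming only the two watcher methods the tests exercise (`getFinalizedBlockNumber()` and `getMessagesForBlocks()`); the polling loop, look-back window, and import path are illustrative, not the service's actual wiring.

```ts
import { ChainName } from '@certusone/wormhole-sdk/lib/cjs/utils/consts';
import { makeFinalizedWatcher } from './utils'; // assumed path for the factory above

// Illustrative one-shot poll: for each chain, fetch the finalized block number and
// scan a small window behind it for VAA keys. The real service persists results instead.
async function pollOnce(chains: ChainName[]): Promise<void> {
  for (const chainName of chains) {
    const watcher = makeFinalizedWatcher(chainName);
    const finalized = await watcher.getFinalizedBlockNumber();
    const vaasByBlock = await watcher.getMessagesForBlocks(finalized - 10, finalized);
    for (const [blockKey, vaaKeys] of Object.entries(vaasByBlock)) {
      if (vaaKeys.length > 0) {
        console.log(chainName, blockKey, vaaKeys);
      }
    }
  }
}

pollOnce(['ethereum', 'solana']).catch(console.error);
```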

View File

@ -0,0 +1,20 @@
{
"compilerOptions": {
"outDir": "dist",
"incremental": true,
"composite": true,
"target": "esnext",
"module": "commonjs",
"declaration": true,
"sourceMap": true,
"strict": true,
"moduleResolution": "node",
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"noFallthroughCasesInSwitch": true,
"resolveJsonModule": true,
"lib": ["es2022"]
},
"include": ["scripts", "src", "src/abi/*.json"]
}