Added CI badge

This commit is contained in:
Kirill Fedoseev 2019-11-26 18:50:58 +03:00
commit f6db1118b4
151 changed files with 5059 additions and 35933 deletions

172
.circleci/config.yml Normal file
View File

@ -0,0 +1,172 @@
# CircleCI 2.1 pipeline: builds the tss docker image, then runs the bridge
# integration tests against a full local demo environment.
version: 2.1
# Inline orb holding the shared executor and reusable commands.
orbs:
  bridge:
    executors:
      # Node 10 container; TARGET_NETWORK selects the local development setup.
      node-dev:
        docker:
          - image: circleci/node:10.15
        environment:
          TARGET_NETWORK: development
    commands:
      fetch_repo:
        description: "Checkout and init submodules"
        steps:
          - checkout
          - run:
              name: "Initialize submodules"
              command: git submodule update --init
      setup_docker:
        description: "Set up remote docker engine"
        steps:
          - setup_remote_docker:
              version: 18.09.3
              docker_layer_caching: true
      restore_tss_image:
        description: "Restores tss image from cache"
        steps:
          - restore_cache:
              name: "Restore tss image from cache"
              key: "tss-v1-{{ .Branch }}"
          # Load the restored archive into docker (no-op when the cache missed).
          - load_tss
      save_tss:
        description: "Save tss image to workspace"
        steps:
          - run:
              name: "Save tss image to archive"
              command: |
                mkdir -p ./workspace
                docker save tss | gzip > ./workspace/tss.tar.gz
          # Shared with downstream jobs via the workspace, and cached per branch.
          - persist_to_workspace:
              name: "Save tss image to workspace"
              root: ./workspace
              paths:
                - tss.tar.gz
          - save_cache:
              name: "Save tss image to cache"
              paths:
                - ./workspace/tss.tar.gz
              key: "tss-v1-{{ .Branch }}"
      load_tss:
        description: "Load tss image from workspace"
        steps:
          - run:
              name: "Load tss image from archive"
              # '|| true' keeps the step green when no archive exists yet.
              command: docker load -i ./workspace/tss.tar.gz || true
      save_docker_logs:
        description: "Save docker logs"
        parameters:
          # 1-based index of the validator whose container logs are collected.
          validator:
            type: integer
            default: 1
        steps:
          - run:
              name: "Saving validator<< parameters.validator >> logs"
              command: |
                mkdir -p ./artifacts/logs/validator<< parameters.validator >>
                docker logs validator<< parameters.validator >>_rabbitmq_1 > ./artifacts/logs/validator<< parameters.validator >>/rabbitmq.log
                docker logs validator<< parameters.validator >>_redis_1 > ./artifacts/logs/validator<< parameters.validator >>/redis.log
                docker logs validator<< parameters.validator >>_proxy_1 > ./artifacts/logs/validator<< parameters.validator >>/proxy.log
                docker logs validator<< parameters.validator >>_eth-watcher_1 > ./artifacts/logs/validator<< parameters.validator >>/eth-watcher.log
                docker logs validator<< parameters.validator >>_bnc-watcher_1 > ./artifacts/logs/validator<< parameters.validator >>/bnc-watcher.log
                docker logs validator<< parameters.validator >>_signer_1 > ./artifacts/logs/validator<< parameters.validator >>/signer.log
                docker logs validator<< parameters.validator >>_keygen_1 > ./artifacts/logs/validator<< parameters.validator >>/keygen.log
              when: always
      save_artifacts:
        description: "Save and upload tests results, save validator logs to artifacts"
        steps:
          - run:
              name: "Save tests results"
              command: |
                mkdir -p ./artifacts/test_results/mocha
                docker cp "tests:/tests/results.xml" "./artifacts/test_results/mocha/results.xml"
              when: always
          - store_test_results:
              path: ./artifacts/test_results/mocha
              when: always
          - save_docker_logs:
              validator: 1
          - save_docker_logs:
              validator: 2
          - save_docker_logs:
              validator: 3
          - run:
              name: "Save ethereum logs"
              command: |
                mkdir -p ./artifacts/logs/ethereum
                docker logs ethereum-testnet_side-oracle_1 > ./artifacts/logs/ethereum/side-oracle.log
                docker logs ethereum-testnet_ganache_home_1 > ./artifacts/logs/ethereum/ganache_home.log
                docker logs ethereum-testnet_ganache_side_1 > ./artifacts/logs/ethereum/ganache_side.log
              when: always
          - run:
              name: "Save binance logs"
              command: |
                mkdir -p ./artifacts/logs/binance
                docker logs binance-testnet_node_1 > ./artifacts/logs/binance/node.log
                docker logs binance-testnet_api-server_1 > ./artifacts/logs/binance/api-server.log
                docker logs binance-testnet_http-api_1 > ./artifacts/logs/binance/http-api.log
              when: always
          - store_artifacts:
              path: ./artifacts
              destination: artifacts
              when: always
jobs:
  # Build (or rebuild from cache) the tss docker image and share it downstream.
  init_tss:
    executor: bridge/node-dev
    steps:
      - bridge/fetch_repo
      - bridge/setup_docker
      - bridge/restore_tss_image
      - run:
          name: "Build tss image"
          # Fall back to an uncached build if the --cache-from build fails.
          command: docker build --cache-from tss -t tss ./src/tss || docker build -t tss ./src/tss
      - bridge/save_tss
  # Spin up the full demo environment (ethereum + binance testnets, three
  # validators) and run the integration test suite against it.
  run_tests:
    executor: bridge/node-dev
    steps:
      - bridge/fetch_repo
      - bridge/setup_docker
      - attach_workspace:
          at: ./workspace
      - bridge/load_tss
      - run:
          name: "Init tests environment"
          command: |
            BLOCK_TIME=3 ./demo/start-ethereum-environment.sh
            ./demo/start-binance-environment.sh
            N=1 ./demo/validator-demo.sh -d
            N=2 ./demo/validator-demo.sh -d
            N=3 ./demo/validator-demo.sh -d
      - run:
          name: "Wait until validator nodes are ready"
          # Poll each validator's proxy /info endpoint from inside its network.
          command: |
            docker run --network validator1_test_network --entrypoint ash appropriate/curl:latest -c "until curl -X GET http://proxy:8002/info > /dev/null 2>&1; do sleep 1; done"
            docker run --network validator2_test_network --entrypoint ash appropriate/curl:latest -c "until curl -X GET http://proxy:8002/info > /dev/null 2>&1; do sleep 1; done"
            docker run --network validator3_test_network --entrypoint ash appropriate/curl:latest -c "until curl -X GET http://proxy:8002/info > /dev/null 2>&1; do sleep 1; done"
          no_output_timeout: 3m
      - run:
          name: "Build and prepare tests container"
          # The tests container must reach every testnet and validator network.
          command: |
            docker build -t tests ./tests
            docker create --env-file ./tests/.env --name tests tests
            docker network connect binance_net tests
            docker network connect ethereum_side_rpc_net tests
            docker network connect ethereum_home_rpc_net tests
            docker network connect validator1_test_network tests
            docker network connect validator2_test_network tests
            docker network connect validator3_test_network tests
      - run:
          name: "Run tests"
          command: docker start -a tests
      - run:
          name: "Check alive docker containers"
          command: docker ps
          when: always
      - bridge/save_artifacts
workflows:
  version: 2
  main:
    jobs:
      - init_tss
      - run_tests:
          requires:
            - init_tss

8
.eslintignore Normal file
View File

@ -0,0 +1,8 @@
# Paths excluded from ESLint: dependencies, generated data/build output,
# and vendored git submodules.
node_modules/
data/
demo/validator*/development
demo/validator*/staging
src/deploy/deploy/*/build
src/deploy/deploy-home/contracts/openzeppelin-solidity
src/deploy/deploy-test/contracts/openzeppelin-solidity
src/tss/multi-party-ecdsa

18
.eslintrc Normal file
View File

@ -0,0 +1,18 @@
{
"extends": [
"plugin:node/recommended",
"airbnb-base"
],
"plugins": [
"node"
],
"rules": {
"semi": [1, "never"],
"comma-dangle": [1, "never"],
"node/no-missing-require": 0,
"import/no-unresolved": 0,
"no-return-await": 0,
"no-await-in-loop": 0,
"no-constant-condition": 0
}
}

15
.gitignore vendored
View File

@ -1,20 +1,13 @@
.idea/
node_modules/
.DS_Store
**/keys*.store
**/signature
**/params
data/
demo/validator*/development
demo/validator*/staging
demo/validator*/.keys.staging
demo/ganache_home_db/
demo/ganache_side_db/
demo/*.zip
src/deploy/deploy-home/build/
src/deploy/deploy-side/build/
src/deploy/deploy-test/build/
src/deploy/deploy*/build/
src/deploy/.keys.staging
src/test-services/.keys.staging
src/test-services/.keys.development
test.js
src/test.js
tests/results.xml
package-lock.json

395
DEMO.md
View File

@ -13,7 +13,13 @@ This demo supports two ways of dealing with the Ethereum side of a bridge:
As part of this demo two EVM-based chains ([ganache](https://github.com/trufflesuite/ganache-cli)) will be started:
- **Home chain** - it keeps an ERC20 contract (`0xd5fE0D28e058D375b0b038fFbB446Da37E85fFdc`) and the bridge contract (`0x44c158FE850821ae69DaF37AADF5c539e9d0025B`).
- **Side chain** - the MPC orchestration contract (`0xd5fE0D28e058D375b0b038fFbB446Da37E85fFdc`) is located here
Both chains are run in separate docker containers.
Both chains are run in separate docker containers.
JSON-RPC ports are mapped to the host (7545 - side chain, 8545 - home chain)
Local Binance network within separate docker container will be used.
In addition, some part of Binance accelerated node [HTTP API](https://docs.binance.org/api-reference/dex-api/paths.html)
will be emulated, since a regular full-node API does not provide all required features.
APIs and NODE RPC ports are mapped to the host (26657 - RPC, 8080 - api-server, 8000 - emulated accelerated node api)
#### Staging mode
@ -22,6 +28,9 @@ As part of this demo two EVM-based public chains will be used:
- **Side chain** - Sokol POA testnet keeps the MPC orchestration contract.
Interaction with chains is done by using public available RPC urls.
Public Binance testnet will be used for demo purposes.
Interaction with chain is done by using a public available HTTP API endpoint.
### Demo validators
Three validators will be run and only two validators are required to confirm the transfer. Every validator node is a set of docker containers (`eth-watcher`, `bnc-watcher`, `signer`, `proxy`, `redis`, `rabbitmq`).
@ -30,7 +39,163 @@ Three validators will be run and only two validators are required to confirm the
The public Binance Chain testnet will keep a BEP2 token.
### Running demo
### Running demo in development mode
1. Preparation
* (1.1) Clone the repo and initialize git submodules:
```
git clone --recurse-submodules https://github.com/k1rill-fedoseev/eth-to-bnc-bridge.git
```
* (1.2) Build TSS to be used in the bridge oracles:
```
docker build -t tss ./src/tss
```
* (1.3) Generate several private keys for bridge testing. (e. g. `openssl rand -hex 32`)
* (1.4) Get Ethereum and Binance addresses for recently created accounts via running
```
./src/test-services/getAddresses/run.sh <PRIVATE_KEY>
```
2. Run test environment
* (2.1) Modify `src/deploy/deploy-test/.env.development` and specify the amount of tokens to mint in the parameter `TOKEN_INITIAL_MINT`.
* (2.2) Run Ethereum testnets and deploy contracts
```
TARGET_NETWORK=development ./demo/start-ethereum-environment.sh
```
This command will also mint tokens, the owner of tokens is the address that corresponds to the
private key specified in `HOME_PRIVATE_KEY` of `src/deploy/deploy-test/.env.development` (`0xA374DC09057D6B3253d04fACb15736B43fBc7943`).
* (2.4) Run Binance testnet and api services
```
./demo/start-binance-environment.sh
```
This command will also issue a BEP2 token, the owner of tokens is the address that corresponds to the
private key specified in `FOREIGN_PRIVATE_KEY` of `src/test-services/binanceSend/.env.development` (`tbnb1z7u9f8mcuwxanns9xa6qgjtlka0d392epc0m9x`).
The balance of `tbnb1z7u9f8mcuwxanns9xa6qgjtlka0d392epc0m9x` will contain 10000 BNB and 10000 Test Tokens.
* (2.5) Send few tokens and coins from the current token owner to the first account. Coins are needed to pay transaction fees.
```
./src/test-services/ethereumSend/run.sh <first account Ethereum address> 5 0.5
```
* (2.6) Check that the tokens were transferred properly:
```
./src/test-services/ethereumBalance/run.sh <first account Ethereum address>
```
3. Run validators nodes:
* (3.1) Run three validators in separate terminal sessions.
```
N=1 ./demo/validator-demo.sh
N=2 ./demo/validator-demo.sh
N=3 ./demo/validator-demo.sh
```
Wait for when the line like the following appears:
```
keygen_1 | Generated multisig account in binance chain: tbnb1mutgnx9n9devmrjh3d0wz332fl8ymgel6tydx6
```
The line contains the address of the bridge account in the Binance Chain.
4. Initialize the state of the bridge account in the Binance Chain
* (4.1) Fill the balance Fund with BNB coins as so the account will be able to make transactions:
```
./src/test-services/binanceSend/run.sh <address of the bridge account> 100 1
```
To check the balance of the bridge account use `./src/test-services/binanceBalance/run.sh <address of the bridge account>`
5. Transfer tokens from Ethereum-based chain to the Binance Chain:
* (5.1) Send some amount of tokens to the bridge contract, for `PRIVATE_KEY` use some of the keys from step (1.3):
```
PRIVATE_KEY=<test account private key> ./src/test-services/ethereumSend/run.sh bridge 5
```
* (5.2) The validators will catch the event and start the process to sign the transaction.
* (5.3) As soon as the signature is generated and sent, the balance of the bridge account in both chains will be changed:
```
./src/test-services/ethereumBalance/run.sh <ethereum bridge address>
```
should report non-zero balance,
```
./src/test-services/binanceBalance/run.sh <binance bridge address>
```
should report about the balance reduction.
* (5.4) Check that the tokens were transferred properly to the test account:
```
./src/test-services/binanceBalance/run.sh <test account address>
```
6. Transfer tokens from the Binance Chain to Ethereum-based chain:
* (6.1) Send some amount of tokens to the bridge account:
```
PRIVATE_KEY=<test account private key> ./src/test-services/binanceSend/run.sh <binance bridge address> 3
```
* (6.2) Check the balances of the test account on both sides of the bridge to see that the funds were transferred properly using commands from (5.3), (5.4).
7. Bridge supports changing the list of validators and required voting threshold via voting process, and then keys regeneration.
* (7.0) Obtain information about current epoch, current list validators, upcoming epoch information, bridge state via:
```
curl http://localhost:$PORT/info
```
Where `$PORT` is specific port for some validator oracle.
The response object contains lots of useful information about current bridge state.
```json5
{
// current epoch number, in which bridge is operating
"epoch": 2,
// next epoch number, for which votes and keygen operations are applied
"nextEpoch": 3,
// threshold number for current epoch,
// at least threshold votes are required for any changes in next epoch
"threshold": 2,
// threshold number for next epoch
"nextThreshold": 2,
// current bridge addresses in home and foreign networks
"homeBridgeAddress": "0x44c158FE850821ae69DaF37AADF5c539e9d0025B",
"foreignBridgeAddress": "tbnb19z22khee969yj05dckg9usvmwndkucpyl543xk",
// current set of validators
"validators": [
"0x99Eb3D86663c6Db090eFFdBC20510Ca9f836DCE3",
"0x6352e3e6038e05b9da00C84AE851308f9774F883"
],
// set of validators for the next epoch
"nextValidators": [
"0x99Eb3D86663c6Db090eFFdBC20510Ca9f836DCE3",
"0x6352e3e6038e05b9da00C84AE851308f9774F883",
"0xAa006899B0EC407De930bA8A166DEfe59bBfd3DC"
],
// balances of bridge in both networks
"homeBalance": 50,
"foreignBalanceTokens": 100,
"foreignBalanceNative": 0.0994,
// current bridge status, can be one of: ready, voting, keygen, funds_transfer
"bridgeStatus": "ready",
// current votes count for starting voting, starting/cancelling keygen
// -1 means that enough confirmations are already collected
"votesForVoting": 0,
"votesForKeygen": 0,
"votesForCancelKeygen": 0,
// collected confirmations for changing epoch to nextEpoch
// -1 means that enough confirmations are already collected
"confirmationsForFundsTransfer": 0
}
```
* (7.1) Start voting process for next epoch, via sending `$THRESHOLD` requests to `/vote/startVoting` url. Bridge
state should be successfully changed to `voting`.
* 7.2 Changing next epoch bridge validators / threshold
* (7.2.1) Add / remove validator in next validators list, via sending `$THRESHOLD` requests to
`/vote/addValidator/$ADDRESS` / `/vote/removeValidator/$ADDRESS`.
* (7.2.2) Change threshold for the next epoch, via sending `$THRESHOLD` requests to `/vote/changeThreshold/$THRESHOLD`.
* (7.3) Start keygen process for next epoch, via sending `$THRESHOLD` requests to `/vote/startKeygen` url. Bridge
state should be successfully changed to `keygen`, and in some time to `funds_transfer`, and then to `ready`.
* (7.4) If the keygen process was stopped at some state (i.e. one validator turned off their oracle),
it can be cancelled via sending `$THRESHOLD` requests to `/vote/cancelKeygen` url. After
keygen cancellation, bridge state will return to `voting`, and later it can be restarted manually
once again.
### Running demo in staging mode
Staging mode demo is similar to development mode demo, but requires additional manual actions for preparing demo.
Make sure, to first run demo in development mode, before trying to run it in the staging environment.
1. Preparation
* (1.1) Download `tbnbcli` from https://github.com/binance-chain/node-binary/tree/master/cli.
@ -72,205 +237,107 @@ The public Binance Chain testnet will keep a BEP2 token.
```
* (1.10) Build TSS to be used in the bridge oracles:
```
docker build -t tss -f ./src/tss/Dockerfile-local ./src/tss
docker build -t tss ./src/tss
```
2. Run test environment
* 2.1 Running in development mode (using local ganache networks):
* (2.1.1) Modify `src/deploy/deploy-test/.env.development` and specify the amount of tokens to mint in the parameter `TOKEN_INITIAL_MINT`.
* (2.1.2) Run testnets and deploy contracts
```
TARGET_NETWORK=development ./demo/start-environment.sh
```
This command will also mint tokens, the owner of tokens is the address that corresponds to the
private key specified in `HOME_PRIVATE_KEY` of `src/deploy/deploy-test/.env.development` (`0xA374DC09057D6B3253d04fACb15736B43fBc7943`).
* 2.2 Running in staging mode (using public test networks):
* (2.2.1) Prepare three private keys for validators. Get the Ethereum account addresses for these keys.
* (2.2.2) Modify `src/deploy/deploy-home/.env.staging` and specify the token contract address in
the Kovan network via `HOME_TOKEN_ADDRESS` (use empty address `0x` if you want to create new
ERC20 contract while deployment). \
Set `VALIDATOR_ADDRESS_*` to Ethereum addresses obtained in the previous step.
* (2.2.3) Modify `src/deploy/.keys.staging` and specify private keys for prefunded accounts in both networks.
These accounts are used for contract deployment. Use `src/deploy/.keys.staging.example` as an example.
* (2.2.4) Deploy contracts
```
TARGET_NETWORK=staging ./demo/start-environment.sh
```
This command will deploy ERC20 contract and also mint tokens if you left `HOME_TOKEN_ADDRESS` empty,
the owner of tokens is the address that corresponds to the private key specified in
`HOME_PRIVATE_KEY` of `src/deploy/.keys.staging`.\
Deployed contract address will be automatically updated in all required validators
and test services configs.
* (2.2.5) Prefund validator accounts in home network (Kovan):
```
TARGET_NETWORK=staging ./src/test-services/ethereumSend/run.sh <Nth validator address> 0 0.5
```
* (2.2.6) Prefund validator accounts in side network (Sokol):
```
TARGET_NETWORK=staging ./src/test-services/sidePrefund/run.sh <Nth validator address> 1
```
* (2.3) Get the Ethereum account address for the first test account from its private key (step 1.2). [NiftyWallet](https://forum.poa.network/c/nifty-wallet) could be used for this.
* (2.4) Send few tokens and coins from the current token owner to the first account. Coins are needed to pay transaction fees.
* (2.1) Prepare three private keys for validators. Get the Ethereum account addresses for these keys.
* (2.2) Modify `src/deploy/deploy-home/.env.staging` and specify the token contract address in
the Kovan network via `HOME_TOKEN_ADDRESS` (use empty address `0x` if you want to create new
ERC20 contract while deployment). \
Set `VALIDATOR_ADDRESS_*` to Ethereum addresses obtained in the previous step.
* (2.3) Modify `src/deploy/.keys.staging` and specify private keys for prefunded accounts in both networks.
These accounts are used for contract deployment. Use `src/deploy/.keys.staging.example` as an example.
* (2.4) Deploy contracts
```
TARGET_NETWORK=<target network> ./src/test-services/ethereumSend/run.sh <first account Ethereum address> 5000000000000000000 0.5
TARGET_NETWORK=staging ./demo/start-ethereum-environment.sh
```
* (2.5) Check that the tokens were transferred properly:
This command will deploy ERC20 contract and also mint tokens if you left `HOME_TOKEN_ADDRESS` empty,
the owner of tokens is the address that corresponds to the private key specified in
`HOME_PRIVATE_KEY` of `src/deploy/.keys.staging`.\
Deployed contract addresses will be automatically updated in all required validators
and test services configs.
* (2.5) Prefund validator accounts in home network (Kovan):
```
TARGET_NETWORK=<target network> ./src/test-services/ethereumBalance/run.sh <first account Ethereum address>
TARGET_NETWORK=staging ./src/test-services/ethereumSend/run.sh <Nth validator address> 0 0.5
```
* (2.6) Prefund validator accounts in side network (Sokol):
```
TARGET_NETWORK=staging ./src/test-services/sidePrefund/run.sh <Nth validator address> 1
```
* (2.7) Send few tokens and coins from the current token owner to the first account. Coins are needed to pay transaction fees.
```
TARGET_NETWORK=staging ./src/test-services/ethereumSend/run.sh <first account Ethereum address> 5 0.5
```
* (2.8) Check that the tokens were transferred properly:
```
TARGET_NETWORK=staging ./src/test-services/ethereumBalance/run.sh <first account Ethereum address>
```
3. Run validators nodes:
* (3.1) Modify the parameter `FOREIGN_ASSET` in `demo/validator1/.env.<network>`, `demo/validator2/.env.<network>`
and `demo/validator3/.env.<network>` to specify the identificator of the token (step 1.8) that the oracle will watch. \
* (3.1) Modify the parameter `FOREIGN_ASSET` in `demo/validator1/.env.staging`, `demo/validator2/.env.staging`
and `demo/validator3/.env.staging` to the identificator of the token (step 1.8) that the oracle will track. \
For staging environment additionally specify `VALIDATOR_PRIVATE_KEY` in the `demo/validator<N>/.keys.staging` (step 2.2.1)
* (3.2) Run three validators in separate terminal sessions.
```
N=1 TARGET_NETWORK=<network> ./demo/validator-demo.sh
N=2 TARGET_NETWORK=<network> ./demo/validator-demo.sh
N=3 TARGET_NETWORK=<network> ./demo/validator-demo.sh
N=1 TARGET_NETWORK=staging ./demo/validator-demo.sh
N=2 TARGET_NETWORK=staging ./demo/validator-demo.sh
N=3 TARGET_NETWORK=staging ./demo/validator-demo.sh
```
Wait for when the line like the following appears:
```
keygen_1 | Generated multisig account in binance chain: tbnb1mutgnx9n9devmrjh3d0wz332fl8ymgel6tydx6
```
The line contains the address of the bridge address in the Binance Chain.
The line contains the address of the bridge address in the Binance Chain.
4. Initialize the state of the bridge account in the Binance Chain
* (4.1) Fill the balance Fund with BNB coins as so the account will be able to make transactions:
```
./tbnbcli send --from test_account1 --to <address of the bridge account> \
--amount 1000000000:BNB --chain-id=Binance-Chain-Nile \
--node=data-seed-pre-2-s1.binance.org:80 --memo "initialization"
```
* (4.2) Fund the account with bridgeable tokens. **This transaction should have 'funding' in the memo**:
./src/test-services/binanceSend/run.sh <address of the bridge account> 100 1
```
./tbnbcli send --from test_account1 --to <address of the bridge account> \
--amount 3141500000000000:ETB0819-863 --chain-id=Binance-Chain-Nile \
--node=data-seed-pre-2-s1.binance.org:80 --memo "funding"
```
The oracles should catch this transaction but will ignore it:
```
bnc-watcher_1 | Fetching new transactions
bnc-watcher_1 | Sending api transactions request
bnc-watcher_1 | Found 1 new transactions
```
To check the balance of the bridge account the [Binance Testnet Explorer](https://testnet-explorer.binance.org) could be used. It should report about two assets owned by the account.
To check the balance of the bridge account use `./src/test-services/binanceBalance/run.sh`
or [Binance Testnet Explorer](https://testnet-explorer.binance.org). It should report about two assets owned by the account.
5. Transfer tokens from Ethereum-based chain to the Binance Chain:
* (5.1) Modify the parameter `HOME_PRIVATE_KEY`
(in `src/test-services/ethereumSend/.env.development` or `src/test-services/.keys.staging`)
as so it contains the private key of the first test account (step 1.2)
* (5.2) Send some amount of tokens to the bridge contract:
* (5.1) Send some amount of tokens to the bridge contract, for `PRIVATE_KEY` use some of the keys from step (1.3):
```
TARGET_NETWORK=<network> ./src/test-services/ethereumSend/run.sh bridge 5000000000000000000
TARGET_NETWORK=staging PRIVATE_KEY=<test account private key> ./src/test-services/ethereumSend/run.sh bridge 5
```
* (5.3) The validators will catch the event and start the process to sign the transaction.
* (5.4) As soon as the signature is generated and sent, the balance of the bridge account in both chains will be changed:
* (5.2) The validators will catch the event and start the process to sign the transaction.
* (5.3) As soon as the signature is generated and sent, the balance of the bridge account in both chains will be changed:
```
TARGET_NETWORK=<network> ./src/test-services/ethereumBalance/run.sh 0x94b40CC641Ed7db241A1f04C8896ba6f6cC36b85
./src/test-services/ethereumBalance/run.sh <ethereum bridge address>
```
should report non-zero balance
should report non-zero balance,
```
./tbnbcli account <address of the bridge account> \
--chain-id=Binance-Chain-Nile --node=data-seed-pre-2-s1.binance.org:80 --trust-node
./src/test-services/binanceBalance/run.sh <binance bridge address>
```
should report about the balance reduction.
The balance and transactions related to the bridge account in the Binance Chain could be checked in [Binance Testnet Explorer](https://testnet-explorer.binance.org).
* (5.5) Check that the tokens were transferred to the first test account either by `tbnbcli` or by [Binance Testnet Explorer](https://testnet-explorer.binance.org).
* (5.4) Check that the tokens were transferred properly to the test account:
```
./src/test-services/binanceBalance/run.sh <test account address>
```
The balance and transactions related to the bridge account in the Binance Chain could be checked in
[Binance Testnet Explorer](https://testnet-explorer.binance.org).
6. Transfer tokens from the Binance Chain to Ethereum-based chain:
* Use either `tbnbcli` or the [Binance testnet wallet](https://testnet.binance.org/) to send tokens to the bridge account:
* (6.1) Send some amount of tokens to the bridge account:
```
./tbnbcli send --from test_account1 --to <address of the bridge account> \
--amount 300000000:ETB0819-863 --chain-id=Binance-Chain-Nile \
--node=data-seed-pre-2-s1.binance.org:80 --memo "any note"
TARGET_NETWORK=staging PRIVATE_KEY=<test account private key> ./src/test-services/binanceSend/run.sh <binance bridge address> 3
```
* Check the balances of the test account on both sides of the bridge to see that the funds were transferred properly.
7. Bridge supports changing the list of validators and required voting threshold via voting process, and then keys regeneration.
* (7.0) Obtain information about current epoch, current list validators, upcoming epoch information, bridge state via:
```
./curl http://localhost:$PORT/info
```
Where `$PORT` is specific port for some validator oracle.
The response object contains lots of useful information about current bridge state.
```json5
{
// current epoch number, in which bridge is operating
"epoch": 2,
// next epoch number, for which votes and keygen operations are applied
"nextEpoch": 3,
// threshold number for current epoch,
// threshold + 1 votes are required for any changes in next epoch
"threshold": 1,
// threshold number for next epoch
"nextThreshold": 1,
// current bridge addresses in home and foreign networks
"homeBridgeAddress": "0x44c158FE850821ae69DaF37AADF5c539e9d0025B",
"foreignBridgeAddress": "tbnb19z22khee969yj05dckg9usvmwndkucpyl543xk",
// current set of validators
"validators": [
"0x99Eb3D86663c6Db090eFFdBC20510Ca9f836DCE3",
"0x6352e3e6038e05b9da00C84AE851308f9774F883"
],
// set of validators for the next epoch
"nextValidators": [
"0x99Eb3D86663c6Db090eFFdBC20510Ca9f836DCE3",
"0x6352e3e6038e05b9da00C84AE851308f9774F883",
"0xAa006899B0EC407De930bA8A166DEfe59bBfd3DC"
],
// balances of bridge in both networks
"homeBalance": 50,
"foreignBalanceTokens": 100,
"foreignBalanceNative": 0.0994,
// current bridge status, can be one of: ready, voting, keygen, funds_transfer
"bridgeStatus": "ready",
// current votes count for starting voting, starting/cancelling keygen
// -1 means that enough confirmations are already collected
"votesForVoting": 0,
"votesForKeygen": 0,
"votesForCancelKeygen": 0,
// collected confirmations for changing epoch to nextEpoch
// -1 means that enough confirmations are already collected
"confirmationsForFundsTransfer": 0
}
```
* (7.1) Start voting process for next epoch, via sending `$THRESHOLD + 1` requests to `/vote/startVoting` url. Bridge
state should be successfully changed to `voting`.
* 7.2 Changing next epoch bridge validators / threshold
* (7.2.1) Add / remove validator in next validators list, via sending `$THRESHOLD + 1` requests to
`/vote/addValidator/$ADDRESS` / `/vote/removeValidator/$ADDRESS`.
* (7.2.2) Change threshold for the next epoch, via sending `$THRESHOLD + 1` requests to `/vote/changeThreshold/$THRESHOLD`.
* (7.3) Start keygen process for next epoch, via sending `$THRESHOLD + 1` requests to `/vote/startKeygen` url. Bridge
state should be successfully changed to `keygen`, and in some time to `funds_transfer`, and then to `ready`.
* (7.4) If the keygen process was stopped at some state (i.e. one validator turned off their oracle),
it can be cancelled via sending `$THRESHOLD + 1` requests to `/vote/cancelKeygen` url. After
keygen cancellation, bridge state will return to `voting`, and later it can be restarted manually
once again.
* (6.2) Check the balances of the test account on both sides of the bridge to see that the funds were transferred properly using commands from (5.3), (5.4).
7. Steps for updating validators list are exactly the same for both demo modes. Check the steps from development mode.
### Finish demo
1. Stop all validator instances by pressing `^C` in the terminal.
2. Stop the local testnets:
2. Stop the local testnets (if any):
```
docker kill ganache_home
docker kill ganache_side
docker kill binance-testnet_http-api_1
docker kill binance-testnet_node_1
docker kill binance-testnet_api-server_1
docker kill ethereum-testnet_ganache_home_1
docker kill ethereum-testnet_ganache_side_1
docker kill ethereum-testnet_side-oracle_1
```
3. Remove virtual networks:
3. Remove testnets and validators data:
```
docker network rm blockchain_home
docker network rm blockchain_side
docker network rm validator1_test_network
docker network rm validator2_test_network
docker network rm validator3_test_network
```
4. Remove testnets and validators data:
```
TARGET_NETWORK=<network> ./demo/clean.sh
TARGET_NETWORK=development ./demo/clean.sh
```
#### Testing tools for both sides of the bridge

View File

@ -9,13 +9,31 @@ TARGET_NETWORK=${TARGET_NETWORK:=development}
echo "Cleaning $TARGET_NETWORK network"
docker kill $(docker ps | grep validator[1-3]_ | awk '{print $1}') > /dev/null 2>&1 || true
docker rm $(docker ps -a | grep validator[1-3]_ | awk '{print $1}') > /dev/null 2>&1 || true
docker kill ganache_home ganache_side > /dev/null 2>&1 || true
docker rm ganache_home ganache_side > /dev/null 2>&1 || true
docker kill $(docker ps | grep binance-testnet_ | awk '{print $1}') > /dev/null 2>&1 || true
docker rm $(docker ps -a | grep binance-testnet_ | awk '{print $1}') > /dev/null 2>&1 || true
docker kill $(docker ps | grep ethereum-testnet_ | awk '{print $1}') > /dev/null 2>&1 || true
docker rm $(docker ps -a | grep ethereum-testnet_ | awk '{print $1}') > /dev/null 2>&1 || true
if [[ "$TARGET_NETWORK" == "development" ]]; then
rm -rf ganache_side_db
rm -rf ganache_home_db
mkdir ganache_side_db
mkdir ganache_home_db
docker volume rm ganache_side_data > /dev/null 2>&1 || true
docker volume rm ganache_home_data > /dev/null 2>&1 || true
docker volume rm binance_data > /dev/null 2>&1 || true
docker volume rm binance_marketdata > /dev/null 2>&1 || true
docker network rm ethereum_side_rpc_net > /dev/null 2>&1 || true
docker network rm ethereum_home_rpc_net > /dev/null 2>&1 || true
docker network rm binance_net > /dev/null 2>&1 || true
docker network rm binance-testnet_binance_rpc_net > /dev/null 2>&1 || true
fi
docker network rm validator1_test_network > /dev/null 2>&1 || true
docker network rm validator2_test_network > /dev/null 2>&1 || true
docker network rm validator3_test_network > /dev/null 2>&1 || true
for (( I = 1; I < 4; ++I )); do
DIRNAME="validator$I"
rm -rf "$DIRNAME/$TARGET_NETWORK"

View File

@ -1,417 +0,0 @@
// JXA (JavaScript for Automation) demo driver for macOS.
ObjC.import('stdlib')
// Application handles used to drive Terminal windows and send keystrokes.
const terminal = Application('Terminal')
const system = Application('System Events')
const curApp = Application.currentApplication()
curApp.includeStandardAdditions = true
// Ethereum addresses of the three demo validators.
const validator1 = '0x99Eb3D86663c6Db090eFFdBC20510Ca9f836DCE3'
const validator2 = '0xAa006899B0EC407De930bA8A166DEfe59bBfd3DC'
const validator3 = '0x6352e3e6038e05b9da00C84AE851308f9774F883'
// Test accounts with matching Ethereum / Binance Chain addresses.
// NOTE: demo-only keys; never use them outside local testing.
const userAccounts = [
  {
    privateKey: '7ed93ad7753e00b52265a73dfbbcd2296256772965323fcb9a6320b5cd084b89',
    ethAddress: '0x4db6b4bd0a3fdc03b027a60f1c48f05c572312aa',
    bncAddress: 'tbnb14r3z8xk7qsar3vwj05w8cd8gqwk7g6gfurlt5l'
  },
  {
    privateKey: '2ad6e3a232ad3ea058b61352302118a99085600ff8b6eec4ccf0066a33756231',
    ethAddress: '0xf7ca4aed1795e424433498cef43f6a3825c88731',
    bncAddress: 'tbnb1efjg7xt98t67ql2cmwjc5860lgayet9l8m55ym'
  },
  {
    privateKey: 'eb6dd328677b3fa2822fb8e834507e569bda52e8ffa49266df0f2de239c4ec98',
    ethAddress: '0xad6c8127143032d843a260c5d379d8d9b3d51f15',
    bncAddress: 'tbnb12epcy4p7ktas0nlyrfuektcyh0e83dwzuq73f4'
  }
]
// Filled in later, once the bridge multisig account is generated.
let bridgeBncAddress
// Snapshot of currently open Terminal windows, and the demo windows we open.
const windows = terminal.windows()
const wins = []
function saveBlockchainData () {
console.log('Saving blockchain data')
curApp.doShellScript('zip -r ./demo/ganache_home_backup.zip ./demo/ganache_home_db')
curApp.doShellScript('zip -r ./demo/ganache_side_backup.zip ./demo/ganache_side_db')
}
function reloadBlockchainData () {
console.log('Reloading blockchain data')
curApp.doShellScript('unzip -d . ./demo/ganache_home_backup.zip')
curApp.doShellScript('unzip -d . ./demo/ganache_side_backup.zip')
}
function closeOldWindows () {
for (let i in windows) {
try {
windows[i].selectedTab()
if (windows[i].selectedTab().customTitle().startsWith('Validator') || windows[i].selectedTab().customTitle() === 'Services') {
windows[i].close()
}
} catch (e) {
}
}
}
function killValidators () {
terminal.activate()
for (let i = 0; i < 3; i++) {
wins[i].frontmost = true
delay(0.5)
system.keystroke('c', { using: 'control down' })
}
}
function openNewWindows () {
for (let i = 0; i < 3; i++) {
// open new terminal
const tab = terminal.doScript()
// get opened window
const winId = terminal.windows[0].id()
wins[i] = terminal.windows.byId(winId)
tab.customTitle = `Validator ${i + 1}`
}
wins[0].bounds = { x: 0, y: 23, width: 558, height: 1027 }
wins[1].bounds = { x: 559, y: 374, width: 560, height: 676 }
wins[2].bounds = { x: 1120, y: 374, width: 560, height: 676 }
// open new terminal
const tab = terminal.doScript()
// get opened window
const winId = terminal.windows[0].id()
wins[3] = terminal.windows.byId(winId)
tab.customTitle = `Services`
wins[3].bounds = { x: 559, y: 23, width: 1120, height: 350 }
terminal.activate()
delay(0.5)
}
function apiRequestBackground (url) {
const response = curApp.doShellScript(`curl -s -X GET "${url}"`)
try {
return JSON.parse(response)
} catch (e) {
return response
}
}
function exec (n, script) {
terminal.doScript(script, { in: wins[n - 1] })
}
function wait (n) {
while (wins[n - 1].selectedTab().busy()) {
delay(0.2)
}
}
function execSync (n, script) {
exec(n, script)
wait(n)
}
function waitAll () {
wait(1)
wait(2)
wait(3)
wait(4)
}
function waitLog (n, log) {
do {
const s = wins[n - 1].selectedTab().contents().split('\n').find(x => x.includes(log))
if (s) {
return s
}
delay(0.2)
} while (true)
}
function waitApi (n, url, check) {
do {
const res = apiRequestBackground(`http://localhost:500${n}${url}`)
const checkerRes = check ? check(res) : true
if (checkerRes)
return checkerRes
delay(3)
} while (true)
}
function prefundEthAddresses () {
for (let { ethAddress } of userAccounts) {
execSync(4, `./src/test-services/ethereumSend/run.sh ${ethAddress} 100`)
}
}
function prefundBncAddresses () {
for (let { bncAddress } of userAccounts) {
execSync(4, `./src/test-services/binanceSend/run.sh ${bncAddress} 100 0.1`)
}
}
function initBalances () {
userAccounts.forEach(account => {
account.ethBalance = getEthTokenBalance(account.ethAddress)
account.bncBalance = getBncTokenBalance(account.bncAddress)
})
}
function getBncTokenBalance (address) {
const res = curApp.doShellScript(`./src/test-services/binanceBalance/run.sh ${address}`)
return parseFloat(/KFT-94F: [0-9.]+/.exec(res)[0].split(' ')[1])
}
function waitBncTokenBalance (address, balance) {
while (true) {
const newBalance = getBncTokenBalance(address)
if (Math.abs(newBalance - balance) < 0.0001)
return newBalance
delay(3)
}
}
function getEthTokenBalance (address) {
const res = curApp.doShellScript(`./src/test-services/ethereumBalance/run.sh ${address}`)
return parseFloat(/[0-9.]+ tokens/.exec(res)[0].split(' ')[0])
}
function waitEthTokenBalance (address, balance) {
while (true) {
const newBalance = getEthTokenBalance(address)
if (Math.abs(newBalance - balance) < 0.0001)
return newBalance
delay(3)
}
}
function apiRequest (n, url, suffix) {
execSync(4, `curl -s -X GET http://localhost:500${n}${url} ${suffix ? suffix : ''}`)
}
function printState (msg) {
execSync(4, `echo "${msg}"`)
apiRequest(1, '/info', '| jq .')
}
function initCwd () {
const cwd = $.getenv('PWD')
for (let i = 1; i <= 4; i++) {
exec(i, `cd "${cwd}"`)
}
waitAll()
}
function killDockerContainers () {
execSync(4, `docker kill $(docker ps | grep validator | awk '{print $1}') > /dev/null 2>&1 || true`)
execSync(4, `docker kill ganache_side ganache_home > /dev/null 2>&1 || true`)
}
function clean () {
killDockerContainers()
execSync(4, `./demo/clean.sh`)
exec(1, `clear`)
exec(2, `clear`)
exec(3, `clear`)
waitAll()
}
function testEthToBnc () {
console.log('Testing eth => bnc')
// try token transfer in eth => bnc direction
let prevBridgeHomeBalance
let prevBridgeForeignBalance
waitApi(1, '/info', res => {
prevBridgeHomeBalance = res.homeBalance
prevBridgeForeignBalance = res.foreignBalanceTokens
return true
})
userAccounts.forEach((account, i) => {
execSync(4, `PRIVATE_KEY=${account.privateKey} ./src/test-services/ethereumSend/run.sh bridge ${5 + i}`)
account.ethBalance -= 5 + i
})
const delta = (9 + userAccounts.length) * userAccounts.length / 2
waitApi(1, '/info', res => res.homeBalance === prevBridgeHomeBalance + delta && res.foreignBalanceTokens === prevBridgeForeignBalance - delta)
userAccounts.forEach((account, i) => {
account.bncBalance = waitBncTokenBalance(account.bncAddress, account.bncBalance + 5 + i)
})
printState(`Token transfer in eth => bnc direction succeed`)
console.log('Testing eth => bnc is OK')
}
function testBncToEth () {
console.log('Testing bnc => eth')
// try token transfer in bnc => eth direction
let prevBridgeHomeBalance
let prevBridgeForeignBalance
waitApi(1, '/info', res => {
prevBridgeHomeBalance = res.homeBalance
prevBridgeForeignBalance = res.foreignBalanceTokens
return true
})
userAccounts.forEach((account , i) => {
execSync(4, `PRIVATE_KEY=${account.privateKey} ./src/test-services/binanceSend/run.sh ${bridgeBncAddress} ${3 + i}`)
account.bncBalance -= 3 + i
})
const delta = (5 + userAccounts.length) * userAccounts.length / 2
waitApi(1, '/info', res => res.homeBalance === prevBridgeHomeBalance - delta && res.foreignBalanceTokens === prevBridgeForeignBalance + delta)
userAccounts.forEach((account, i) => {
account.ethBalance = waitEthTokenBalance(account.ethAddress, account.ethBalance + 3 + i)
})
printState(`Token transfer in bnc => eth direction succeed`)
console.log('Testing bnc => eth is OK')
}
function testRemoveValidator () {
console.log('Testing removing validator')
apiRequest(1, '/vote/startVoting')
apiRequest(2, '/vote/startVoting')
waitApi(1, '/info', res => res.bridgeStatus === 'voting')
apiRequest(1, `/vote/removeValidator/${validator2}`)
apiRequest(3, `/vote/removeValidator/${validator2}`)
waitApi(1, '/info', res => res.nextValidators.length === 2)
apiRequest(1, '/vote/startKeygen')
apiRequest(3, '/vote/startKeygen')
waitApi(1, '/info', res => {
if (res.bridgeStatus === 'ready' && res.epoch === 2 && res.validators.length === 2) {
bridgeBncAddress = res.foreignBridgeAddress
return true
}
return false
})
printState(`Removing validator succeed`)
console.log('Testing removing validator is OK')
}
function testAddValidator () {
console.log('Testing adding validator')
apiRequest(1, '/vote/startVoting')
apiRequest(3, '/vote/startVoting')
waitApi(1, '/info', res => res.bridgeStatus === 'voting')
apiRequest(1, `/vote/addValidator/${validator2}`)
apiRequest(3, `/vote/addValidator/${validator2}`)
waitApi(1, '/info', res => res.nextValidators.length === 3)
apiRequest(1, '/vote/startKeygen')
apiRequest(3, '/vote/startKeygen')
waitApi(1, '/info', res => {
if (res.bridgeStatus === 'ready' && res.epoch === 3 && res.validators.length === 3) {
bridgeBncAddress = res.foreignBridgeAddress
return true
}
return false
})
printState(`Adding validator succeed`)
console.log('Testing adding validator is OK')
}
function testChangeThreshold () {
console.log('Testing changing threshold')
apiRequest(1, '/vote/startVoting')
apiRequest(3, '/vote/startVoting')
waitApi(1, '/info', res => res.bridgeStatus === 'voting')
apiRequest(2, `/vote/changeThreshold/2`)
apiRequest(3, `/vote/changeThreshold/2`)
waitApi(1, '/info', res => res.nextThreshold === 2)
apiRequest(1, '/vote/startKeygen')
apiRequest(2, '/vote/startKeygen')
waitApi(1, '/info', res => {
if (res.bridgeStatus === 'ready' && res.epoch === 4 && res.threshold === 2) {
bridgeBncAddress = res.foreignBridgeAddress
return true
}
return false
})
printState(`Changing threshold succeed`)
console.log('Testing changing threshold is OK')
}
function run () {
closeOldWindows()
openNewWindows()
initCwd()
clean()
if ($.getenv('RELOAD') !== 'true') {
execSync(4, `./demo/start-environment.sh`)
prefundEthAddresses()
saveBlockchainData()
} else {
reloadBlockchainData()
execSync(4, `./demo/start-environment.sh`)
}
prefundBncAddresses()
initBalances()
exec(1, `N=1 ./demo/validator-demo.sh`)
exec(2, `N=2 ./demo/validator-demo.sh`)
exec(3, `N=3 ./demo/validator-demo.sh`)
// wait until binance account willl be generated
waitLog(1, 'Generated multisig account in binance chain')
waitApi(1, '/info', res => {
if (res.epoch === 1) {
bridgeBncAddress = res.foreignBridgeAddress
return true
}
return false
})
// prefund binance account
execSync(4, `./src/test-services/binanceSend/run.sh ${bridgeBncAddress} 100 0.1`)
// wait until binance prefund transaction will be processed
waitApi(1, '/info', res => res.foreignBalanceTokens === 100)
printState(`Binance bridge account at ${bridgeBncAddress} for epoch 1 is generated and prefunded`)
testEthToBnc()
testBncToEth()
testRemoveValidator()
testEthToBnc()
testBncToEth()
testAddValidator()
testEthToBnc()
testBncToEth()
testChangeThreshold()
testEthToBnc()
testBncToEth()
console.log('PASSED ALL TESTS')
killValidators()
killDockerContainers()
}

View File

@ -1,5 +0,0 @@
#!/bin/bash
RELOAD=${RELOAD:=false}
RELOAD="$RELOAD" osascript -l JavaScript ./demo/scenarios/macos/main.jxa

View File

@ -0,0 +1,61 @@
#!/bin/bash
set -e
cd $(dirname "$0")
# Run a tbnbcli subcommand inside the testnet node container, auto-confirming
# the key passphrase and forcing JSON output.
# Bug fix: "$@" must be quoted — the unquoted $@ word-split arguments that
# contain spaces (e.g. --token-name "DEV Token" used below).
tbnbcli() {
  echo 12345678 | docker exec -i binance-testnet_node_1 ./tbnbcli "$@" --from node0 --node http://node:26657 --chain-id Binance-Dev --json
}
# The persistent chain-data volume doubles as the "already initialized" flag.
if [[ "$(docker volume ls | grep binance_data)" ]]; then
  echo "Restarting binance test network"
else
  echo "Creating new binance test network"
  echo "Removing old environment"
  # Best-effort teardown of any leftover containers/volumes; errors are ignored.
  docker kill $(docker ps -a | grep binance-testnet_ | awk '{print $1}') > /dev/null 2>&1 || true
  docker rm $(docker ps -a | grep binance-testnet_ | awk '{print $1}') > /dev/null 2>&1 || true
  docker volume rm binance_marketdata > /dev/null 2>&1 || true
  # Pre-create the network/volumes referenced as `external: true` in the
  # compose file, so `docker-compose up` below does not fail.
  docker network create binance_net > /dev/null 2>&1 || true
  docker volume create binance_marketdata > /dev/null 2>&1 || true
  docker volume create binance_data > /dev/null 2>&1 || true
  # Remember that the test asset still has to be issued further below.
  need_to_deploy=true
fi
echo "Building required binaries"
docker build -t testnet-binaries ../src/binance-testnet > /dev/null 2>&1 || true
echo "Running environment"
docker-compose -f ../src/binance-testnet/docker-compose.yml up --build -d
if [[ -n "$need_to_deploy" ]]; then
  echo "Issuing test asset"
  TOKEN_SYMBOL=''
  # The node needs time to start accepting transactions; retry until the
  # issue tx produces a log line we can parse a symbol out of.
  while [[ -z "$TOKEN_SYMBOL" ]]; do
    sleep 2
    ISSUED_LOG=$(tbnbcli token issue --symbol DEV --total-supply 10000000000000000 --token-name "DEV Token" | jq .Response.log)
    # The issued symbol (e.g. DEV-9BA) is the 7 chars before the closing quote.
    TOKEN_SYMBOL=${ISSUED_LOG:(-8):7}
  done
  echo "Issued $TOKEN_SYMBOL"
  # Propagate the freshly issued symbol into every env file that references it.
  sed -i 's/FOREIGN_ASSET=.*$/FOREIGN_ASSET='"$TOKEN_SYMBOL"'/' ../src/test-services/binanceBalance/.env.development
  sed -i 's/FOREIGN_ASSET=.*$/FOREIGN_ASSET='"$TOKEN_SYMBOL"'/' ../src/test-services/binanceSend/.env.development
  sed -i 's/FOREIGN_ASSET=.*$/FOREIGN_ASSET='"$TOKEN_SYMBOL"'/' ../tests/.env
  for file in ./validator*/.env.development; do
    sed -i 's/FOREIGN_ASSET=.*$/FOREIGN_ASSET='"$TOKEN_SYMBOL"'/' "$file"
  done
  sleep 2
  echo "Sending tokens to controlled address"
  # Fund the fixed dev account with BNB and the new asset in one tx.
  tbnbcli token multi-send \
    --transfers '[{"to":"tbnb1z7u9f8mcuwxanns9xa6qgjtlka0d392epc0m9x","amount":"10000000000000000:BNB,10000000000000000:'"$TOKEN_SYMBOL"'"}]'
  sleep 2
else
  echo "Tokens are already issued, run clean.sh first if you want to redeploy everything"
fi

View File

@ -7,35 +7,14 @@ cd ..
# either development or staging
TARGET_NETWORK=${TARGET_NETWORK:=development}
BLOCK_TIME=${BLOCK_TIME:=3}
DEPLOY_DIR="`pwd`/src/deploy"
TEST_SERVICES_DIR="`pwd`/src/test-services"
DEMO_DIR="`pwd`/demo"
SIDE_GANACHE_DB="$DEMO_DIR/ganache_side_db"
HOME_GANACHE_DB="$DEMO_DIR/ganache_home_db"
start_dev_blockchain_networks() {
echo "Starting side test blockchain"
docker kill ganache_side > /dev/null 2>&1 || true
docker network create blockchain_side > /dev/null 2>&1 || true
docker run -d --network blockchain_side --rm --name ganache_side -v "$SIDE_GANACHE_DB:/app/db" \
-p "7545:8545" \
trufflesuite/ganache-cli:latest \
-m "shrug dwarf easily blade trigger lucky reopen cage lake scatter desk boat" -i 33 -q --db /app/db -b 3 --noVMErrorsOnRPCResponse
echo "Starting home test blockchain"
docker kill ganache_home > /dev/null 2>&1 || true
docker network create blockchain_home > /dev/null 2>&1 || true
docker run -d --network blockchain_home --rm --name ganache_home -v "$HOME_GANACHE_DB:/app/db" \
-p "8545:8545" \
trufflesuite/ganache-cli:latest \
-m "shrug dwarf easily blade trigger lucky reopen cage lake scatter desk boat" -i 44 -q --db /app/db -b 3 --noVMErrorsOnRPCResponse
sleep 4
}
HOME_NETWORK="ethereum_home_rpc_net"
SIDE_NETWORK="ethereum_side_rpc_net"
deploy_token() {
echo "Compiling and deploying erc20"
@ -45,13 +24,13 @@ deploy_token() {
echo "Deploying"
if [[ "$TARGET_NETWORK" == "development" ]]; then
TOKEN_ADDRESS=$(docker run --network blockchain_home --rm -v "$DEPLOY_DIR/deploy-test/build:/build/build" --env-file "$DEPLOY_DIR/deploy-test/.env.$TARGET_NETWORK" \
TOKEN_ADDRESS=$(docker run --network "$HOME_NETWORK" --rm -v "$DEPLOY_DIR/deploy-test/build:/build/build" --env-file "$DEPLOY_DIR/deploy-test/.env.development" \
deploy_test \
--network home 2>&1 \
| grep "contract address" \
| awk '{print $4}')
else
TOKEN_ADDRESS=$(docker run --rm -v "$DEPLOY_DIR/deploy-test/build:/build/build" --env-file "$DEPLOY_DIR/deploy-test/.env.$TARGET_NETWORK" --env-file "$DEPLOY_DIR/.keys.$TARGET_NETWORK" \
TOKEN_ADDRESS=$(docker run --rm -v "$DEPLOY_DIR/deploy-test/build:/build/build" --env-file "$DEPLOY_DIR/deploy-test/.env.staging" --env-file "$DEPLOY_DIR/.keys.staging" \
deploy_test \
--network home 2>&1 \
| grep "contract address" \
@ -67,13 +46,13 @@ deploy_bridge() {
echo "Deploying"
if [[ "$TARGET_NETWORK" == "development" ]]; then
BRIDGE_ADDRESS=$(docker run --network blockchain_home --rm -v "$DEPLOY_DIR/deploy-home/build:/build/build" --env-file "$DEPLOY_DIR/deploy-home/.env.$TARGET_NETWORK" \
BRIDGE_ADDRESS=$(docker run --network "$HOME_NETWORK" --rm -v "$DEPLOY_DIR/deploy-home/build:/build/build" --env-file "$DEPLOY_DIR/deploy-home/.env.development" \
deploy_home \
--network home 2>&1 \
| grep "contract address" \
| awk '{print $4}')
else
BRIDGE_ADDRESS=$(docker run --rm -v "$DEPLOY_DIR/deploy-home/build:/build/build" --env-file "$DEPLOY_DIR/deploy-home/.env.$TARGET_NETWORK" --env-file "$DEPLOY_DIR/.keys.$TARGET_NETWORK" \
BRIDGE_ADDRESS=$(docker run --rm -v "$DEPLOY_DIR/deploy-home/build:/build/build" --env-file "$DEPLOY_DIR/deploy-home/.env.staging" --env-file "$DEPLOY_DIR/.keys.staging" \
deploy_home \
--network home 2>&1 \
| grep "contract address" \
@ -89,13 +68,13 @@ deploy_db() {
echo "Deploying"
if [[ "$TARGET_NETWORK" == "development" ]]; then
SHARED_DB_ADDRESS=$(docker run --network blockchain_side --rm -v "$DEPLOY_DIR/deploy-side/build:/build/build" --env-file "$DEPLOY_DIR/deploy-side/.env.$TARGET_NETWORK" \
SHARED_DB_ADDRESS=$(docker run --network "$SIDE_NETWORK" --rm -v "$DEPLOY_DIR/deploy-side/build:/build/build" --env-file "$DEPLOY_DIR/deploy-side/.env.development" \
deploy_side \
--network side 2>&1 \
| grep "contract address" \
| awk '{print $4}')
else
SHARED_DB_ADDRESS=$(docker run --rm -v "$DEPLOY_DIR/deploy-side/build:/build/build" --env-file "$DEPLOY_DIR/deploy-side/.env.$TARGET_NETWORK" --env-file "$DEPLOY_DIR/.keys.$TARGET_NETWORK" \
SHARED_DB_ADDRESS=$(docker run --rm -v "$DEPLOY_DIR/deploy-side/build:/build/build" --env-file "$DEPLOY_DIR/deploy-side/.env.staging" --env-file "$DEPLOY_DIR/.keys.staging" \
deploy_side \
--network side 2>&1 \
| grep "contract address" \
@ -131,33 +110,39 @@ deploy_all() {
sed -i 's/HOME_TOKEN_ADDRESS=.*$/HOME_TOKEN_ADDRESS='"$TOKEN_ADDRESS"'/' "$TEST_SERVICES_DIR/ethereumSend/.env.$TARGET_NETWORK"
}
if [[ "$TARGET_NETWORK" == "development" ]]; then
if [[ ! -d "$SIDE_GANACHE_DB" ]]; then
mkdir "$SIDE_GANACHE_DB"
fi
if [[ ! -d "$HOME_GANACHE_DB" ]]; then
mkdir "$HOME_GANACHE_DB"
fi
if [[ -z "$(ls -A "$SIDE_GANACHE_DB")" ]] || [[ -z "$(ls -A "$HOME_GANACHE_DB")" ]]; then
echo "Starting dev blockchain networks and deploying contracts"
need_to_deploy=true
if [[ "$(docker volume ls | grep ganache_side_data)" ]] || [[ "$(docker volume ls | grep ganache_home_data)" ]]; then
echo "Restarting ethereum test network"
else
echo "Restarting dev blockchain networks"
echo "Creating new ethereum test network"
echo "Removing old environment"
docker kill $(docker ps -a | grep ethereum-testnet_ | awk '{print $1}') > /dev/null 2>&1 || true
docker rm $(docker ps -a | grep ethereum-testnet_ | awk '{print $1}') > /dev/null 2>&1 || true
docker volume rm ganache_side_data > /dev/null 2>&1 || true
docker volume rm ganache_home_data > /dev/null 2>&1 || true
docker network create ethereum_side_rpc_net > /dev/null 2>&1 || true
docker network create ethereum_home_rpc_net > /dev/null 2>&1 || true
docker volume create ganache_side_data > /dev/null 2>&1 || true
docker volume create ganache_home_data > /dev/null 2>&1 || true
need_to_deploy=true
fi
start_dev_blockchain_networks
echo "Starting ethereum test environment"
BLOCK_TIME="$BLOCK_TIME" docker-compose -f ./src/ethereum-testnet/docker-compose.yml up --build -d
sleep 4
if [[ -n "$need_to_deploy" ]]; then
deploy_all
else
echo "Contracts are already deployed, run clean.sh first if you want to redeploy everything"
fi
else
echo "Deploying to the staging blockchain environment"

View File

@ -23,4 +23,4 @@ fi
# load env for particular environment
source ".env.$TARGET_NETWORK"
docker-compose -p "$NAME" -f ../../src/oracle/docker-compose-test.yml up ${DCU_FLAGS}
docker-compose -p "$NAME" -f ../../src/oracle/docker-compose-test.yml up ${DCU_FLAGS} $@

View File

@ -2,13 +2,24 @@ HOME_RPC_URL=http://ganache_home:8545
HOME_BRIDGE_ADDRESS=0x44c158FE850821ae69DaF37AADF5c539e9d0025B
HOME_TOKEN_ADDRESS=0xd5fE0D28e058D375b0b038fFbB446Da37E85fFdc
HOME_START_BLOCK=1
HOME_MAX_FETCH_RANGE_SIZE=10
SIDE_RPC_URL=http://ganache_side:8545
SIDE_SHARED_DB_ADDRESS=0xd5fE0D28e058D375b0b038fFbB446Da37E85fFdc
FOREIGN_URL=https://testnet-dex.binance.org/
FOREIGN_CHAIN_ID=Binance-Chain-Nile
FOREIGN_ASSET=KFT-94F
FOREIGN_URL=http://http-api:8000
FOREIGN_CHAIN_ID=Binance-Dev
FOREIGN_ASSET=DEV-9BA
FOREIGN_FETCH_MAX_TIME_INTERVAL=60000
FOREIGN_FETCH_INTERVAL=5000
FOREIGN_FETCH_BLOCK_TIME_OFFSET=10000
SIGN_ATTEMPT_TIMEOUT=120000
SIGN_NONCE_CHECK_INTERVAL=10000
SEND_TIMEOUT=60000
KEYGEN_ATTEMPT_TIMEOUT=120000
KEYGEN_EPOCH_CHECK_INTERVAL=10000
VALIDATOR_PRIVATE_KEY=2be3f252e16541bf1bb2d4a517d2bf173e6d09f2d765d32c64dc50515aec63ea
@ -16,4 +27,6 @@ VOTES_PROXY_PORT=5001
SIGN_RESTART_PORT=6001
LOG_LEVEL=debug
KEYGEN_RESTART_PORT=7001
LOG_LEVEL=trace

View File

@ -1,14 +1,25 @@
HOME_RPC_URL=https://kovan.infura.io/v3/5d7bd94c50ed43fab1cb8e74f58678b0
HOME_BRIDGE_ADDRESS=0x6ADCa5e691341fb9de8927d15c0a89B83A4E665e
HOME_TOKEN_ADDRESS=0x57d2533B640cfb58f8f1F69C14c089968Da9fdFc
HOME_START_BLOCK=13276224
HOME_BRIDGE_ADDRESS=0x01eD0d6350542E7643cB7bba4bccc96FedE0B616
HOME_TOKEN_ADDRESS=0x7c7daEf752C80A6d229D4a642B9336ceCd7e26b0
HOME_START_BLOCK=14760000
HOME_MAX_FETCH_RANGE_SIZE=50
SIDE_RPC_URL=https://sokol.poa.network
SIDE_SHARED_DB_ADDRESS=0xda9a1cA2Fcb18cAB02934269369627D2b4ea8902
SIDE_SHARED_DB_ADDRESS=0x7B307e73Ba9808BcA7cf24F1E7Ae9372faCeD102
FOREIGN_URL=https://testnet-dex.binance.org/
FOREIGN_CHAIN_ID=Binance-Chain-Nile
FOREIGN_ASSET=KFT-94F
FOREIGN_FETCH_MAX_TIME_INTERVAL=60000
FOREIGN_FETCH_INTERVAL=5000
FOREIGN_FETCH_BLOCK_TIME_OFFSET=10000
SIGN_ATTEMPT_TIMEOUT=120000
SIGN_NONCE_CHECK_INTERVAL=10000
SEND_TIMEOUT=60000
KEYGEN_ATTEMPT_TIMEOUT=120000
KEYGEN_EPOCH_CHECK_INTERVAL=10000
#VALIDATOR_PRIVATE_KEY is taken from .keys
@ -16,4 +27,6 @@ VOTES_PROXY_PORT=5001
SIGN_RESTART_PORT=6001
KEYGEN_RESTART_PORT=7001
LOG_LEVEL=info

View File

@ -2,13 +2,24 @@ HOME_RPC_URL=http://ganache_home:8545
HOME_BRIDGE_ADDRESS=0x44c158FE850821ae69DaF37AADF5c539e9d0025B
HOME_TOKEN_ADDRESS=0xd5fE0D28e058D375b0b038fFbB446Da37E85fFdc
HOME_START_BLOCK=1
HOME_MAX_FETCH_RANGE_SIZE=10
SIDE_RPC_URL=http://ganache_side:8545
SIDE_SHARED_DB_ADDRESS=0xd5fE0D28e058D375b0b038fFbB446Da37E85fFdc
FOREIGN_URL=https://testnet-dex.binance.org/
FOREIGN_CHAIN_ID=Binance-Chain-Nile
FOREIGN_ASSET=KFT-94F
FOREIGN_URL=http://http-api:8000
FOREIGN_CHAIN_ID=Binance-Dev
FOREIGN_ASSET=DEV-9BA
FOREIGN_FETCH_MAX_TIME_INTERVAL=60000
FOREIGN_FETCH_INTERVAL=5000
FOREIGN_FETCH_BLOCK_TIME_OFFSET=10000
SIGN_ATTEMPT_TIMEOUT=120000
SIGN_NONCE_CHECK_INTERVAL=10000
SEND_TIMEOUT=60000
KEYGEN_ATTEMPT_TIMEOUT=120000
KEYGEN_EPOCH_CHECK_INTERVAL=10000
VALIDATOR_PRIVATE_KEY=e59d58c77b791f98f10187117374ae9c589d48a62720ec6a5e142b0cc134f685
@ -16,4 +27,6 @@ VOTES_PROXY_PORT=5002
SIGN_RESTART_PORT=6002
LOG_LEVEL=debug
KEYGEN_RESTART_PORT=7002
LOG_LEVEL=trace

View File

@ -1,14 +1,25 @@
HOME_RPC_URL=https://kovan.infura.io/v3/5d7bd94c50ed43fab1cb8e74f58678b0
HOME_BRIDGE_ADDRESS=0x6ADCa5e691341fb9de8927d15c0a89B83A4E665e
HOME_TOKEN_ADDRESS=0x57d2533B640cfb58f8f1F69C14c089968Da9fdFc
HOME_START_BLOCK=13276224
HOME_BRIDGE_ADDRESS=0x01eD0d6350542E7643cB7bba4bccc96FedE0B616
HOME_TOKEN_ADDRESS=0x7c7daEf752C80A6d229D4a642B9336ceCd7e26b0
HOME_START_BLOCK=14760000
HOME_MAX_FETCH_RANGE_SIZE=50
SIDE_RPC_URL=https://sokol.poa.network
SIDE_SHARED_DB_ADDRESS=0xda9a1cA2Fcb18cAB02934269369627D2b4ea8902
SIDE_SHARED_DB_ADDRESS=0x7B307e73Ba9808BcA7cf24F1E7Ae9372faCeD102
FOREIGN_URL=https://testnet-dex.binance.org/
FOREIGN_CHAIN_ID=Binance-Chain-Nile
FOREIGN_ASSET=KFT-94F
FOREIGN_FETCH_MAX_TIME_INTERVAL=60000
FOREIGN_FETCH_INTERVAL=5000
FOREIGN_FETCH_BLOCK_TIME_OFFSET=10000
SIGN_ATTEMPT_TIMEOUT=120000
SIGN_NONCE_CHECK_INTERVAL=10000
SEND_TIMEOUT=60000
KEYGEN_ATTEMPT_TIMEOUT=120000
KEYGEN_EPOCH_CHECK_INTERVAL=10000
#VALIDATOR_PRIVATE_KEY is taken from .keys
@ -16,4 +27,6 @@ VOTES_PROXY_PORT=5002
SIGN_RESTART_PORT=6002
KEYGEN_RESTART_PORT=7002
LOG_LEVEL=info

View File

@ -2,13 +2,24 @@ HOME_RPC_URL=http://ganache_home:8545
HOME_BRIDGE_ADDRESS=0x44c158FE850821ae69DaF37AADF5c539e9d0025B
HOME_TOKEN_ADDRESS=0xd5fE0D28e058D375b0b038fFbB446Da37E85fFdc
HOME_START_BLOCK=1
HOME_MAX_FETCH_RANGE_SIZE=10
SIDE_RPC_URL=http://ganache_side:8545
SIDE_SHARED_DB_ADDRESS=0xd5fE0D28e058D375b0b038fFbB446Da37E85fFdc
FOREIGN_URL=https://testnet-dex.binance.org/
FOREIGN_CHAIN_ID=Binance-Chain-Nile
FOREIGN_ASSET=KFT-94F
FOREIGN_URL=http://http-api:8000
FOREIGN_CHAIN_ID=Binance-Dev
FOREIGN_ASSET=DEV-9BA
FOREIGN_FETCH_MAX_TIME_INTERVAL=60000
FOREIGN_FETCH_INTERVAL=5000
FOREIGN_FETCH_BLOCK_TIME_OFFSET=10000
SIGN_ATTEMPT_TIMEOUT=120000
SIGN_NONCE_CHECK_INTERVAL=10000
SEND_TIMEOUT=60000
KEYGEN_ATTEMPT_TIMEOUT=120000
KEYGEN_EPOCH_CHECK_INTERVAL=10000
VALIDATOR_PRIVATE_KEY=afaa4d4d6e54d25b0bf0361e3fd6cef562f6311bf6200de2dd0aa4cab63ae3b5
@ -16,4 +27,6 @@ VOTES_PROXY_PORT=5003
SIGN_RESTART_PORT=6003
LOG_LEVEL=debug
KEYGEN_RESTART_PORT=7003
LOG_LEVEL=trace

View File

@ -1,14 +1,25 @@
HOME_RPC_URL=https://kovan.infura.io/v3/5d7bd94c50ed43fab1cb8e74f58678b0
HOME_BRIDGE_ADDRESS=0x6ADCa5e691341fb9de8927d15c0a89B83A4E665e
HOME_TOKEN_ADDRESS=0x57d2533B640cfb58f8f1F69C14c089968Da9fdFc
HOME_START_BLOCK=13276224
HOME_BRIDGE_ADDRESS=0x01eD0d6350542E7643cB7bba4bccc96FedE0B616
HOME_TOKEN_ADDRESS=0x7c7daEf752C80A6d229D4a642B9336ceCd7e26b0
HOME_START_BLOCK=14760000
HOME_MAX_FETCH_RANGE_SIZE=50
SIDE_RPC_URL=https://sokol.poa.network
SIDE_SHARED_DB_ADDRESS=0xda9a1cA2Fcb18cAB02934269369627D2b4ea8902
SIDE_SHARED_DB_ADDRESS=0x7B307e73Ba9808BcA7cf24F1E7Ae9372faCeD102
FOREIGN_URL=https://testnet-dex.binance.org/
FOREIGN_CHAIN_ID=Binance-Chain-Nile
FOREIGN_ASSET=KFT-94F
FOREIGN_FETCH_MAX_TIME_INTERVAL=60000
FOREIGN_FETCH_INTERVAL=5000
FOREIGN_FETCH_BLOCK_TIME_OFFSET=10000
SIGN_ATTEMPT_TIMEOUT=120000
SIGN_NONCE_CHECK_INTERVAL=10000
SEND_TIMEOUT=60000
KEYGEN_ATTEMPT_TIMEOUT=120000
KEYGEN_EPOCH_CHECK_INTERVAL=10000
#VALIDATOR_PRIVATE_KEY is taken from .keys
@ -16,4 +27,6 @@ VOTES_PROXY_PORT=5003
SIGN_RESTART_PORT=6003
KEYGEN_RESTART_PORT=7003
LOG_LEVEL=info

14
package.json Normal file
View File

@ -0,0 +1,14 @@
{
"name": "bridge",
"version": "0.0.1",
"devDependencies": {
"eslint": "^6.6.0",
"eslint-config-airbnb": "^18.0.1",
"eslint-plugin-import": "^2.18.2",
"eslint-plugin-node": "^10.0.0",
"eslint-plugin-truffle": "0.3.1"
},
"engines": {
"node": ">=10.6.0"
}
}

View File

@ -0,0 +1,19 @@
FROM ubuntu:19.10
ARG BNC_VERSION=0.6.2
RUN apt-get update && \
apt-get install -y git git-lfs
WORKDIR /binaries
RUN GIT_LFS_SKIP_SMUDGE=1 git clone --depth 1 https://github.com/binance-chain/node-binary.git .
RUN git lfs pull -I fullnode/testnet/${BNC_VERSION}/linux
RUN git lfs pull -I cli/testnet/${BNC_VERSION}/linux
RUN ./fullnode/testnet/${BNC_VERSION}/linux/bnbchaind testnet --acc-prefix tbnb --chain-id Binance-Dev --v 1
RUN sed -i "s/publishTransfer = false/publishTransfer = true/" ./mytestnet/node0/gaiad/config/app.toml && \
sed -i "s/publishLocal = false/publishLocal = true/" ./mytestnet/node0/gaiad/config/app.toml && \
sed -i "s/BEP12Height = 9223372036854775807/BEP12Height = 1/" ./mytestnet/node0/gaiad/config/app.toml

View File

@ -0,0 +1,13 @@
FROM alpine:3.9.4
ARG BNC_VERSION=0.6.2
WORKDIR /api-server
COPY --from=testnet-binaries /binaries/cli/testnet/${BNC_VERSION}/linux/tbnbcli ./
RUN echo 12345678 | ./tbnbcli keys add key --no-backup
EXPOSE 8080
ENTRYPOINT ["./tbnbcli", "api-server", "--chain-id", "Binance-Dev", "--laddr", "tcp://0.0.0.0:8080", "--node"]

View File

@ -0,0 +1,42 @@
# Local Binance Chain testnet used for development:
#   node       - single-validator fullnode (Tendermint RPC on 26657)
#   api-server - official tbnbcli api-server proxying the node
#   http-api   - custom facade exposing a DEX-like REST API on port 8000
version: '3.0'
services:
  node:
    build: node
    image: bnc-testnet
    networks:
      - binance_rpc_net
    ports:
      - '26657:26657'
    volumes:
      # marketdata is shared with http-api, which tails transfer events from it
      - 'binance_marketdata:/root/.bnbchaind/marketdata'
      # chain state persists across restarts via this external volume
      - 'binance_data:/root/.bnbchaind/data'
  api-server:
    build: api-server
    image: bnc-api-server
    networks:
      - binance_rpc_net
    ports:
      - '8080:8080'
    # positional argument: RPC endpoint the api-server connects to
    command: ["http://node:26657"]
  http-api:
    build: http-api
    image: bnc-http-api
    environment:
      FOREIGN_RPC_URL: 'http://node:26657'
      FOREIGN_API_SERVER_URL: 'http://api-server:8080'
    networks:
      - binance_net
      - binance_rpc_net
    ports:
      - '8000:8000'
    volumes:
      - 'binance_marketdata:/http-api/marketdata'
networks:
  # created outside compose (see the start script) so it can be shared
  binance_net:
    external: true
  binance_rpc_net:
volumes:
  # external volumes are pre-created by the start script and act as the
  # persistence / "already deployed" markers for the testnet
  binance_marketdata:
    external: true
  binance_data:
    external: true

View File

@ -0,0 +1,15 @@
FROM node:10.16.0-alpine
ARG BNC_VERSION=0.6.2
WORKDIR /http-api
COPY --from=testnet-binaries /binaries/cli/testnet/${BNC_VERSION}/linux/tbnbcli ./
COPY ./package.json ./
RUN npm install
COPY ./index.js ./parser.js ./
ENTRYPOINT ["node", "./index.js"]

View File

@ -0,0 +1,163 @@
const { execSync } = require('child_process')
const express = require('express')
const axios = require('axios')
const BN = require('bignumber.js')
const createParser = require('./parser')
// Axios client for the fullnode's Tendermint RPC endpoint.
const rpcClient = axios.create({
  baseURL: process.env.FOREIGN_RPC_URL,
  timeout: 10000
})
// Axios client for the official tbnbcli api-server.
const apiClient = axios.create({
  baseURL: process.env.FOREIGN_API_SERVER_URL,
  timeout: 10000
})
// In-memory log of every transfer event parsed from the node's marketdata feed.
const transfers = []
const parser = createParser('/http-api/marketdata/marketdata.json', 20 * 1024)
parser.eventEmitter.on('object', (obj) => {
  obj.Transfers.forEach((event) => {
    // eslint-disable-next-line no-param-reassign
    // Scale the batch timestamp down by 10^6 (presumably ns -> ms — TODO confirm)
    // and stamp it onto each transfer in the batch.
    event.Timestamp = Math.ceil(obj.Timestamp / 10 ** 6)
    transfers.push(event)
  })
})
const app = express()
// Capture the raw request body for broadcast requests; handleBroadcast
// forwards it to the node verbatim as a hex string.
app.use('/api/v1/broadcast', (req, res, next) => {
  req.rawBody = ''
  req.on('data', (chunk) => {
    req.rawBody += chunk.toString()
  })
  req.on('end', () => {
    next()
  })
})
// Decorate an async route handler so that any thrown error or rejected
// promise is reported to the client as a bare 404 instead of hanging
// the request.
function wrap(f) {
  const safeHandler = async (req, res) => {
    try {
      await f(req, res)
    } catch (err) {
      res.status(404).end()
    }
  }
  return safeHandler
}
// Look up a transaction by hash via the tbnbcli binary and return it in the
// DEX API's /api/v1/tx shape. Throws (-> 404 via wrap) on any failure.
// Security fix: req.params.hash comes from the URL and was interpolated
// unvalidated into a shell command — validate it as a hex tx hash first to
// prevent command injection (Tendermint tx hashes are 64 hex chars —
// TODO confirm against tbnbcli).
async function handleTx(req, res) {
  const requestedHash = req.params.hash
  if (!/^[0-9a-fA-F]{64}$/.test(requestedHash)) {
    throw new Error('Invalid transaction hash')
  }
  const {
    tx, hash, height, result
  } = JSON.parse(
    execSync(`./tbnbcli tx ${requestedHash} --node "http://node:26657" --chain-id Binance-Dev`)
  )
  res.send({
    code: 0,
    hash,
    height,
    log: result.log,
    ok: true,
    tx
  })
}
// Return transfer events matching the query, in the shape of the public
// Binance DEX /api/v1/transactions endpoint. Only txType=TRANSFER with
// side=RECEIVE is supported; startTime/endTime are inclusive bounds compared
// against the scaled marketdata timestamps.
async function handleTransactions(req, res) {
  // Drain any newly written marketdata before answering.
  // eslint-disable-next-line no-empty
  while (parser.update()) {}
  const {
    address, side, txAsset, txType, startTime, endTime
  } = req.query
  if (txType !== 'TRANSFER' || side !== 'RECEIVE') {
    res.status(400).send('Given parameters are not supported')
    // Bug fix: without this return the handler fell through and called
    // res.send() a second time on an already-answered response.
    return
  }
  // Keep only single-recipient, single-coin transfers of txAsset to `address`
  // within the requested time window.
  const filtered = transfers.filter((event) => event.Timestamp >= parseInt(startTime, 10)
    && event.Timestamp <= parseInt(endTime, 10)
    && event.To.length === 1
    && event.To[0].Addr === address
    && event.To[0].Coins.length === 1
    && event.To[0].Coins[0].denom === txAsset)
  res.send({
    tx: filtered.map((tx) => ({
      txHash: tx.TxHash,
      memo: tx.Memo,
      // Amounts are integer base units; render as an 8-decimal string,
      // rounding down (bignumber.js rounding mode 3 = ROUND_FLOOR).
      value: new BN(tx.To[0].Coins[0].amount).dividedBy(10 ** 8).toFixed(8, 3)
    })),
    total: filtered.length
  })
}
// Report chain time in the DEX API's /api/v1/time shape; both fields
// mirror the node's latest block time.
async function handleTime(req, res) {
  const { data } = await rpcClient.get('/status')
  const latestBlockTime = data.result.sync_info.latest_block_time
  res.send({
    ap_time: latestBlockTime,
    block_time: latestBlockTime
  })
}
// Proxy the account lookup straight through to the api-server.
async function handleAccount(req, res) {
  const { data } = await apiClient.get(`/api/v1/account/${req.params.account}`)
  res.send(data)
}
// Fetch the account from the api-server and return only its sequence number.
async function handleAccountSequence(req, res) {
  const { data } = await apiClient.get(`/api/v1/account/${req.params.account}`)
  res.send({ sequence: data.sequence })
}
// Expose the node's /status RPC result as /api/v1/node-info.
async function handleNodeInfo(req, res) {
  const { data } = await rpcClient.get('/status')
  res.send(data.result)
}
// Proxy the fee schedule straight through from the api-server.
async function handleFees(req, res) {
  const { data } = await apiClient.get('/api/v1/fees')
  res.send(data)
}
// Forward a signed, hex-encoded transaction to the node. Only synchronous
// broadcast (?sync=true) is supported; the raw body is captured into
// req.rawBody by the /api/v1/broadcast middleware registered earlier in
// this file. Error responses reproduce fixed DEX-API-like payloads.
async function handleBroadcast(req, res) {
  if (req.query.sync !== 'true') {
    res.status(400).send('Async broadcast is not supported')
  } else {
    const response = await rpcClient.get('/broadcast_tx_sync', {
      params: {
        tx: `0x${req.rawBody}`
      }
    })
    if (response.data.error) {
      // RPC-level error: answer with a canned 500 payload.
      // NOTE(review): the message assumes "already in cache" is the cause —
      // other RPC errors get the same text; confirm this is intended.
      res.status(500).send({
        code: 500,
        failed_tx_index: 0,
        message: 'RPC error -32603 - Internal error: Tx already exists in cache',
        success_tx_results: []
      })
    } else if (response.data.result.code === 65546) {
      // CheckTx code 65546: answered with a canned 400 payload.
      // NOTE(review): message suggests insufficient funds — confirm the
      // code's meaning against the node sources.
      res.status(400).send({
        code: 400,
        failed_tx_index: 0,
        message: '3417218964BNB < 1000DEV-BA6',
        success_tx_results: []
      })
    } else if (response.data.result) {
      // Success: the DEX API returns an array of results.
      res.send([response.data.result])
    } else {
      res.status(400).end()
    }
  }
}
// Route table: a DEX-like REST surface backed by the local testnet node.
// Every handler is wrapped so failures become 404s.
app.get('/api/v1/tx/:hash', wrap(handleTx))
app.get('/api/v1/time', wrap(handleTime))
app.get('/api/v1/transactions', wrap(handleTransactions))
app.get('/api/v1/account/:account', wrap(handleAccount))
app.get('/api/v1/account/:account/sequence', wrap(handleAccountSequence))
app.get('/api/v1/node-info', wrap(handleNodeInfo))
app.get('/api/v1/fees', wrap(handleFees))
app.post('/api/v1/broadcast', wrap(handleBroadcast))
app.listen(8000, () => {
  // eslint-disable-next-line no-console
  console.log('Listening on port 8000')
})

View File

@ -0,0 +1,12 @@
{
"name": "api-server",
"version": "0.0.1",
"dependencies": {
"express": "4.17.1",
"axios": "0.19.0",
"bignumber.js": "9.0.0"
},
"engines": {
"node": ">=10.6.0"
}
}

View File

@ -0,0 +1,44 @@
const events = require('events')
const fs = require('fs')
function createParser(file, bufferSize) {
const buf = Buffer.alloc(bufferSize)
const eventEmitter = new events.EventEmitter()
let fd
let position = 0
let end = 0
return {
update() {
if (!fd) {
try {
fd = fs.openSync(file, 'r')
} catch (e) {
return 0
}
}
const bytesRead = fs.readSync(fd, buf, position, buf.length - position, null)
for (let i = position; i < position + bytesRead; i += 1) {
if (buf[i] === 10) {
const obj = buf.slice(end, i)
end = i + 1
eventEmitter.emit('object', JSON.parse(obj))
}
}
position += bytesRead
if (buf.length - position < bufferSize / 2) {
buf.copy(buf, 0, end, position)
position -= end
end = 0
}
return bytesRead
},
close() {
fs.closeSync(fd)
},
eventEmitter
}
}
module.exports = createParser

View File

@ -0,0 +1,14 @@
# Binance Chain testnet full-node image, assembled from prebuilt binaries.
FROM alpine:3.9.4
# Version of the Binance node binaries to copy from the testnet-binaries stage.
ARG BNC_VERSION=0.6.2
WORKDIR /bnc
# Node daemon and CLI come from the external `testnet-binaries` build stage.
COPY --from=testnet-binaries /binaries/fullnode/testnet/${BNC_VERSION}/linux/bnbchaind ./
COPY --from=testnet-binaries /binaries/cli/testnet/${BNC_VERSION}/linux/tbnbcli ./
# Pre-generated local-testnet home directories — presumably produced by the
# mytestnet setup in the binaries image; confirm the layout there.
COPY --from=testnet-binaries /binaries/mytestnet/node0/gaiacli /root/.bnbcli
COPY --from=testnet-binaries /binaries/mytestnet/node0/gaiad /root/.bnbchaind
# Tendermint RPC port.
EXPOSE 26657
ENTRYPOINT ["./bnbchaind", "start"]

11
src/deploy/.eslintrc Normal file
View File

@ -0,0 +1,11 @@
{
"extends": [
"../../.eslintrc"
],
"plugins": [
"truffle"
],
"env": {
"truffle/globals": true
}
}

View File

@ -9,9 +9,11 @@ VALIDATOR_ADDRESS_2=0xaa006899b0ec407de930ba8a166defe59bbfd3dc
VALIDATOR_ADDRESS_3=0x6352e3e6038e05b9da00c84ae851308f9774f883
#VALIDATOR_ADDRESS_4=0x4db6b4bd0a3fdc03b027a60f1c48f05c572312aa
THRESHOLD=1
THRESHOLD=2
MIN_TX_LIMIT=10000000000000000
MAX_TX_LIMIT=100000000000000000000
MAX_TX_LIMIT=1000000000000000000000
BLOCKS_RANGE_SIZE=25
BLOCKS_RANGE_SIZE=10
CLOSE_EPOCH_FLAG=true

View File

@ -3,16 +3,18 @@ HOME_RPC_URL=https://kovan.infura.io/v3/5d7bd94c50ed43fab1cb8e74f58678b0
#HOME_PRIVATE_KEY is taken from src/deploy/.keys.staging
#Set to '0x' for redeployment of token contract in staging environment
HOME_TOKEN_ADDRESS=0x57d2533B640cfb58f8f1F69C14c089968Da9fdFc
HOME_TOKEN_ADDRESS=0x7c7daEf752C80A6d229D4a642B9336ceCd7e26b0
VALIDATOR_ADDRESS_1=0xaaaaB15630f63cA01bb50943AaAb4008CB53748D
VALIDATOR_ADDRESS_2=0xbbbb63D6Fc58bD14dAF9eeF653650c4D10f3dBC8
VALIDATOR_ADDRESS_3=0xcccc27ae510b63E30eC3C68AAD7DdD2578bD62ed
#VALIDATOR_ADDRESS_4=0xdddd9300e32fe162bA420f7313651Fd901C2ed71
THRESHOLD=1
THRESHOLD=2
MIN_TX_LIMIT=10000000000000000
MAX_TX_LIMIT=100000000000000000000
BLOCKS_RANGE_SIZE=25
CLOSE_EPOCH_FLAG=true

View File

@ -0,0 +1,3 @@
{
"extends": "solhint:default"
}

View File

@ -9,8 +9,6 @@ RUN npm install truffle-hdwallet-provider
RUN truffle obtain --solc 0.5.9
COPY truffle-config-build.js /build/truffle-config.js
RUN mkdir temp \
&& cd temp \
&& truffle init \
@ -19,6 +17,7 @@ RUN mkdir temp \
&& cd .. \
&& rm -rf temp
COPY truffle-config-build.js /build/truffle-config.js
COPY contracts/openzeppelin-solidity /build/contracts/openzeppelin-solidity
RUN truffle compile

View File

@ -0,0 +1,174 @@
pragma solidity ^0.5.0;

import './openzeppelin-solidity/contracts/token/ERC20/IERC20.sol';

/// @title BasicBridge
/// @notice Shared state, events and read-only accessors for the Ethereum side
/// of the token bridge. Per-epoch data (validator set, thresholds, key) lives
/// in `states`; derived contracts drive the state-machine transitions.
contract BasicBridge {
    // Sentinel for "not yet set" block numbers / nonces (max uint32).
    uint32 constant UPPER_BOUND = 0xffffffff;

    event EpochEnd(uint16 indexed epoch);
    event EpochClose(uint16 indexed epoch);
    event ForceSign();
    event NewEpoch(uint16 indexed oldEpoch, uint16 indexed newEpoch);
    event NewEpochCancelled(uint16 indexed epoch);
    event NewFundsTransfer(uint16 indexed oldEpoch, uint16 indexed newEpoch);
    // x, y are presumably the coordinates of the epoch's threshold-signature
    // public key produced by keygen — TODO confirm against the keygen flow.
    event EpochStart(uint16 indexed epoch, uint x, uint y);

    // Per-epoch configuration and progress.
    struct State {
        address[] validators; // validator set of the epoch
        uint32 startBlock;    // first block the epoch is active
        uint32 endBlock;      // last active block (UPPER_BOUND while open)
        uint32 nonce;         // exchange-request counter (UPPER_BOUND until first use)
        uint16 threshold;     // signatures required for this epoch
        uint16 rangeSize;     // block-range bucket size for exchange nonces
        bool closeEpoch;      // whether ending this epoch requires a close step
        uint x;               // public key x-coordinate
        uint y;               // public key y-coordinate
    }

    enum Status {
        READY, // bridge is in ready to perform operations
        CLOSING_EPOCH, // generating transaction for blocking binance side of the bridge
        VOTING, // voting for changing in next epoch, but still ready
        KEYGEN, //keygen, can be cancelled
        FUNDS_TRANSFER // funds transfer, cannot be cancelled
    }

    // Epoch number => epoch state.
    mapping(uint16 => State) public states;

    // Current phase of the bridge state machine.
    Status public status;

    uint16 public epoch;
    uint16 public nextEpoch;

    // Per-transaction value limits (token units); internal visibility.
    uint96 minTxLimit;
    uint96 maxTxLimit;

    // Bridged ERC20 token.
    IERC20 public tokenContract;

    modifier ready {
        require(status == Status.READY, "Not in ready state");
        _;
    }

    modifier closingEpoch {
        require(status == Status.CLOSING_EPOCH, "Not in closing epoch state");
        _;
    }

    modifier voting {
        require(status == Status.VOTING, "Not in voting state");
        _;
    }

    modifier keygen {
        require(status == Status.KEYGEN, "Not in keygen state");
        _;
    }

    modifier fundsTransfer {
        require(status == Status.FUNDS_TRANSFER, "Not in funds transfer state");
        _;
    }

    // Number of validators in the current epoch.
    function getParties() view public returns (uint16) {
        return getParties(epoch);
    }

    // Number of validators in the upcoming epoch.
    function getNextParties() view public returns (uint16) {
        return getParties(nextEpoch);
    }

    function getParties(uint16 _epoch) view public returns (uint16) {
        return uint16(states[_epoch].validators.length);
    }

    function getThreshold() view public returns (uint16) {
        return getThreshold(epoch);
    }

    function getNextThreshold() view public returns (uint16) {
        return getThreshold(nextEpoch);
    }

    function getThreshold(uint16 _epoch) view public returns (uint16) {
        return states[_epoch].threshold;
    }

    function getStartBlock() view public returns (uint32) {
        return getStartBlock(epoch);
    }

    function getStartBlock(uint16 _epoch) view public returns (uint32) {
        return states[_epoch].startBlock;
    }

    function getRangeSize() view public returns (uint16) {
        return getRangeSize(epoch);
    }

    function getNextRangeSize() view public returns (uint16) {
        return getRangeSize(nextEpoch);
    }

    function getRangeSize(uint16 _epoch) view public returns (uint16) {
        return states[_epoch].rangeSize;
    }

    function getNonce() view public returns (uint32) {
        return getNonce(epoch);
    }

    function getNonce(uint16 _epoch) view public returns (uint32) {
        return states[_epoch].nonce;
    }

    function getX() view public returns (uint) {
        return states[epoch].x;
    }

    function getY() view public returns (uint) {
        return states[epoch].y;
    }

    function getCloseEpoch() view public returns (bool) {
        return getCloseEpoch(epoch);
    }

    function getNextCloseEpoch() view public returns (bool) {
        return getCloseEpoch(nextEpoch);
    }

    function getCloseEpoch(uint16 _epoch) view public returns (bool) {
        return states[_epoch].closeEpoch;
    }

    // 1-based index of msg.sender in the current validator set; 0 if absent.
    function getPartyId() view public returns (uint16) {
        address[] memory validators = getValidators();
        for (uint i = 0; i < getParties(); i++) {
            if (validators[i] == msg.sender)
                return uint16(i + 1);
        }
        return 0;
    }

    // 1-based index of `a` in the next validator set; 0 if absent.
    function getNextPartyId(address a) view public returns (uint16) {
        address[] memory validators = getNextValidators();
        for (uint i = 0; i < getNextParties(); i++) {
            if (validators[i] == a)
                return uint16(i + 1);
        }
        return 0;
    }

    function getValidators() view public returns (address[] memory) {
        return getValidators(epoch);
    }

    function getNextValidators() view public returns (address[] memory) {
        return getValidators(nextEpoch);
    }

    function getValidators(uint16 _epoch) view public returns (address[] memory) {
        return states[_epoch].validators;
    }
}

View File

@ -1,63 +1,23 @@
pragma solidity ^0.5.0;
import './openzeppelin-solidity/contracts/token/ERC20/IERC20.sol';
import "./MessageHandler.sol";
contract Bridge {
event ExchangeRequest(uint value, uint nonce);
event NewEpoch(uint indexed oldEpoch, uint indexed newEpoch);
event NewEpochCancelled(uint indexed epoch);
event NewFundsTransfer(uint indexed oldEpoch, uint indexed newEpoch);
event EpochStart(uint indexed epoch, uint x, uint y);
contract Bridge is MessageHandler {
event ExchangeRequest(uint96 value, uint32 nonce);
struct State {
address[] validators;
uint threshold;
uint rangeSize;
uint startBlock;
uint nonce;
uint x;
uint y;
}
mapping(bytes32 => bool) usedExchangeRanges;
enum Status {
READY, // bridge is in ready to perform operations
VOTING, // voting for changing in next epoch, but still ready
KEYGEN, //keygen, can be cancelled
FUNDS_TRANSFER // funds transfer, cannot be cancelled
}
enum Vote {
CONFIRM_KEYGEN,
CONFIRM_FUNDS_TRANSFER,
START_VOTING,
ADD_VALIDATOR,
REMOVE_VALIDATOR,
CHANGE_THRESHOLD,
CHANGE_RANGE_SIZE,
START_KEYGEN,
CANCEL_KEYGEN,
TRANSFER
}
mapping(uint => State) states;
mapping(bytes32 => uint) public dbTransferCount;
mapping(bytes32 => bool) public dbTransfer;
mapping(bytes32 => uint) public votesCount;
mapping(bytes32 => bool) public votes;
mapping(bytes32 => bool) public usedRange;
Status public status;
uint public epoch;
uint public nextEpoch;
uint minTxLimit;
uint maxTxLimit;
constructor(uint threshold, address[] memory validators, address _tokenContract, uint[2] memory limits, uint rangeSize) public {
constructor(
uint16 threshold,
address[] memory validators,
address _tokenContract,
uint96[2] memory limits,
uint16 rangeSize,
bool closeEpoch
) public {
require(validators.length > 0);
require(threshold < validators.length);
require(threshold <= validators.length);
tokenContract = IERC20(_tokenContract);
@ -65,7 +25,17 @@ contract Bridge {
status = Status.KEYGEN;
nextEpoch = 1;
states[nextEpoch] = State(validators, threshold, rangeSize, 0, uint(-1), 0, 0);
states[nextEpoch] = State({
validators : validators,
threshold : threshold,
rangeSize : rangeSize,
startBlock : 0,
endBlock : UPPER_BOUND,
nonce : UPPER_BOUND,
x : 0,
y : 0,
closeEpoch : closeEpoch
});
minTxLimit = limits[0];
maxTxLimit = limits[1];
@ -73,299 +43,16 @@ contract Bridge {
emit NewEpoch(0, 1);
}
IERC20 public tokenContract;
modifier ready {
require(status == Status.READY, "Not in ready state");
_;
}
modifier readyOrVoting {
require(status == Status.READY || status == Status.VOTING, "Not in ready or voting state");
_;
}
modifier voting {
require(status == Status.VOTING, "Not in voting state");
_;
}
modifier keygen {
require(status == Status.KEYGEN, "Not in keygen state");
_;
}
modifier fundsTransfer {
require(status == Status.FUNDS_TRANSFER, "Not in funds transfer state");
_;
}
modifier currentValidator {
require(getPartyId() != 0, "Not a current validator");
_;
}
function exchange(uint value) public ready {
function exchange(uint96 value) public ready {
require(value >= minTxLimit && value >= 10 ** 10 && value <= maxTxLimit);
uint txRange = (block.number - getStartBlock()) / getRangeSize();
if (!usedRange[keccak256(abi.encodePacked(txRange, epoch))]) {
usedRange[keccak256(abi.encodePacked(txRange, epoch))] = true;
uint32 txRange = (uint32(block.number) - getStartBlock()) / uint32(getRangeSize());
if (!usedExchangeRanges[keccak256(abi.encodePacked(txRange, epoch))]) {
usedExchangeRanges[keccak256(abi.encodePacked(txRange, epoch))] = true;
states[epoch].nonce++;
}
tokenContract.transferFrom(msg.sender, address(this), value);
emit ExchangeRequest(value, getNonce());
}
function transfer(bytes32 hash, address to, uint value) public readyOrVoting currentValidator {
if (tryVote(Vote.TRANSFER, hash, to, value)) {
tokenContract.transfer(to, value);
}
}
function confirmKeygen(uint x, uint y) public keygen {
require(getNextPartyId(msg.sender) != 0, "Not a next validator");
if (tryConfirm(Vote.CONFIRM_KEYGEN, x, y)) {
states[nextEpoch].x = x;
states[nextEpoch].y = y;
if (nextEpoch == 1) {
status = Status.READY;
states[nextEpoch].startBlock = block.number;
states[nextEpoch].nonce = uint(-1);
epoch = nextEpoch;
emit EpochStart(epoch, x, y);
}
else {
status = Status.FUNDS_TRANSFER;
emit NewFundsTransfer(epoch, nextEpoch);
}
}
}
function confirmFundsTransfer() public fundsTransfer currentValidator {
require(epoch > 0, "First epoch does not need funds transfer");
if (tryConfirm(Vote.CONFIRM_FUNDS_TRANSFER)) {
status = Status.READY;
states[nextEpoch].startBlock = block.number;
states[nextEpoch].nonce = uint(-1);
epoch = nextEpoch;
emit EpochStart(epoch, getX(), getY());
}
}
function getParties() view public returns (uint) {
return getParties(epoch);
}
function getNextParties() view public returns (uint) {
return getParties(nextEpoch);
}
function getParties(uint _epoch) view public returns (uint) {
return states[_epoch].validators.length;
}
function getThreshold() view public returns (uint) {
return getThreshold(epoch);
}
function getNextThreshold() view public returns (uint) {
return getThreshold(nextEpoch);
}
function getThreshold(uint _epoch) view public returns (uint) {
return states[_epoch].threshold;
}
function getStartBlock() view public returns (uint) {
return getStartBlock(epoch);
}
function getStartBlock(uint _epoch) view public returns (uint) {
return states[_epoch].startBlock;
}
function getRangeSize() view public returns (uint) {
return getRangeSize(epoch);
}
function getNextRangeSize() view public returns (uint) {
return getRangeSize(nextEpoch);
}
function getRangeSize(uint _epoch) view public returns (uint) {
return states[_epoch].rangeSize;
}
function getNonce() view public returns (uint) {
return getNonce(epoch);
}
function getNonce(uint _epoch) view public returns (uint) {
return states[_epoch].nonce;
}
function getX() view public returns (uint) {
return states[epoch].x;
}
function getY() view public returns (uint) {
return states[epoch].y;
}
function getPartyId() view public returns (uint) {
address[] memory validators = getValidators();
for (uint i = 0; i < getParties(); i++) {
if (validators[i] == msg.sender)
return i + 1;
}
return 0;
}
function getNextPartyId(address a) view public returns (uint) {
address[] memory validators = getNextValidators();
for (uint i = 0; i < getNextParties(); i++) {
if (validators[i] == a)
return i + 1;
}
return 0;
}
function getValidators() view public returns (address[] memory) {
return states[epoch].validators;
}
function getNextValidators() view public returns (address[] memory) {
return states[nextEpoch].validators;
}
function startVoting() public readyOrVoting currentValidator {
if (tryVote(Vote.START_VOTING)) {
nextEpoch++;
status = Status.VOTING;
states[nextEpoch].threshold = getThreshold();
states[nextEpoch].validators = getValidators();
states[nextEpoch].rangeSize = getRangeSize();
}
}
function voteAddValidator(address validator) public voting currentValidator {
require(getNextPartyId(validator) == 0, "Already a validator");
if (tryVote(Vote.ADD_VALIDATOR, validator)) {
states[nextEpoch].validators.push(validator);
}
}
function voteRemoveValidator(address validator) public voting currentValidator {
require(getNextPartyId(validator) != 0, "Already not a validator");
if (tryVote(Vote.REMOVE_VALIDATOR, validator)) {
_removeValidator(validator);
}
}
function _removeValidator(address validator) private {
for (uint i = 0; i < getNextParties() - 1; i++) {
if (states[nextEpoch].validators[i] == validator) {
states[nextEpoch].validators[i] = getNextValidators()[getNextParties() - 1];
break;
}
}
delete states[nextEpoch].validators[getNextParties() - 1];
states[nextEpoch].validators.length--;
}
function voteChangeThreshold(uint threshold) public voting currentValidator {
if (tryVote(Vote.CHANGE_THRESHOLD, threshold)) {
states[nextEpoch].threshold = threshold;
}
}
function voteChangeRangeSize(uint rangeSize) public voting currentValidator {
if (tryVote(Vote.CHANGE_RANGE_SIZE, rangeSize)) {
states[nextEpoch].rangeSize = rangeSize;
}
}
function voteStartKeygen() public voting currentValidator {
if (tryVote(Vote.START_KEYGEN)) {
status = Status.KEYGEN;
emit NewEpoch(epoch, nextEpoch);
}
}
function voteCancelKeygen() public keygen currentValidator {
if (tryVote(Vote.CANCEL_KEYGEN)) {
status = Status.VOTING;
emit NewEpochCancelled(nextEpoch);
}
}
function tryVote(Vote voteType) private returns (bool) {
bytes32 vote = keccak256(abi.encodePacked(voteType, nextEpoch));
return putVote(vote);
}
function tryVote(Vote voteType, address addr) private returns (bool) {
bytes32 vote = keccak256(abi.encodePacked(voteType, nextEpoch, addr));
return putVote(vote);
}
function tryVote(Vote voteType, uint num) private returns (bool) {
bytes32 vote = keccak256(abi.encodePacked(voteType, nextEpoch, num));
return putVote(vote);
}
function tryVote(Vote voteType, bytes32 hash, address to, uint value) private returns (bool) {
bytes32 vote = keccak256(abi.encodePacked(voteType, hash, to, value));
return putVote(vote);
}
function tryConfirm(Vote voteType) private returns (bool) {
bytes32 vote = keccak256(abi.encodePacked(voteType, nextEpoch));
return putConfirm(vote);
}
function tryConfirm(Vote voteType, uint x, uint y) private returns (bool) {
bytes32 vote = keccak256(abi.encodePacked(voteType, nextEpoch, x, y));
return putConfirm(vote);
}
function putVote(bytes32 vote) private returns (bool) {
bytes32 personalVote = personalizeVote(vote);
require(!votes[personalVote], "Voted already");
votes[personalVote] = true;
if (votesCount[vote] == getThreshold()) {
votesCount[vote] = 2 ** 255;
return true;
} else {
votesCount[vote]++;
return false;
}
}
function putConfirm(bytes32 vote) private returns (bool) {
bytes32 personalVote = personalizeVote(vote);
require(!votes[personalVote], "Confirmed already");
votes[personalVote] = true;
if (votesCount[vote] == getNextThreshold()) {
votesCount[vote] = 2 ** 255;
return true;
} else {
votesCount[vote]++;
return false;
}
}
function personalizeVote(bytes32 vote) private view returns (bytes32) {
return keccak256(abi.encodePacked(vote, msg.sender));
}
}

View File

@ -0,0 +1,127 @@
pragma solidity ^0.5.0;

import "./BasicBridge.sol";

/// @title Government
/// @notice Internal state-machine transitions of the bridge: keygen/epoch
/// confirmations and validator-set governance. All functions are internal
/// and are invoked by MessageHandler after signature verification.
contract Government is BasicBridge {
    // Action identifiers; order must match the off-chain message encoding
    // dispatched by MessageHandler.
    enum Action {
        CONFIRM_KEYGEN,
        CONFIRM_FUNDS_TRANSFER,
        CONFIRM_CLOSE_EPOCH,
        VOTE_START_VOTING,
        VOTE_ADD_VALIDATOR,
        VOTE_REMOVE_VALIDATOR,
        VOTE_CHANGE_THRESHOLD,
        VOTE_CHANGE_RANGE_SIZE,
        VOTE_CHANGE_CLOSE_EPOCH,
        VOTE_START_KEYGEN,
        VOTE_CANCEL_KEYGEN,
        TRANSFER
    }

    // Records the generated key for the next epoch. The very first epoch
    // starts immediately; later epochs must first transfer funds over.
    function _confirmKeygen(uint x, uint y) internal keygen {
        states[nextEpoch].x = x;
        states[nextEpoch].y = y;
        states[nextEpoch].nonce = UPPER_BOUND;
        if (nextEpoch == 1) {
            status = Status.READY;
            states[nextEpoch].startBlock = uint32(block.number);
            epoch = nextEpoch;
            emit EpochStart(epoch, x, y);
        }
        else {
            status = Status.FUNDS_TRANSFER;
            emit NewFundsTransfer(epoch, nextEpoch);
        }
    }

    // Completes the epoch switch once funds were moved to the new key.
    function _confirmFundsTransfer() internal fundsTransfer {
        require(epoch > 0, "First epoch does not need funds transfer");
        status = Status.READY;
        states[nextEpoch].startBlock = uint32(block.number);
        epoch = nextEpoch;
        emit EpochStart(epoch, getX(), getY());
    }

    // Acknowledges that the other side of the bridge has been blocked.
    function _confirmCloseEpoch() internal closingEpoch {
        status = Status.VOTING;
        emit EpochEnd(epoch);
    }

    // Opens voting for the next epoch, seeding it with the current
    // configuration; goes through CLOSING_EPOCH first when configured.
    function _startVoting() internal ready {
        states[epoch].endBlock = uint32(block.number);
        nextEpoch++;
        states[nextEpoch].threshold = getThreshold();
        states[nextEpoch].validators = getValidators();
        states[nextEpoch].rangeSize = getRangeSize();
        states[nextEpoch].closeEpoch = getCloseEpoch();

        if (getCloseEpoch()) {
            status = Status.CLOSING_EPOCH;
            emit ForceSign();
            emit EpochClose(epoch);
        } else {
            status = Status.VOTING;
            emit ForceSign();
            emit EpochEnd(epoch);
        }
    }

    function _addValidator(address validator) internal voting {
        require(getNextPartyId(validator) == 0, "Already a validator");
        states[nextEpoch].validators.push(validator);
    }

    // Swap-with-last removal from the next epoch's validator array.
    function _removeValidator(address validator) internal voting {
        require(getNextPartyId(validator) != 0, "Already not a validator");
        uint16 lastPartyId = getNextParties() - 1;
        for (uint i = 0; i < lastPartyId; i++) {
            if (states[nextEpoch].validators[i] == validator) {
                states[nextEpoch].validators[i] = getNextValidators()[lastPartyId];
                break;
            }
        }
        delete states[nextEpoch].validators[lastPartyId];
        states[nextEpoch].validators.length--;
    }

    function _changeThreshold(uint16 threshold) internal voting {
        require(threshold > 0, "Invalid threshold value");
        states[nextEpoch].threshold = threshold;
    }

    function _changeRangeSize(uint16 rangeSize) internal voting {
        require(rangeSize > 0, "Invalid range size");
        states[nextEpoch].rangeSize = rangeSize;
    }

    function _changeCloseEpoch(bool closeEpoch) internal voting {
        states[nextEpoch].closeEpoch = closeEpoch;
    }

    // Freezes the next-epoch configuration and begins key generation.
    function _startKeygen() internal voting {
        require(getNextThreshold() <= getNextParties(), "Invalid threshold number");
        status = Status.KEYGEN;
        emit NewEpoch(epoch, nextEpoch);
    }

    function _cancelKeygen() internal keygen {
        status = Status.VOTING;
        emit NewEpochCancelled(nextEpoch);
    }

    // Pays out a transfer; if the bridge balance cannot cover it, an
    // allowance is granted instead — NOTE(review): presumably so the
    // recipient can pull funds later; confirm with the off-chain flow.
    function _transfer(address to, uint96 value) internal {
        if (tokenContract.balanceOf(address(this)) >= value) {
            tokenContract.transfer(to, value);
        } else {
            tokenContract.approve(to, value);
        }
    }
}

View File

@ -0,0 +1,39 @@
pragma solidity ^0.5.0;

/// @title MessageDecoder
/// @notice Assembly helpers that extract typed fields from a packed message.
/// In memory `message` points at its 32-byte length word, so payload byte i
/// lives at message + 32 + i; mload(add(message, 32 + i - 31)) loads a word
/// whose LAST byte is payload byte i, and truncating the result to the
/// return type keeps exactly the field's bytes.
contract MessageDecoder {
    // [0] - action type
    // [1,2] - epoch
    // [3..] - payload

    // uint16 stored big-endian at payload bytes [3,4].
    function _decodeUint16(bytes memory message) pure internal returns (uint16 a) {
        assembly {
            a := mload(add(message, 5))
        }
    }

    // Boolean flag: lowest bit of payload byte [3].
    function _decodeBoolean(bytes memory message) pure internal returns (bool a) {
        assembly {
            a := and(mload(add(message, 4)), 1)
        }
    }

    // Address stored at payload bytes [3,22] (low 20 bytes of the word).
    function _decodeAddress(bytes memory message) pure internal returns (address a) {
        assembly {
            a := mload(add(message, 23))
        }
    }

    // Two full 32-byte words: x at payload [3,34], y at payload [35,66].
    function _decodeKeygen(bytes memory message) pure internal returns (uint a, uint b) {
        assembly {
            a := mload(add(message, 35))
            b := mload(add(message, 67))
        }
    }

    // Recipient address at payload [35,54], uint96 value at payload [55,66].
    function _decodeTransfer(bytes memory message) pure internal returns (address a, uint96 b) {
        assembly {
            a := mload(add(message, 55))
            b := mload(add(message, 67))
        }
    }
}

View File

@ -0,0 +1,131 @@
pragma solidity ^0.5.0;

import "./Government.sol";
import "./MessageDecoder.sol";

/// @title MessageHandler
/// @notice Entry point for validator-signed bridge control messages.
/// Verifies a threshold of validator signatures over a packed message
/// ([0] action, [1,2] epoch, [3..] payload), guards against replay, and
/// dispatches to the matching Government transition.
contract MessageHandler is Government, MessageDecoder {
    uint constant SIGNATURE_SIZE = 65;

    // Replay protection: hash of every message already applied.
    mapping(bytes32 => bool) public handledMessages;

    /// Validates signatures over `message` and executes the encoded action.
    function applyMessage(bytes memory message, bytes memory signatures) public {
        (bytes32 msgHash, uint16 msgEpoch) = checkSignedMessage(message, signatures);
        handledMessages[msgHash] = true;

        Action msgAction = Action(uint8(message[0]));

        // Keygen-related messages target the upcoming epoch, transfers may
        // reference any past epoch, everything else the current one.
        if (msgAction == Action.CONFIRM_KEYGEN || msgAction == Action.VOTE_CANCEL_KEYGEN) {
            require(msgEpoch == nextEpoch, "Incorrect message epoch");
        } else if (msgAction == Action.TRANSFER) {
            require(msgEpoch <= epoch, "Incorrect message epoch");
        } else {
            require(msgEpoch == epoch, "Incorrect message epoch");
        }

        if (msgAction == Action.CONFIRM_KEYGEN) {
            // [3,34] - x, [35,66] - y
            require(message.length == 67, "Incorrect message length");
            (uint x, uint y) = _decodeKeygen(message);
            _confirmKeygen(x, y);
        } else if (msgAction == Action.CONFIRM_FUNDS_TRANSFER) {
            require(message.length == 3, "Incorrect message length");
            _confirmFundsTransfer();
        } else if (msgAction == Action.CONFIRM_CLOSE_EPOCH) {
            require(message.length == 3, "Incorrect message length");
            _confirmCloseEpoch();
        } else if (msgAction == Action.VOTE_START_VOTING) {
            require(message.length == 3, "Incorrect message length");
            _startVoting();
        } else if (msgAction == Action.VOTE_ADD_VALIDATOR) {
            // [3,22] - address, [23,31] - extra data
            require(message.length == 32, "Incorrect message length");
            address validator = _decodeAddress(message);
            _addValidator(validator);
        } else if (msgAction == Action.VOTE_REMOVE_VALIDATOR) {
            // [3,22] - address, [23,31] - extra data
            require(message.length == 32, "Incorrect message length");
            address validator = _decodeAddress(message);
            _removeValidator(validator);
        } else if (msgAction == Action.VOTE_CHANGE_THRESHOLD) {
            // [3,4] - threshold, [5,31] - extra data
            require(message.length == 32, "Incorrect message length");
            uint16 threshold = _decodeUint16(message);
            _changeThreshold(threshold);
        } else if (msgAction == Action.VOTE_CHANGE_RANGE_SIZE) {
            // [3,4] - rangeSize, [5,31] - extra data
            require(message.length == 32, "Incorrect message length");
            uint16 rangeSize = _decodeUint16(message);
            _changeRangeSize(rangeSize);
        } else if (msgAction == Action.VOTE_CHANGE_CLOSE_EPOCH) {
            // [3] - closeEpoch, [4,31] - extra data
            require(message.length == 32, "Incorrect message length");
            bool closeEpoch = _decodeBoolean(message);
            _changeCloseEpoch(closeEpoch);
        } else if (msgAction == Action.VOTE_START_KEYGEN) {
            require(message.length == 3, "Incorrect message length");
            _startKeygen();
        } else if (msgAction == Action.VOTE_CANCEL_KEYGEN) {
            require(message.length == 3, "Incorrect message length");
            _cancelKeygen();
        } else if (msgAction == Action.TRANSFER) {
            // [3,34] - txHash, [35,54] - address, [55,66] - value
            require(message.length == 67, "Incorrect message length");
            (address to, uint96 value) = _decodeTransfer(message);
            _transfer(to, value);
        } else {
            revert("Unknown message action");
        }
    }

    /// Checks that `signatures` holds at least threshold(msgEpoch) valid,
    /// distinct validator signatures over `message`; returns (hash, epoch).
    function checkSignedMessage(bytes memory message, bytes memory signatures) view public returns (bytes32, uint16) {
        require(signatures.length % SIGNATURE_SIZE == 0, "Incorrect signatures length");

        // Only the three fixed message sizes are valid, and the EIP-191
        // prefix length must match the message length exactly.
        bytes32 msgHash;
        if (message.length == 3) {
            msgHash = keccak256(abi.encodePacked("\x19Ethereum Signed Message:\n3", message));
        } else if (message.length == 32) {
            msgHash = keccak256(abi.encodePacked("\x19Ethereum Signed Message:\n32", message));
        } else if (message.length == 67) {
            msgHash = keccak256(abi.encodePacked("\x19Ethereum Signed Message:\n67", message));
        } else {
            revert("Incorrect message length");
        }
        require(!handledMessages[msgHash], "Tx was already handled");

        // Epoch is big-endian at message bytes [1,2]; the loaded word's low
        // 16 bits are exactly those bytes.
        uint16 msgEpoch;
        assembly {
            msgEpoch := mload(add(message, 3))
        }
        require(msgEpoch <= nextEpoch, "Invalid epoch number");

        uint signaturesNum = signatures.length / SIGNATURE_SIZE;
        require(signaturesNum >= getThreshold(msgEpoch), "Not enough signatures");

        address[] memory possibleValidators = getValidators(msgEpoch);

        bytes32 r;
        bytes32 s;
        uint8 v;
        for (uint i = 0; i < signaturesNum; i++) {
            uint offset = i * SIGNATURE_SIZE;
            assembly {
                r := mload(add(add(signatures, 32), offset))
                s := mload(add(add(signatures, 64), offset))
                v := byte(0, mload(add(add(signatures, 96), offset)))
            }
            address signer = ecrecover(msgHash, v, r, s);
            // Fix: ecrecover returns address(0) for a malformed signature.
            // A `delete`d slot below is also address(0), so without this
            // check one honest signature plus garbage signatures could
            // satisfy the threshold.
            require(signer != address(0), "Invalid signature");
            uint j;
            for (j = 0; j < possibleValidators.length; j++) {
                if (possibleValidators[j] == signer) {
                    // Clear the slot so one validator cannot be counted twice.
                    delete possibleValidators[j];
                    break;
                }
            }
            require(j != possibleValidators.length, "Not a validator signature");
        }

        return (msgHash, msgEpoch);
    }
}

View File

@ -1,20 +1,21 @@
const Bridge = artifacts.require('Bridge')
const addresses = Object.entries(process.env)
.filter(([ key ]) => key.startsWith('VALIDATOR_ADDRESS'))
.map(([ , value ]) => value)
.filter(([key]) => key.startsWith('VALIDATOR_ADDRESS'))
.map(([, value]) => value)
const {
THRESHOLD, HOME_TOKEN_ADDRESS, MIN_TX_LIMIT, MAX_TX_LIMIT, BLOCKS_RANGE_SIZE
THRESHOLD, HOME_TOKEN_ADDRESS, MIN_TX_LIMIT, MAX_TX_LIMIT, BLOCKS_RANGE_SIZE, CLOSE_EPOCH_FLAG
} = process.env
module.exports = deployer => {
module.exports = (deployer) => {
deployer.deploy(
Bridge,
THRESHOLD,
addresses,
HOME_TOKEN_ADDRESS,
[ MIN_TX_LIMIT, MAX_TX_LIMIT ],
BLOCKS_RANGE_SIZE
[MIN_TX_LIMIT, MAX_TX_LIMIT],
BLOCKS_RANGE_SIZE,
CLOSE_EPOCH_FLAG === 'true'
)
}

View File

@ -0,0 +1,3 @@
{
"extends": "solhint:default"
}

View File

@ -9,8 +9,6 @@ RUN npm install truffle-hdwallet-provider
RUN truffle obtain --solc 0.5.9
COPY truffle-config-build.js /build/truffle-config.js
RUN mkdir temp \
&& cd temp \
&& truffle init \
@ -19,6 +17,7 @@ RUN mkdir temp \
&& cd .. \
&& rm -rf temp
COPY truffle-config-build.js /build/truffle-config.js
COPY contracts /build/contracts
RUN truffle compile

View File

@ -0,0 +1,17 @@
pragma solidity ^0.5.0;

/// @title KeyValueStorage
/// @notice Shared byte storage namespaced by writer address: each sender can
/// only write under its own (id, key) slots; anyone may read any slot.
contract KeyValueStorage {
    mapping(bytes32 => bytes) public db;

    // Derives the storage slot for a (writer, id, key) triple.
    function encodeKey(address sender, bytes32 id, bytes32 key) private pure returns (bytes32 hash) {
        return keccak256(abi.encodePacked(sender, id, key));
    }

    // Stores `data` under the caller's own namespace.
    function setData(bytes32 id, bytes32 key, bytes memory data) public {
        bytes32 slot = encodeKey(msg.sender, id, key);
        db[slot] = data;
    }

    // Reads data previously stored by `from`.
    function getData(address from, bytes32 id, bytes32 key) view public returns (bytes memory) {
        bytes32 slot = encodeKey(from, id, key);
        return db[slot];
    }
}

View File

@ -1,42 +1,7 @@
pragma solidity ^0.5.0;
contract SharedDB {
mapping(bytes32 => bytes) public db;
mapping(bytes32 => uint) public signupsCount;
mapping(bytes32 => uint) public dbSignups;
import "./KeyValueStorage.sol";
import "./SignedMessageStorage.sol";
import "./SignupStorage.sol";
function signupSign(bytes32 hash) public {
require(dbSignups[keccak256(abi.encodePacked(msg.sender, hash))] == 0, "Already signuped");
dbSignups[keccak256(abi.encodePacked(msg.sender, hash))] = ++signupsCount[hash];
}
function getSignupNumber(bytes32 hash, address[] memory validators, address validator) view public returns (uint) {
if (dbSignups[keccak256(abi.encodePacked(validator, hash))] == 0)
return 0;
uint id = 1;
for (uint i = 0; i < validators.length; i++) {
uint vid = dbSignups[keccak256(abi.encodePacked(validators[i], hash))];
if (vid > 0 && vid < dbSignups[keccak256(abi.encodePacked(validator, hash))])
id++;
}
return id;
}
function getSignupAddress(bytes32 hash, address[] memory validators, uint signupNumber) view public returns (address) {
for (uint i = 0; i < validators.length; i++) {
if (getSignupNumber(hash, validators, validators[i]) == signupNumber) {
return validators[i];
}
}
return address(0);
}
function setData(bytes32 hash, bytes32 key, bytes memory data) public {
db[keccak256(abi.encodePacked(msg.sender, hash, key))] = data;
}
function getData(address from, bytes32 hash, bytes32 key) view public returns (bytes memory) {
return db[keccak256(abi.encodePacked(from, hash, key))];
}
}
contract SharedDB is KeyValueStorage, SignedMessageStorage, SignupStorage {}

View File

@ -0,0 +1,56 @@
pragma solidity ^0.5.0;

/// @title SignedMessageStorage
/// @notice Side-chain collection point for validator signatures over bridge
/// messages. Each validator submits its own signature; once enough are
/// gathered they can be concatenated and relayed to the home bridge.
contract SignedMessageStorage {
    // Emitted the first time any signature for a given message arrives.
    event NewMessage(bytes32 msgHash);

    struct SignedMessage {
        bytes message;
        // validator => 65-byte r||s||v signature
        mapping(address => bytes) signatures;
    }

    mapping(bytes32 => SignedMessage) public signedMessages;

    // Stores the caller's signature over `message`. The signature must
    // recover to msg.sender, so entries cannot be forged for other
    // validators. Message sizes mirror the home-side MessageHandler.
    function addSignature(bytes memory message, bytes memory rsv) public {
        require(message.length > 0, "Incorrect message length");
        require(rsv.length == 65, "Incorrect signature length");

        bytes32 msgHash;
        if (message.length == 3) {
            msgHash = keccak256(abi.encodePacked("\x19Ethereum Signed Message:\n3", message));
        } else if (message.length == 32) {
            msgHash = keccak256(abi.encodePacked("\x19Ethereum Signed Message:\n32", message));
        } else if (message.length == 67) {
            msgHash = keccak256(abi.encodePacked("\x19Ethereum Signed Message:\n67", message));
        } else {
            revert("Incorrect message length");
        }

        // Split the packed r||s||v signature for ecrecover.
        bytes32 r;
        bytes32 s;
        uint8 v;
        assembly {
            r := mload(add(rsv, 32))
            s := mload(add(rsv, 64))
            v := byte(0, mload(add(rsv, 96)))
        }
        require(ecrecover(msgHash, v, r, s) == msg.sender);

        if (signedMessages[msgHash].message.length == 0) {
            signedMessages[msgHash].message = message;

            emit NewMessage(msgHash);
        }
        signedMessages[msgHash].signatures[msg.sender] = rsv;
    }

    // Concatenates the stored signatures of `validators` for `msgHash`;
    // validators that have not signed contribute zero bytes.
    function getSignatures(bytes32 msgHash, address[] memory validators) public view returns (bytes memory) {
        bytes memory result;
        for (uint i = 0; i < validators.length; i++) {
            result = abi.encodePacked(result, signedMessages[msgHash].signatures[validators[i]]);
        }
        return result;
    }
}

View File

@ -0,0 +1,52 @@
pragma solidity ^0.5.0;

/// @title SignupStorage
/// @notice First-come-first-served signup registry: validators register for a
/// round identified by `hash` and get a stable 1-based participation order.
contract SignupStorage {
    struct SignupsCounter {
        uint16 count;                 // total signups for the hash
        mapping(address => uint16) id; // arrival index per address (1-based, 0 = none)
    }

    mapping(bytes32 => SignupsCounter) public signups;

    // Registers the caller for `hash`; each address may sign up once.
    function signup(bytes32 hash) public {
        require(signups[hash].id[msg.sender] == 0, "Already signuped");
        signups[hash].id[msg.sender] = ++signups[hash].count;
    }

    function isSignuped(bytes32 hash) public view returns (bool) {
        return isSignuped(hash, msg.sender);
    }

    function isSignuped(bytes32 hash, address validator) public view returns (bool) {
        return signups[hash].id[validator] > 0;
    }

    // Rank of `validator` among the given `validators` by arrival order
    // (1-based); 0 if the validator never signed up.
    function getSignupNumber(
        bytes32 hash,
        address[] memory validators,
        address validator
    ) view public returns (uint16) {
        if (signups[hash].id[validator] == 0)
            return 0;
        uint16 id = 1;
        for (uint i = 0; i < validators.length; i++) {
            uint16 vid = signups[hash].id[validators[i]];
            if (vid > 0 && vid < signups[hash].id[validator])
                id++;
        }
        return id;
    }

    // Inverse lookup: the validator holding rank `signupNumber`, or
    // address(0) if no validator has that rank. O(n^2) scan — acceptable
    // for the small validator sets used here.
    function getSignupAddress(
        bytes32 hash,
        address[] memory validators,
        uint16 signupNumber
    ) view public returns (address) {
        for (uint i = 0; i < validators.length; i++) {
            if (getSignupNumber(hash, validators, validators[i]) == signupNumber) {
                return validators[i];
            }
        }
        return address(0);
    }
}

View File

@ -1,5 +1,5 @@
const SharedDB = artifacts.require('SharedDB')
module.exports = deployer => {
module.exports = (deployer) => {
deployer.deploy(SharedDB)
}

View File

@ -9,8 +9,6 @@ RUN npm install truffle-hdwallet-provider
RUN truffle obtain --solc 0.5.9
COPY truffle-config-build.js /build/truffle-config.js
RUN mkdir temp \
&& cd temp \
&& truffle init \
@ -19,6 +17,7 @@ RUN mkdir temp \
&& cd .. \
&& rm -rf temp
COPY truffle-config-build.js /build/truffle-config.js
COPY contracts/openzeppelin-solidity /build/contracts/openzeppelin-solidity
RUN truffle compile

View File

@ -0,0 +1,8 @@
{
"extends": [
"../../.eslintrc"
],
"rules": {
"no-console": 0
}
}

View File

@ -0,0 +1,37 @@
# Local development topology: two ganache chains (side + home) seeded from the
# same deterministic mnemonic, plus the side-oracle that relays signed
# messages from the side-chain SharedDB to the home-chain bridge.
version: '3.0'
services:
  # Side-chain JSON-RPC node (network id 33); chain state persisted in the
  # ganache_side_data volume so restarts keep deployed contracts.
  ganache_side:
    image: trufflesuite/ganache-cli:latest
    command: ["-m", "shrug dwarf easily blade trigger lucky reopen cage lake scatter desk boat", "-i", "33", "--db", "/app/db", "-b", "${BLOCK_TIME}", "--noVMErrorsOnRPCResponse"]
    networks:
      - ethereum_side_rpc_net
    ports:
      - '7545:8545'
    volumes:
      - 'ganache_side_data:/app/db'
  # Home-chain JSON-RPC node (network id 44); same mnemonic, separate volume.
  ganache_home:
    image: trufflesuite/ganache-cli:latest
    command: ["-m", "shrug dwarf easily blade trigger lucky reopen cage lake scatter desk boat", "-i", "44", "--db", "/app/db", "-b", "${BLOCK_TIME}", "--noVMErrorsOnRPCResponse"]
    networks:
      - ethereum_home_rpc_net
    ports:
      - '8545:8545'
    volumes:
      - 'ganache_home_data:/app/db'
  # The oracle needs RPC access to both chains, hence both networks.
  side-oracle:
    build: side-oracle
    image: side-oracle
    env_file: side-oracle/.env.development
    networks:
      - ethereum_side_rpc_net
      - ethereum_home_rpc_net
# Networks and volumes are declared external: they are created outside this
# compose file and survive `docker-compose down`.
networks:
  ethereum_side_rpc_net:
    external: true
  ethereum_home_rpc_net:
    external: true
volumes:
  ganache_side_data:
    external: true
  ganache_home_data:
    external: true

View File

@ -0,0 +1,9 @@
HOME_RPC_URL=http://ganache_home:8545
HOME_BRIDGE_ADDRESS=0x44c158FE850821ae69DaF37AADF5c539e9d0025B
HOME_PRIVATE_KEY=fd5c416a8d497a343d4ee1ac7a8f407450ae115b0001cdd6f2dad715baa3bc25
SIDE_RPC_URL=http://ganache_side:8545
SIDE_SHARED_DB_ADDRESS=0xd5fE0D28e058D375b0b038fFbB446Da37E85fFdc
SIDE_MAX_FETCH_RANGE_SIZE=100

View File

@ -0,0 +1,14 @@
FROM node:10.16.0-alpine
WORKDIR /side-oracle
# --no-cache fetches the package index transiently instead of persisting it in
# the image layer, replacing the separate `apk update` step (smaller image, no
# stale index baked into the layer).
RUN apk add --no-cache libssl1.1 libressl-dev curl
# Copy only the manifest first so `npm install` is cached until deps change.
COPY ./package.json /side-oracle/
RUN npm install
COPY ./index.js ./
ENTRYPOINT ["node", "index.js"]

View File

@ -0,0 +1,117 @@
const ethers = require('ethers')

// Connection parameters supplied via environment (see .env.development).
const {
  HOME_PRIVATE_KEY, HOME_RPC_URL, HOME_BRIDGE_ADDRESS, SIDE_RPC_URL, SIDE_SHARED_DB_ADDRESS
} = process.env
// Upper bound on how many side-chain blocks one getLogs call may cover.
const SIDE_MAX_FETCH_RANGE_SIZE = parseInt(process.env.SIDE_MAX_FETCH_RANGE_SIZE, 10)

// Minimal human-readable ABIs covering only what this oracle calls.
const bridgeAbi = [
  'function applyMessage(bytes message, bytes signatures)',
  'function getThreshold(uint16 epoch) view returns (uint16)',
  'function getValidators(uint16 epoch) view returns (address[])'
]
const sharedDbAbi = [
  'event NewMessage(bytes32 msgHash)',
  'function signedMessages(bytes32 hash) view returns (bytes)',
  'function getSignatures(bytes32 msgHash, address[] validators) view returns (bytes)'
]

// Mutable module state, populated by initialize().
let homeProvider
let sideProvider
let bridge
let sharedDb
let homeWallet
// Next home-chain tx nonce, tracked locally so sequential sends don't clash.
let nonce
// Next side-chain block to scan for events.
let blockNumber = 0
// Resolves after approximately `ms` milliseconds; await to pause a loop.
async function delay(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms)
  })
}
// Handles one NewMessage event from the side-chain SharedDB: polls until the
// stored signatures reach the epoch's threshold, then relays the message to
// the home bridge via applyMessage. An empty signature set ('0x') aborts.
async function handleNewMessage(event) {
  const { msgHash } = event.values
  const message = await sharedDb.signedMessages(msgHash)
  // Hex chars 4..8 of the message encode the epoch (bytes 2..3).
  const epoch = parseInt(message.slice(4, 8), 16)
  const [threshold, validators] = await Promise.all([
    bridge.getThreshold(epoch),
    bridge.getValidators(epoch)
  ])
  while (true) {
    const signatures = await sharedDb.getSignatures(msgHash, validators)
    // Length 2 is just the '0x' prefix: none of this epoch's validators signed.
    if (signatures.length === 2) {
      console.log('Skipping event')
      break
    }
    // Each signature occupies 130 hex chars after the '0x' prefix.
    if ((signatures.length - 2) / 130 >= threshold) {
      console.log('Sending applyMessage request')
      const tx = await bridge.applyMessage(message, signatures, {
        gasLimit: 1000000,
        nonce
      })
      const receipt = await tx.wait()
      console.log(`Used gas: ${receipt.gasUsed.toNumber()}`)
      nonce += 1
      break
    }
    // Threshold not reached yet: pause before re-polling instead of
    // busy-looping against the side-chain RPC.
    await delay(1000)
  }
}
// Builds providers, the home wallet and both contract handles, retrying every
// second until construction succeeds; also primes the local nonce counter
// from the home account's current transaction count.
async function initialize() {
  while (true) {
    try {
      sideProvider = new ethers.providers.JsonRpcProvider(SIDE_RPC_URL)
      homeProvider = new ethers.providers.JsonRpcProvider(HOME_RPC_URL)
      homeWallet = new ethers.Wallet(HOME_PRIVATE_KEY, homeProvider)
      bridge = new ethers.Contract(HOME_BRIDGE_ADDRESS, bridgeAbi, homeWallet)
      sharedDb = new ethers.Contract(SIDE_SHARED_DB_ADDRESS, sharedDbAbi, sideProvider)
      nonce = await homeWallet.getTransactionCount()
      break
    } catch (e) {
      console.log('Cannot create providers')
      await delay(1000)
    }
  }
}
// One scan iteration: fetches NewMessage logs for the next block range of the
// side chain, processes them in order, then advances blockNumber past the
// scanned range. Returns early (without advancing) when no new block exists.
async function loop() {
  const latestBlockNumber = await sideProvider.getBlockNumber()
  if (latestBlockNumber < blockNumber) {
    console.log(`No block after ${latestBlockNumber}`)
    return
  }
  // Cap the range so a cold start does not issue an unbounded log query.
  const endBlock = Math.min(latestBlockNumber, blockNumber + SIDE_MAX_FETCH_RANGE_SIZE - 1)
  console.log(`Watching events in blocks #${blockNumber}-${endBlock}`)
  const bridgeEvents = (await sideProvider.getLogs({
    address: SIDE_SHARED_DB_ADDRESS,
    fromBlock: blockNumber,
    toBlock: endBlock,
    topics: [
      sharedDb.interface.events.NewMessage.topic
    ]
  }))
  // Sequential processing keeps the locally tracked nonce consistent.
  for (let i = 0; i < bridgeEvents.length; i += 1) {
    const event = sharedDb.interface.parseLog(bridgeEvents[i])
    console.log('Consumed event', event)
    await handleNewMessage(event)
  }
  blockNumber = endBlock + 1
}
// Entry point: wire up providers/contracts once, then poll the side chain
// forever with a fixed 2-second pause before each scan.
async function main() {
  await initialize()
  for (;;) {
    await delay(2000)
    await loop()
  }
}

main()

View File

@ -0,0 +1,10 @@
{
"name": "side-oracle",
"version": "0.0.1",
"dependencies": {
"ethers": "4.0.33"
},
"engines": {
"node": ">=10.6.0"
}
}

View File

@ -9,6 +9,6 @@ COPY ./bncWatcher/package.json /watcher/
RUN npm install
COPY ./bncWatcher/bncWatcher.js ./shared/db.js ./shared/logger.js ./shared/crypto.js /watcher/
COPY ./bncWatcher/bncWatcher.js ./shared/db.js ./shared/logger.js ./shared/crypto.js ./shared/amqp.js ./shared/wait.js /watcher/
ENTRYPOINT ["node", "bncWatcher.js"]

View File

@ -6,90 +6,175 @@ const { computeAddress } = require('ethers').utils
const logger = require('./logger')
const redis = require('./db')
const { publicKeyToAddress } = require('./crypto')
const { delay, retry } = require('./wait')
const { connectRabbit, assertQueue } = require('./amqp')
const { FOREIGN_URL, PROXY_URL, FOREIGN_ASSET } = process.env
const {
FOREIGN_URL, PROXY_URL, FOREIGN_ASSET, RABBITMQ_URL
} = process.env
const FOREIGN_FETCH_INTERVAL = parseInt(process.env.FOREIGN_FETCH_INTERVAL, 10)
const FOREIGN_FETCH_BLOCK_TIME_OFFSET = parseInt(process.env.FOREIGN_FETCH_BLOCK_TIME_OFFSET, 10)
const FOREIGN_FETCH_MAX_TIME_INTERVAL = parseInt(process.env.FOREIGN_FETCH_MAX_TIME_INTERVAL, 10)
const foreignHttpClient = axios.create({ baseURL: FOREIGN_URL })
const proxyHttpClient = axios.create({ baseURL: PROXY_URL })
async function initialize () {
if (await redis.get('foreignTime') === null) {
logger.info('Set default foreign time')
await redis.set('foreignTime', Date.now() - 2 * 30 * 24 * 60 * 60 * 1000)
let channel
let epochTimeIntervalsQueue
// Derives the foreign (Binance-side) address for the given epoch from the
// keygen output file's stored public key. Returns null when the keys file is
// missing or unreadable — i.e. this validator holds no key for that epoch.
function getForeignAddress(epoch) {
  try {
    const keysStore = JSON.parse(fs.readFileSync(`/keys/keys${epoch}.store`))
    return publicKeyToAddress(keysStore[5])
  } catch (e) {
    return null
  }
}
async function main () {
const newTransactions = await fetchNewTransactions()
if (newTransactions === null || newTransactions.length === 0) {
await new Promise(r => setTimeout(r, 5000))
return
}
if (newTransactions.length)
logger.info(`Found ${newTransactions.length} new transactions`)
else
logger.debug(`Found 0 new transactions`)
for (const tx of newTransactions.reverse()) {
if (tx.memo !== 'funding') {
const publicKeyEncoded = (await getTx(tx.txHash)).signatures[0].pub_key.value
await proxyHttpClient
.post('/transfer', {
to: computeAddress(Buffer.from(publicKeyEncoded, 'base64')),
value: new BN(tx.value).multipliedBy(10 ** 18).integerValue(),
hash: `0x${tx.txHash}`
})
}
await redis.set('foreignTime', Date.parse(tx.timeStamp))
}
}
function getTx(hash) {
return foreignHttpClient
.get(`/api/v1/tx/${hash}`, {
async function getTx(hash) {
const response = await retry(() => foreignHttpClient.get(
`/api/v1/tx/${hash}`,
{
params: {
format: 'json'
}
})
.then(res => res.data.tx.value)
.catch(() => getTx(hash))
}
))
return response.data.tx.value
}
async function fetchNewTransactions () {
// Returns the latest foreign-chain block time in epoch milliseconds, shifted
// back by FOREIGN_FETCH_BLOCK_TIME_OFFSET. Retries the HTTP call until it
// succeeds (via the shared retry helper).
async function getBlockTime() {
  const response = await retry(() => foreignHttpClient.get('/api/v1/time'))
  return Date.parse(response.data.block_time) - FOREIGN_FETCH_BLOCK_TIME_OFFSET
}
async function fetchNewTransactions(address, startTime, endTime) {
logger.debug('Fetching new transactions')
const startTime = parseInt(await redis.get('foreignTime')) + 1
const address = getLastForeignAddress()
if (address === null)
return null
logger.debug('Sending api transactions request')
return foreignHttpClient
.get('/api/v1/transactions', {
params: {
address,
side: 'RECEIVE',
txAsset: FOREIGN_ASSET,
txType: 'TRANSFER',
startTime,
endTime: startTime + 3 * 30 * 24 * 60 * 60 * 1000,
}
})
.then(res => res.data.tx)
.catch(() => fetchNewTransactions())
}
function getLastForeignAddress () {
const epoch = Math.max(0, ...fs.readdirSync('/keys').map(x => parseInt(x.split('.')[0].substr(4))))
if (epoch === 0)
return null
const keysFile = `/keys/keys${epoch}.store`
const publicKey = JSON.parse(fs.readFileSync(keysFile))[5]
return publicKeyToAddress(publicKey)
}
initialize().then(async () => {
while (true) {
await main()
const params = {
address,
side: 'RECEIVE',
txAsset: FOREIGN_ASSET,
txType: 'TRANSFER',
startTime,
endTime
}
})
logger.trace('Transactions fetch params %o', params)
return (
await retry(() => foreignHttpClient.get('/api/v1/transactions', { params }))
).data.tx
}
// Drains epochTimeIntervalsQueue, folding consecutive messages for a single
// epoch into one [startTime, endTime] fetch interval.
// Message handling, in order of precedence:
//  - a message carrying startTime resets that epoch's stored foreign time in
//    redis and stops consuming;
//  - the first prolongedTime message fixes the batch's epoch and loads its
//    start time from redis (accepted unconditionally);
//  - further prolongedTime messages extend endTime while the interval stays
//    under FOREIGN_FETCH_MAX_TIME_INTERVAL and behind the latest BNC block;
//  - a message that cannot be consumed now (epoch mismatch, interval full,
//    ahead of chain time) is nack'ed back onto the queue and consumption stops.
// Returns { epoch, startTime, endTime }; fields are null when nothing usable
// was consumed.
async function fetchTimeIntervalsQueue() {
  let epoch = null
  let startTime = null
  let endTime = null
  const lastBncBlockTime = await getBlockTime()
  logger.trace(`Binance last block timestamp ${lastBncBlockTime}`)
  while (true) {
    const msg = await epochTimeIntervalsQueue.get()
    if (msg === false) {
      // Queue is empty.
      break
    }
    const data = JSON.parse(msg.content)
    let accept = false
    logger.trace('Consumed time interval event %o', data)
    if (epoch !== null && epoch !== data.epoch) {
      logger.warn('Two consequently events have different epochs, should not be like this')
      channel.nack(msg, false, true)
      break
    }
    if (data.startTime) {
      logger.trace('Set foreign time', data)
      await redis.set(`foreignTime${data.epoch}`, data.startTime)
      channel.ack(msg)
      break
    }
    if (epoch === null) {
      // First interval message of the batch: pin the epoch and always accept.
      accept = true
      epoch = data.epoch
      startTime = await redis.get(`foreignTime${epoch}`)
      logger.trace(`Retrieved epoch ${epoch} and start time ${startTime} from redis`)
      if (startTime === null) {
        logger.warn(`Empty foreign time for epoch ${epoch}`)
      }
    }
    if ((data.prolongedTime - startTime < FOREIGN_FETCH_MAX_TIME_INTERVAL || accept)
      && data.prolongedTime < lastBncBlockTime) {
      endTime = data.prolongedTime
      channel.ack(msg)
    } else {
      logger.trace('Requeuing current queue message')
      channel.nack(msg, false, true)
      break
    }
  }
  return {
    epoch,
    startTime,
    endTime
  }
}
// Connects to RabbitMQ and asserts the single queue this watcher consumes.
async function initialize() {
  channel = await connectRabbit(RABBITMQ_URL)
  logger.info('Connecting to epoch time intervals queue')
  epochTimeIntervalsQueue = await assertQueue(channel, 'epochTimeIntervalsQueue')
}
// One watcher iteration: derive the next [startTime, endTime] interval from
// the queue, fetch RECEIVE transfers addressed to this epoch's foreign
// address, forward each non-funding transfer to the proxy, then persist
// endTime as the epoch's new start time so the interval is not replayed.
async function loop() {
  const { epoch, startTime, endTime } = await fetchTimeIntervalsQueue()
  if (!startTime || !endTime) {
    logger.debug('Nothing to fetch')
    await delay(FOREIGN_FETCH_INTERVAL)
    return
  }
  const address = getForeignAddress(epoch)
  if (!address) {
    // No keys file for this epoch => this validator holds no key share; still
    // advance the stored time so the interval is consumed exactly once.
    logger.debug('Validator is not included in current epoch')
    await redis.set(`foreignTime${epoch}`, endTime)
    await delay(FOREIGN_FETCH_INTERVAL)
    return
  }
  // NOTE(review): assumes the transactions endpoint always yields an array in
  // `.data.tx`; confirm an empty result is [] and not undefined.
  const transactions = await fetchNewTransactions(address, startTime, endTime)
  if (transactions.length === 0) {
    logger.debug('Found 0 new transactions')
    await redis.set(`foreignTime${epoch}`, endTime)
    await delay(FOREIGN_FETCH_INTERVAL)
    return
  }
  logger.info(`Found ${transactions.length} new transactions`)
  logger.trace('%o', transactions)
  // Iterate in reverse: the API returns newest-first, processing oldest-first.
  for (let i = transactions.length - 1; i >= 0; i -= 1) {
    const tx = transactions[i]
    // Transfers with a non-empty memo (e.g. funding) are not exchange requests.
    if (tx.memo === '') {
      const publicKeyEncoded = (await getTx(tx.txHash)).signatures[0].pub_key.value
      await proxyHttpClient.post('/transfer', {
        to: computeAddress(Buffer.from(publicKeyEncoded, 'base64')),
        value: new BN(tx.value).multipliedBy(10 ** 18).toString(16),
        hash: tx.txHash,
        epoch
      })
    }
  }
  await redis.set(`foreignTime${epoch}`, endTime)
}
// Entry point: connect to RabbitMQ once, then run watcher iterations forever
// (each iteration paces itself via its own delays).
async function main() {
  await initialize()
  for (;;) {
    await loop()
  }
}

main()

View File

@ -2,6 +2,7 @@
"name": "watcher",
"version": "0.0.1",
"dependencies": {
"amqplib": "0.5.3",
"ioredis": "4.10.0",
"axios": "0.19.0",
"bech32": "1.1.3",
@ -9,6 +10,9 @@
"ethers": "4.0.33",
"pino": "5.13.4",
"pino-pretty": "3.2.1"
},
"engines": {
"node": ">=10.6.0"
}
}

View File

@ -9,26 +9,23 @@ services:
- HOME_RPC_URL
- HOME_BRIDGE_ADDRESS
- HOME_TOKEN_ADDRESS
- HOME_CHAIN_ID
- SIDE_RPC_URL
- SIDE_SHARED_DB_ADDRESS
- SIDE_CHAIN_ID
- VALIDATOR_PRIVATE_KEY
- FOREIGN_URL
- FOREIGN_ASSET
- LOG_LEVEL
- "GAS_LIMIT_FACTOR=3"
- "MAX_GAS_LIMIT=6000000"
volumes:
- '../deploy/deploy-test/build/contracts/IERC20.json:/proxy/contracts_data/IERC20.json'
- '../deploy/deploy-home/build/contracts/Bridge.json:/proxy/contracts_data/Bridge.json'
- '../deploy/deploy-side/build/contracts/SharedDB.json:/proxy/contracts_data/SharedDB.json'
- 'GAS_LIMIT_FACTOR=3'
- 'MAX_GAS_LIMIT=6000000'
ports:
- '${VOTES_PROXY_PORT}:8002'
networks:
- test_network
- blockchain_home
- blockchain_side
test_network:
aliases:
- local_proxy
ethereum_home_rpc_net:
ethereum_side_rpc_net:
binance_net:
keygen:
image: keygen-client
build:
@ -36,10 +33,14 @@ services:
context: .
environment:
- 'RABBITMQ_URL=amqp://rabbitmq:5672'
- 'PROXY_URL=http://proxy:8001'
- 'PROXY_URL=http://local_proxy:8001'
- KEYGEN_ATTEMPT_TIMEOUT
- KEYGEN_EPOCH_CHECK_INTERVAL
- LOG_LEVEL
volumes:
- '${PWD}/${TARGET_NETWORK}/keys:/keys'
ports:
- '${KEYGEN_RESTART_PORT}:8001'
networks:
- test_network
signer:
@ -49,10 +50,13 @@ services:
context: .
environment:
- 'RABBITMQ_URL=amqp://rabbitmq:5672'
- 'PROXY_URL=http://proxy:8001'
- 'PROXY_URL=http://local_proxy:8001'
- FOREIGN_CHAIN_ID
- FOREIGN_URL
- FOREIGN_ASSET
- SIGN_ATTEMPT_TIMEOUT
- SIGN_NONCE_CHECK_INTERVAL
- SEND_TIMEOUT
- LOG_LEVEL
volumes:
- '${PWD}/${TARGET_NETWORK}/keys:/keys'
@ -60,12 +64,14 @@ services:
- '${SIGN_RESTART_PORT}:8001'
networks:
- test_network
- binance_net
redis:
image: redis:5.0.5-alpine
image: redis
build:
dockerfile: redis/Dockerfile
context: .
volumes:
- '${PWD}/${TARGET_NETWORK}/db:/data'
- './configs/redis.conf:/usr/local/etc/redis/redis.conf'
command: ["redis-server", "/usr/local/etc/redis/redis.conf"]
networks:
- test_network
rabbitmq:
@ -86,17 +92,15 @@ services:
- HOME_RPC_URL
- HOME_BRIDGE_ADDRESS
- HOME_TOKEN_ADDRESS
- HOME_CHAIN_ID
- HOME_START_BLOCK
- BLOCKS_RANGE_SIZE
- VALIDATOR_PRIVATE_KEY
- HOME_MAX_FETCH_RANGE_SIZE
- 'RABBITMQ_URL=amqp://rabbitmq:5672'
- LOG_LEVEL
volumes:
- '../deploy/deploy-home/build/contracts/Bridge.json:/watcher/contracts_data/Bridge.json'
networks:
- test_network
- blockchain_home
- ethereum_home_rpc_net
bnc-watcher:
build:
dockerfile: bncWatcher/Dockerfile
@ -106,15 +110,21 @@ services:
- FOREIGN_URL
- FOREIGN_ASSET
- 'RABBITMQ_URL=amqp://rabbitmq:5672'
- 'PROXY_URL=http://proxy:8001'
- 'PROXY_URL=http://local_proxy:8001'
- FOREIGN_FETCH_MAX_TIME_INTERVAL
- FOREIGN_FETCH_INTERVAL
- FOREIGN_FETCH_BLOCK_TIME_OFFSET
- LOG_LEVEL
volumes:
- '${PWD}/${TARGET_NETWORK}/keys:/keys'
networks:
- test_network
- binance_net
networks:
test_network:
blockchain_side:
ethereum_side_rpc_net:
external: true
blockchain_home:
ethereum_home_rpc_net:
external: true
binance_net:
external: true

View File

@ -9,20 +9,14 @@ services:
- HOME_RPC_URL
- HOME_BRIDGE_ADDRESS
- HOME_TOKEN_ADDRESS
- HOME_CHAIN_ID
- SIDE_RPC_URL
- SIDE_SHARED_DB_ADDRESS
- SIDE_CHAIN_ID
- VALIDATOR_PRIVATE_KEY
- FOREIGN_URL
- FOREIGN_ASSET
- LOG_LEVEL
- "GAS_LIMIT_FACTOR=3"
- "MAX_GAS_LIMIT=6000000"
volumes:
- '../deploy/deploy-test/build/contracts/IERC20.json:/proxy/contracts_data/IERC20.json'
- '../deploy/deploy-home/build/contracts/Bridge.json:/proxy/contracts_data/Bridge.json'
- '../deploy/deploy-side/build/contracts/SharedDB.json:/proxy/contracts_data/SharedDB.json'
- 'GAS_LIMIT_FACTOR=3'
- 'MAX_GAS_LIMIT=6000000'
ports:
- '${VOTES_PROXY_PORT}:8002'
networks:
@ -37,9 +31,13 @@ services:
environment:
- 'RABBITMQ_URL=amqp://rabbitmq:5672'
- 'PROXY_URL=http://proxy:8001'
- KEYGEN_ATTEMPT_TIMEOUT
- KEYGEN_EPOCH_CHECK_INTERVAL
- LOG_LEVEL
volumes:
- '${PWD}/${TARGET_NETWORK}/keys:/keys'
ports:
- '${KEYGEN_RESTART_PORT}:8001'
networks:
- keygen-proxy-net
- rabbit-keygen-net
@ -55,6 +53,9 @@ services:
- FOREIGN_CHAIN_ID
- FOREIGN_URL
- FOREIGN_ASSET
- SIGN_ATTEMPT_TIMEOUT
- SIGN_NONCE_CHECK_INTERVAL
- SEND_TIMEOUT
- LOG_LEVEL
volumes:
- '${PWD}/${TARGET_NETWORK}/keys:/keys'
@ -65,11 +66,12 @@ services:
- rabbit-signer-net
- redis-signer-net
redis:
image: redis:5.0.5-alpine
image: redis
build:
dockerfile: redis/Dockerfile
context: .
volumes:
- '${PWD}/${TARGET_NETWORK}/db:/data'
- './configs/redis.conf:/usr/local/etc/redis/redis.conf'
command: ["redis-server", "/usr/local/etc/redis/redis.conf"]
networks:
- redis-signer-net
- redis-keygen-net
@ -96,13 +98,11 @@ services:
- HOME_RPC_URL
- HOME_BRIDGE_ADDRESS
- HOME_TOKEN_ADDRESS
- HOME_CHAIN_ID
- HOME_START_BLOCK
- VALIDATOR_PRIVATE_KEY
- HOME_MAX_FETCH_RANGE_SIZE
- 'RABBITMQ_URL=amqp://rabbitmq:5672'
- LOG_LEVEL
volumes:
- '../deploy/deploy-home/build/contracts/Bridge.json:/watcher/contracts_data/Bridge.json'
networks:
- rabbit-ethwatcher-net
- redis-ethwatcher-net
@ -116,6 +116,9 @@ services:
- FOREIGN_ASSET
- 'RABBITMQ_URL=amqp://rabbitmq:5672'
- 'PROXY_URL=http://proxy:8001'
- FOREIGN_FETCH_MAX_TIME_INTERVAL
- FOREIGN_FETCH_INTERVAL
- FOREIGN_FETCH_BLOCK_TIME_OFFSET
- LOG_LEVEL
volumes:
- '${PWD}/${TARGET_NETWORK}/keys:/keys'

View File

@ -9,6 +9,6 @@ COPY ./ethWatcher/package.json /watcher/
RUN npm install
COPY ./ethWatcher/ethWatcher.js ./shared/db.js ./shared/logger.js ./shared/amqp.js ./shared/crypto.js /watcher/
COPY ./ethWatcher/ethWatcher.js ./shared/db.js ./shared/logger.js ./shared/amqp.js ./shared/crypto.js ./shared/wait.js /watcher/
ENTRYPOINT ["node", "ethWatcher.js"]

View File

@ -1,5 +1,4 @@
const Web3 = require('web3')
const utils = require('ethers').utils
const ethers = require('ethers')
const BN = require('bignumber.js')
const axios = require('axios')
@ -7,39 +6,63 @@ const logger = require('./logger')
const redis = require('./db')
const { connectRabbit, assertQueue } = require('./amqp')
const { publicKeyToAddress } = require('./crypto')
const { delay, retry } = require('./wait')
const abiBridge = require('./contracts_data/Bridge.json').abi
const {
HOME_RPC_URL, HOME_BRIDGE_ADDRESS, RABBITMQ_URL, HOME_START_BLOCK, VALIDATOR_PRIVATE_KEY
} = process.env
const HOME_MAX_FETCH_RANGE_SIZE = parseInt(process.env.HOME_MAX_FETCH_RANGE_SIZE, 10)
const { HOME_RPC_URL, HOME_BRIDGE_ADDRESS, RABBITMQ_URL, HOME_START_BLOCK, VALIDATOR_PRIVATE_KEY } = process.env
const homeWeb3 = new Web3(HOME_RPC_URL)
const bridge = new homeWeb3.eth.Contract(abiBridge, HOME_BRIDGE_ADDRESS)
const validatorAddress = homeWeb3.eth.accounts.privateKeyToAccount(`0x${VALIDATOR_PRIVATE_KEY}`).address
const provider = new ethers.providers.JsonRpcProvider(HOME_RPC_URL)
const bridgeAbi = [
'event ExchangeRequest(uint96 value, uint32 nonce)',
'event EpochEnd(uint16 indexed epoch)',
'event NewEpoch(uint16 indexed oldEpoch, uint16 indexed newEpoch)',
'event NewEpochCancelled(uint16 indexed epoch)',
'event NewFundsTransfer(uint16 indexed oldEpoch, uint16 indexed newEpoch)',
'event EpochStart(uint16 indexed epoch, uint256 x, uint256 y)',
'event EpochClose(uint16 indexed epoch)',
'event ForceSign()',
'function getThreshold(uint16 epoch) view returns (uint16)',
'function getParties(uint16 epoch) view returns (uint16)',
'function getRangeSize(uint16 epoch) view returns (uint16)',
'function getValidators(uint16 epoch) view returns (address[])'
]
const bridge = new ethers.Contract(HOME_BRIDGE_ADDRESS, bridgeAbi, provider)
const validatorAddress = ethers.utils.computeAddress(`0x${VALIDATOR_PRIVATE_KEY}`)
const foreignNonce = []
let channel
let exchangeQueue
let signQueue
let keygenQueue
let cancelKeygenQueue
let epochTimeIntervalsQueue
let chainId
let blockNumber
let foreignNonce = []
let epoch
let epochStart
let redisTx
let rangeSize
let lastTransactionBlockNumber
let isCurrentValidator
let activeEpoch
async function resetFutureMessages (queue) {
// Fetches home-chain block n (without transactions) and returns its timestamp.
async function getBlockTimestamp(n) {
  const block = await provider.getBlock(n, false)
  return block.timestamp
}
async function resetFutureMessages(queue) {
logger.debug(`Resetting future messages in queue ${queue.name}`)
const { messageCount } = await channel.checkQueue(queue.name)
if (messageCount) {
logger.info(`Filtering ${messageCount} reloaded messages from queue ${queue.name}`)
const backup = await assertQueue(channel, `${queue.name}.backup`)
do {
while (true) {
const message = await queue.get()
if (message === false)
if (message === false) {
break
}
const data = JSON.parse(message.content)
if (data.blockNumber < blockNumber) {
logger.debug('Saving message %o', data)
@ -48,173 +71,76 @@ async function resetFutureMessages (queue) {
logger.debug('Dropping message %o', data)
}
channel.ack(message)
} while (true)
}
logger.debug('Dropped messages came from future')
do {
while (true) {
const message = await backup.get()
if (message === false)
if (message === false) {
break
}
const data = JSON.parse(message.content)
logger.debug('Requeuing message %o', data)
queue.send(data)
channel.ack(message)
} while (true)
}
logger.debug('Redirected messages back to initial queue')
}
}
async function initialize () {
channel = await connectRabbit(RABBITMQ_URL)
exchangeQueue = await assertQueue(channel, 'exchangeQueue')
signQueue = await assertQueue(channel, 'signQueue')
keygenQueue = await assertQueue(channel, 'keygenQueue')
cancelKeygenQueue = await assertQueue(channel, 'cancelKeygenQueue')
const events = await bridge.getPastEvents('EpochStart', {
fromBlock: 1
})
epoch = events.length ? events[events.length - 1].returnValues.epoch.toNumber() : 0
logger.info(`Current epoch ${epoch}`)
epochStart = events.length ? events[events.length - 1].blockNumber : 1
const saved = (parseInt(await redis.get('homeBlock')) + 1) || parseInt(HOME_START_BLOCK)
if (epochStart > saved) {
logger.info(`Data in db is outdated, starting from epoch ${epoch}, block #${epochStart}`)
blockNumber = epochStart
rangeSize = (await bridge.methods.getRangeSize().call()).toNumber()
await redis.multi()
.set('homeBlock', blockNumber - 1)
.set(`foreignNonce${epoch}`, 0)
.exec()
foreignNonce[epoch] = 0
} else {
logger.info('Restoring epoch and block number from local db')
blockNumber = saved
foreignNonce[epoch] = parseInt(await redis.get(`foreignNonce${epoch}`)) || 0
}
isCurrentValidator = (await bridge.methods.getValidators().call()).includes(validatorAddress)
if (isCurrentValidator) {
logger.info(`${validatorAddress} is a current validator`)
} else {
logger.info(`${validatorAddress} is not a current validator`)
}
await resetFutureMessages(keygenQueue)
await resetFutureMessages(cancelKeygenQueue)
await resetFutureMessages(exchangeQueue)
await resetFutureMessages(signQueue)
logger.debug(`Sending start commands`)
await axios.get('http://keygen:8001/start')
await axios.get('http://signer:8001/start')
}
async function main () {
logger.debug(`Watching events in block #${blockNumber}`)
if (await homeWeb3.eth.getBlock(blockNumber) === null) {
logger.debug('No block')
await new Promise(r => setTimeout(r, 1000))
return
}
redisTx = redis.multi()
const bridgeEvents = await bridge.getPastEvents('allEvents', {
fromBlock: blockNumber,
toBlock: blockNumber
})
for (const event of bridgeEvents) {
switch (event.event) {
case 'NewEpoch':
await sendKeygen(event)
break
case 'NewEpochCancelled':
sendKeygenCancellation(event)
break
case 'NewFundsTransfer':
isCurrentValidator && await sendSignFundsTransfer(event)
break
case 'ExchangeRequest':
isCurrentValidator && await sendSign(event)
break
case 'EpochStart':
await processEpochStart(event)
break
}
}
if ((blockNumber + 1 - epochStart) % rangeSize === 0) {
logger.info('Reached end of the current block range')
if (lastTransactionBlockNumber > blockNumber - rangeSize) {
logger.info('Sending message to start signature generation for the ended range')
await sendStartSign()
}
}
blockNumber++
// Exec redis tx
await redisTx.incr('homeBlock').exec()
await redis.save()
}
initialize().then(async () => {
while (true) {
await main()
}
})
async function sendKeygen (event) {
const newEpoch = event.returnValues.newEpoch.toNumber()
async function sendKeygen(event) {
const { newEpoch } = event.values
keygenQueue.send({
epoch: newEpoch,
blockNumber,
threshold: (await bridge.methods.getThreshold(newEpoch).call()).toNumber(),
parties: (await bridge.methods.getParties(newEpoch).call()).toNumber()
threshold: await bridge.getThreshold(newEpoch),
parties: await bridge.getParties(newEpoch)
})
logger.debug('Sent keygen start event')
}
function sendKeygenCancellation (event) {
const epoch = event.returnValues.epoch.toNumber()
function sendKeygenCancellation(event) {
const eventEpoch = event.values.epoch
cancelKeygenQueue.send({
epoch,
epoch: eventEpoch,
blockNumber
})
logger.debug('Sent keygen cancellation event')
}
async function sendSignFundsTransfer (event) {
const newEpoch = event.returnValues.newEpoch.toNumber()
const oldEpoch = event.returnValues.oldEpoch.toNumber()
async function sendSignFundsTransfer(event) {
const { newEpoch, oldEpoch } = event.values
signQueue.send({
epoch: oldEpoch,
blockNumber,
newEpoch,
nonce: foreignNonce[oldEpoch],
threshold: (await bridge.methods.getThreshold(oldEpoch).call()).toNumber(),
parties: (await bridge.methods.getParties(oldEpoch).call()).toNumber()
threshold: await bridge.getThreshold(oldEpoch),
parties: await bridge.getParties(oldEpoch)
})
logger.debug('Sent sign funds transfer event')
foreignNonce[oldEpoch]++
foreignNonce[oldEpoch] += 1
redisTx.incr(`foreignNonce${oldEpoch}`)
}
async function sendSign (event) {
const tx = await homeWeb3.eth.getTransaction(event.transactionHash)
const msg = utils.serializeTransaction({
async function sendSign(event, transactionHash) {
const tx = await provider.getTransaction(transactionHash)
const msg = ethers.utils.serializeTransaction({
nonce: tx.nonce,
gasPrice: `0x${new BN(tx.gasPrice).toString(16)}`,
gasLimit: `0x${new BN(tx.gas).toString(16)}`,
gasPrice: tx.gasPrice,
gasLimit: tx.gasLimit,
to: tx.to,
value: `0x${new BN(tx.value).toString(16)}`,
data: tx.input,
chainId: await homeWeb3.eth.net.getId()
data: tx.data,
chainId
})
const hash = ethers.utils.keccak256(msg)
const publicKey = ethers.utils.recoverPublicKey(hash, {
r: tx.r,
s: tx.s,
v: tx.v
})
const hash = homeWeb3.utils.sha3(msg)
const publicKey = utils.recoverPublicKey(hash, { r: tx.r, s: tx.s, v: tx.v })
const msgToQueue = {
epoch,
blockNumber,
@ -222,8 +148,8 @@ async function sendSign (event) {
x: publicKey.substr(4, 64),
y: publicKey.substr(68, 64)
}),
value: (new BN(event.returnValues.value)).dividedBy(10 ** 18).toFixed(8, 3),
nonce: event.returnValues.nonce.toNumber()
value: (new BN(event.values.value)).dividedBy(10 ** 18).toFixed(8, 3),
nonce: event.values.nonce
}
exchangeQueue.send(msgToQueue)
@ -234,23 +160,24 @@ async function sendSign (event) {
logger.debug(`Set lastTransactionBlockNumber to ${blockNumber}`)
}
async function sendStartSign () {
redisTx.incr(`foreignNonce${epoch}`)
async function sendStartSign() {
signQueue.send({
epoch,
blockNumber,
nonce: foreignNonce[epoch]++,
threshold: (await bridge.methods.getThreshold(epoch).call()).toNumber(),
parties: (await bridge.methods.getParties(epoch).call()).toNumber()
nonce: foreignNonce[epoch],
threshold: await bridge.getThreshold(epoch),
parties: await bridge.getParties(epoch)
})
foreignNonce[epoch] += 1
redisTx.incr(`foreignNonce${epoch}`)
}
async function processEpochStart (event) {
epoch = event.returnValues.epoch.toNumber()
async function processEpochStart(event) {
epoch = event.values.epoch
epochStart = blockNumber
logger.info(`Epoch ${epoch} started`)
rangeSize = (await bridge.methods.getRangeSize().call()).toNumber()
isCurrentValidator = (await bridge.methods.getValidators().call()).includes(validatorAddress)
rangeSize = await bridge.getRangeSize(epoch)
isCurrentValidator = (await bridge.getValidators(epoch)).includes(validatorAddress)
if (isCurrentValidator) {
logger.info(`${validatorAddress} is a current validator`)
} else {
@ -259,3 +186,195 @@ async function processEpochStart (event) {
logger.info(`Updated range size to ${rangeSize}`)
foreignNonce[epoch] = 0
}
// Queues a sign job that closes the current epoch on the foreign chain.
// Consumes one foreign nonce locally and mirrors the increment into the
// pending redis transaction.
async function sendEpochClose() {
  logger.debug(`Consumed epoch ${epoch} close event`)
  signQueue.send({
    closeEpoch: epoch,
    blockNumber,
    nonce: foreignNonce[epoch],
    threshold: await bridge.getThreshold(epoch),
    parties: await bridge.getParties(epoch)
  })
  foreignNonce[epoch] += 1
  redisTx.incr(`foreignNonce${epoch}`)
}
// Boot sequence for the home-chain watcher:
//  1. connect to RabbitMQ and assert all queues this service touches;
//  2. determine the current epoch and its start block from past EpochStart
//     logs (epoch 0 / block 1 when none exist yet);
//  3. pick the scan start block: the saved redis position, unless the epoch
//     started after it (then the saved data is stale and scanning restarts
//     at the epoch start with a reset foreign nonce);
//  4. load epoch parameters (range size, validator membership);
//  5. drop queued messages that reference blocks we are about to re-scan;
//  6. kick the keygen and signer side-cars.
async function initialize() {
  channel = await connectRabbit(RABBITMQ_URL)
  exchangeQueue = await assertQueue(channel, 'exchangeQueue')
  signQueue = await assertQueue(channel, 'signQueue')
  keygenQueue = await assertQueue(channel, 'keygenQueue')
  cancelKeygenQueue = await assertQueue(channel, 'cancelKeygenQueue')
  epochTimeIntervalsQueue = await assertQueue(channel, 'epochTimeIntervalsQueue')

  activeEpoch = !!(await redis.get('activeEpoch'))

  chainId = (await provider.getNetwork()).chainId

  const events = (await provider.getLogs({
    address: HOME_BRIDGE_ADDRESS,
    fromBlock: 1,
    toBlock: 'latest',
    topics: bridge.filters.EpochStart().topics
  })).map((log) => bridge.interface.parseLog(log))

  epoch = events.length ? events[events.length - 1].values.epoch : 0
  logger.info(`Current epoch ${epoch}`)
  epochStart = events.length ? events[events.length - 1].blockNumber : 1
  // Saved position is the block AFTER the last processed one; fall back to
  // the configured start block on first run (redis.get returns null => NaN).
  const saved = (parseInt(await redis.get('homeBlock'), 10) + 1) || parseInt(HOME_START_BLOCK, 10)
  if (epochStart > saved) {
    logger.info(`Data in db is outdated, starting from epoch ${epoch}, block #${epochStart}`)
    blockNumber = epochStart
    await redis.multi()
      .set('homeBlock', blockNumber - 1)
      .set(`foreignNonce${epoch}`, 0)
      .exec()
    foreignNonce[epoch] = 0
  } else {
    logger.info('Restoring epoch and block number from local db')
    blockNumber = saved
    foreignNonce[epoch] = parseInt(await redis.get(`foreignNonce${epoch}`), 10) || 0
  }
  rangeSize = await bridge.getRangeSize(epoch)
  logger.debug(`Range size ${rangeSize}`)
  logger.debug('Checking if current validator')
  isCurrentValidator = (await bridge.getValidators(epoch)).includes(validatorAddress)
  if (isCurrentValidator) {
    logger.info(`${validatorAddress} is a current validator`)
  } else {
    logger.info(`${validatorAddress} is not a current validator`)
  }

  // Messages referencing blocks >= blockNumber would be regenerated by the
  // upcoming re-scan; drop them to avoid duplicates.
  await resetFutureMessages(keygenQueue)
  await resetFutureMessages(cancelKeygenQueue)
  await resetFutureMessages(exchangeQueue)
  await resetFutureMessages(signQueue)
  await resetFutureMessages(epochTimeIntervalsQueue)
  logger.debug('Sending start commands')
  await axios.get('http://keygen:8001/start')
  await axios.get('http://signer:8001/start')
}
// One iteration of the home-chain watcher: fetch the next batch of blocks,
// replay bridge events in order, and persist progress to redis.
async function loop() {
  const latestBlockNumber = await provider.getBlockNumber()
  // Nothing new on chain yet — back off briefly and let the caller retry.
  if (latestBlockNumber < blockNumber) {
    logger.debug(`No block after ${latestBlockNumber}`)
    await delay(2000)
    return
  }
  // Cap the batch so one getLogs call never spans more than HOME_MAX_FETCH_RANGE_SIZE blocks.
  const endBlock = Math.min(latestBlockNumber, blockNumber + HOME_MAX_FETCH_RANGE_SIZE - 1)
  // Module-level redis transaction; handlers below may append writes before exec.
  redisTx = redis.multi()
  logger.debug(`Watching events in blocks #${blockNumber}-${endBlock}`)
  // All logs emitted by the bridge in this batch, unfiltered by topic.
  const bridgeEvents = (await provider.getLogs({
    address: HOME_BRIDGE_ADDRESS,
    fromBlock: blockNumber,
    toBlock: endBlock,
    topics: []
  }))
  // Walk blocks one at a time; `i` advances through bridgeEvents, which are
  // assumed sorted by block number — TODO confirm provider guarantees this.
  for (let curBlockNumber = blockNumber, i = 0; curBlockNumber <= endBlock; curBlockNumber += 1) {
    // Position of this block within the current signing range.
    const rangeOffset = (curBlockNumber + 1 - epochStart) % rangeSize
    const rangeStart = curBlockNumber - (rangeOffset || rangeSize)
    let epochTimeUpdated = false
    // Consume every event emitted in this block.
    while (i < bridgeEvents.length && bridgeEvents[i].blockNumber === curBlockNumber) {
      const event = bridge.interface.parseLog(bridgeEvents[i])
      logger.trace('Consumed event %o %o', event, bridgeEvents[i])
      switch (event.name) {
        case 'NewEpoch':
          // Start keygen only if this validator is in the new epoch's validator set.
          if ((await bridge.getValidators(event.values.newEpoch)).includes(validatorAddress)) {
            await sendKeygen(event)
          }
          break
        case 'NewEpochCancelled':
          // NOTE(review): not awaited, unlike the other handlers — confirm intentional.
          if ((await bridge.getValidators(event.values.epoch)).includes(validatorAddress)) {
            sendKeygenCancellation(event)
          }
          break
        case 'NewFundsTransfer':
          if (isCurrentValidator) {
            await sendSignFundsTransfer(event)
          }
          break
        case 'ExchangeRequest':
          if (isCurrentValidator) {
            await sendSign(event, bridgeEvents[i].transactionHash)
          }
          break
        case 'EpochStart':
          await processEpochStart(event)
          await redis.set('activeEpoch', true)
          activeEpoch = true
          // Record the wall-clock start of the epoch for interval accounting.
          // NOTE(review): send() is not awaited here — confirm intentional.
          epochTimeIntervalsQueue.send({
            blockNumber: curBlockNumber,
            startTime: await retry(() => getBlockTimestamp(curBlockNumber)) * 1000,
            epoch
          })
          epochTimeUpdated = true
          break
        case 'EpochEnd':
          logger.debug(`Consumed epoch ${epoch} end event`)
          await redis.set('activeEpoch', false)
          activeEpoch = false
          epochTimeIntervalsQueue.send({
            blockNumber: curBlockNumber,
            prolongedTime: await retry(() => getBlockTimestamp(curBlockNumber)) * 1000,
            epoch
          })
          break
        case 'EpochClose':
          if (isCurrentValidator) {
            await sendEpochClose()
          }
          break
        case 'ForceSign':
          // Only force a signature if there are unsigned transactions in the current range.
          if (isCurrentValidator && lastTransactionBlockNumber > rangeStart) {
            logger.debug('Consumed force sign event')
            lastTransactionBlockNumber = 0
            redisTx.set('lastTransactionBlockNumber', 0)
            await sendStartSign()
          }
          break
        default:
          logger.warn('Unknown event %o', event)
      }
      i += 1
    }
    // At the end of the batch, prolong the epoch interval if no EpochStart
    // event already updated it in this block.
    if (curBlockNumber === endBlock && !epochTimeUpdated && epoch > 0 && activeEpoch) {
      epochTimeIntervalsQueue.send({
        blockNumber: curBlockNumber,
        prolongedTime: await retry(() => getBlockTimestamp(curBlockNumber)) * 1000,
        epoch
      })
    }
    // Range boundary: trigger signing for anything accumulated in the ended range.
    if (rangeOffset === 0) {
      logger.info('Reached end of the current block range')
      if (isCurrentValidator && lastTransactionBlockNumber > curBlockNumber - rangeSize) {
        logger.info('Sending message to start signature generation for the ended range')
        await sendStartSign()
      }
    }
  }
  blockNumber = endBlock + 1
  // Exec redis tx
  await redisTx.set('homeBlock', endBlock).exec()
  await redis.save()
}
// Entry point: restore watcher state, then poll the home chain forever.
async function main() {
  await initialize()
  for (;;) {
    await loop()
  }
}
main()

View File

@ -4,12 +4,14 @@
"dependencies": {
"ioredis": "4.10.0",
"amqplib": "0.5.3",
"web3": "1.0.0-beta.55",
"ethers": "4.0.33",
"bignumber.js": "9.0.0",
"bech32": "1.1.3",
"pino": "5.13.4",
"pino-pretty": "3.2.1",
"axios": "0.19.0"
},
"engines": {
"node": ">=10.6.0"
}
}

View File

@ -6,6 +6,6 @@ COPY ./proxy/package.json /proxy/
RUN npm install
COPY ./proxy/index.js ./proxy/encode.js ./proxy/decode.js ./proxy/sendTx.js ./shared/logger.js ./shared/crypto.js /proxy/
COPY ./proxy/index.js ./proxy/encode.js ./proxy/decode.js ./proxy/sendTx.js ./proxy/contractsAbi.js ./proxy/utils.js ./shared/logger.js ./shared/crypto.js ./shared/wait.js /proxy/
ENTRYPOINT ["node", "index.js"]

View File

@ -0,0 +1,38 @@
// Human-readable ABI fragments (ethers.js format) for the contracts this
// service interacts with.

// Minimal ERC20 surface: only balance reads are needed.
const tokenAbi = [
  'function balanceOf(address account) view returns (uint256)'
]
// Read-only views of the home bridge: signing key coordinates, epoch
// parameters, validator sets and vote bookkeeping.
const bridgeAbi = [
  'function getX() view returns (uint256)',
  'function getY() view returns (uint256)',
  'function epoch() view returns (uint16)',
  'function getRangeSize() view returns (uint16)',
  'function getNextRangeSize() view returns (uint16)',
  'function getStartBlock() view returns (uint32)',
  'function getNonce() view returns (uint16)',
  'function nextEpoch() view returns (uint16)',
  'function getThreshold() view returns (uint16)',
  'function getNextThreshold() view returns (uint16)',
  'function getValidators() view returns (address[])',
  'function getNextValidators() view returns (address[])',
  'function getCloseEpoch() view returns (bool)',
  'function getNextCloseEpoch() view returns (bool)',
  'function status() view returns (uint8)',
  'function votesCount(bytes32) view returns (uint16)',
  'function getNextPartyId(address a) view returns (uint16)'
]
// Side-chain shared db used for off-chain coordination: signup bookkeeping,
// per-round data exchange and signature collection.
const sharedDbAbi = [
  'function getSignupAddress(bytes32 hash, address[] validators, uint16 signupNumber) view returns (address)',
  'function getData(address from, bytes32 hash, bytes32 key) view returns (bytes)',
  'function getSignupNumber(bytes32 hash, address[] validators, address validator) view returns (uint16)',
  'function isSignuped(bytes32 hash) view returns (bool)',
  'function setData(bytes32 hash, bytes32 key, bytes data)',
  'function signup(bytes32 hash)',
  'function addSignature(bytes message, bytes rsv)',
  'function getSignatures(bytes32 msgHash, address[] validators) view returns (bytes)'
]
module.exports = {
  tokenAbi,
  bridgeAbi,
  sharedDbAbi
}

View File

@ -1,18 +1,19 @@
const BN = require('bn.js')
function Tokenizer (_buffer) {
function Tokenizer(_buffer) {
const buffer = _buffer
let position = 0
return {
isEmpty: function () {
isEmpty() {
return position === buffer.length
},
parse: function (length = 32, base = 16) {
parse(length = 32, base = 16) {
const res = new BN(buffer.slice(position, position + length)).toString(base)
position += length
return res
},
byte: function () {
byte() {
// eslint-disable-next-line no-plusplus
return buffer[position++]
}
}
@ -21,7 +22,7 @@ function Tokenizer (_buffer) {
const keygenDecoders = [
null,
// round 1
function (tokenizer) {
(tokenizer) => {
const res = {
e: {
n: tokenizer.parse(256, 10)
@ -37,31 +38,30 @@ const keygenDecoders = [
return res
},
// round 2
function (tokenizer) {
return {
blind_factor: tokenizer.parse(),
y_i: {
x: tokenizer.parse(),
y: tokenizer.parse()
}
(tokenizer) => ({
blind_factor: tokenizer.parse(),
y_i: {
x: tokenizer.parse(),
y: tokenizer.parse()
}
},
}),
// round 3
function (tokenizer) {
(tokenizer) => {
const res = {
ciphertext: [],
tag: []
}
for (let i = 0; i < 32; i++) {
const ciphertextLength = tokenizer.byte() // probably 32
for (let i = 0; i < ciphertextLength; i += 1) {
res.ciphertext.push(tokenizer.byte())
}
for (let i = 0; i < 16; i++) {
while (!tokenizer.isEmpty()) {
res.tag.push(tokenizer.byte())
}
return res
},
// round 4
function (tokenizer) {
(tokenizer) => {
const res = {
parameters: {
threshold: tokenizer.byte(),
@ -72,157 +72,139 @@ const keygenDecoders = [
while (!tokenizer.isEmpty()) {
res.commitments.push({
x: tokenizer.parse(),
y: tokenizer.parse(),
y: tokenizer.parse()
})
}
return res
},
// round 5
function (tokenizer) {
return {
pk: {
x: tokenizer.parse(),
y: tokenizer.parse()
},
pk_t_rand_commitment: {
x: tokenizer.parse(),
y: tokenizer.parse()
},
challenge_response: tokenizer.parse()
}
}
(tokenizer) => ({
pk: {
x: tokenizer.parse(),
y: tokenizer.parse()
},
pk_t_rand_commitment: {
x: tokenizer.parse(),
y: tokenizer.parse()
},
challenge_response: tokenizer.parse()
})
]
const signDecoders = [
// round 0
function (tokenizer) {
return tokenizer.byte()
},
(tokenizer) => tokenizer.byte(),
// round 1
function (tokenizer) {
return [
{
com: tokenizer.parse()
},
{
c: tokenizer.parse(512)
}
]
},
(tokenizer) => [
{
com: tokenizer.parse()
},
{
c: tokenizer.parse(512)
}
],
// round 2
function (tokenizer) {
(tokenizer) => {
const res = []
for (let i = 0; i < 2; i++) {
for (let i = 0; i < 2; i += 1) {
res[i] = {
c: tokenizer.parse(512),
b_proof: {
pk: {
x: tokenizer.parse(),
y: tokenizer.parse(),
y: tokenizer.parse()
},
pk_t_rand_commitment: {
x: tokenizer.parse(),
y: tokenizer.parse(),
y: tokenizer.parse()
},
challenge_response: tokenizer.parse(),
challenge_response: tokenizer.parse()
},
beta_tag_proof: {
pk: {
x: tokenizer.parse(),
y: tokenizer.parse(),
y: tokenizer.parse()
},
pk_t_rand_commitment: {
x: tokenizer.parse(),
y: tokenizer.parse(),
y: tokenizer.parse()
},
challenge_response: tokenizer.parse(),
challenge_response: tokenizer.parse()
}
}
}
return res
},
// round 3
function (tokenizer) {
return tokenizer.parse()
},
(tokenizer) => tokenizer.parse(),
// round 4
function (tokenizer) {
return {
blind_factor: tokenizer.parse(),
g_gamma_i: {
x: tokenizer.parse(),
y: tokenizer.parse()
}
(tokenizer) => ({
blind_factor: tokenizer.parse(),
g_gamma_i: {
x: tokenizer.parse(),
y: tokenizer.parse()
}
},
}),
// round 5
function (tokenizer) {
return {
com: tokenizer.parse()
}
},
(tokenizer) => ({
com: tokenizer.parse()
}),
// round 6
function (tokenizer) {
return [
{
V_i: {
x: tokenizer.parse(),
y: tokenizer.parse()
},
A_i: {
x: tokenizer.parse(),
y: tokenizer.parse()
},
B_i: {
x: tokenizer.parse(),
y: tokenizer.parse()
},
blind_factor: tokenizer.parse()
},
{
T: {
x: tokenizer.parse(),
y: tokenizer.parse()
},
A3: {
x: tokenizer.parse(),
y: tokenizer.parse()
},
z1: tokenizer.parse(),
z2: tokenizer.parse()
}
]
},
// round 7
function (tokenizer) {
return {
com: tokenizer.parse()
}
},
// round 8
function (tokenizer) {
return {
u_i: {
(tokenizer) => [
{
V_i: {
x: tokenizer.parse(),
y: tokenizer.parse()
},
t_i: {
A_i: {
x: tokenizer.parse(),
y: tokenizer.parse()
},
B_i: {
x: tokenizer.parse(),
y: tokenizer.parse()
},
blind_factor: tokenizer.parse()
},
{
T: {
x: tokenizer.parse(),
y: tokenizer.parse()
},
A3: {
x: tokenizer.parse(),
y: tokenizer.parse()
},
z1: tokenizer.parse(),
z2: tokenizer.parse()
}
},
],
// round 7
(tokenizer) => ({
com: tokenizer.parse()
}),
// round 8
(tokenizer) => ({
u_i: {
x: tokenizer.parse(),
y: tokenizer.parse()
},
t_i: {
x: tokenizer.parse(),
y: tokenizer.parse()
},
blind_factor: tokenizer.parse()
}),
// round 9
function (tokenizer) {
return tokenizer.parse()
},
(tokenizer) => tokenizer.parse()
]
module.exports = function (isKeygen, round, value) {
value = Buffer.from(value.substr(2), 'hex')
const tokenizer = Tokenizer(value)
const roundNumber = parseInt(round[round.length - 1])
function decode(isKeygen, round, value) {
const newValue = Buffer.from(value.substr(2), 'hex')
const tokenizer = Tokenizer(newValue)
const roundNumber = parseInt(round[round.length - 1], 10)
const decoder = (isKeygen ? keygenDecoders : signDecoders)[roundNumber]
return JSON.stringify(decoder(tokenizer))
}
module.exports = decode

View File

@ -2,42 +2,44 @@ const BN = require('bignumber.js')
const { padZeros } = require('./crypto')
function makeBuffer (value, length = 32, base = 16) {
function makeBuffer(value, length = 32, base = 16) {
return Buffer.from(padZeros(new BN(value, base).toString(16), length * 2), 'hex')
}
const keygenEncoders = [
null,
// round 1
function * (value) {
function* g(value) {
yield makeBuffer(value.e.n, 256, 10)
yield makeBuffer(value.com)
for (let x of value.correct_key_proof.sigma_vec) {
yield makeBuffer(x, 256, 10)
for (let i = 0; i < value.correct_key_proof.sigma_vec.length; i += 1) {
yield makeBuffer(value.correct_key_proof.sigma_vec[i], 256, 10)
}
},
// round 2
function * (value) {
function* g(value) {
yield makeBuffer(value.blind_factor)
yield makeBuffer(value.y_i.x)
yield makeBuffer(value.y_i.y)
},
// round 3
function * (value) {
yield Buffer.from(value.ciphertext) // 32 bytes
yield Buffer.from(value.tag) // 16 bytes
function* g(value) {
yield Buffer.from([value.ciphertext.length])
yield Buffer.from(value.ciphertext) // 32 bytes or less
yield Buffer.from(value.tag) // 16 bytes or less
},
// round 4
function * (value) {
yield Buffer.from([ value.parameters.threshold ]) // 1 byte
yield Buffer.from([ value.parameters.share_count ]) // 1 byte
for (let x of value.commitments) {
function* g(value) {
yield Buffer.from([value.parameters.threshold]) // 1 byte
yield Buffer.from([value.parameters.share_count]) // 1 byte
for (let i = 0; i < value.commitments.length; i += 1) {
const x = value.commitments[i]
yield makeBuffer(x.x)
yield makeBuffer(x.y)
}
},
// round 5
function * (value) {
function* g(value) {
yield makeBuffer(value.pk.x)
yield makeBuffer(value.pk.y)
yield makeBuffer(value.pk_t_rand_commitment.x)
@ -48,17 +50,17 @@ const keygenEncoders = [
const signEncoders = [
// round 0
function * (value) {
yield Buffer.from([ value ])
function* g(value) {
yield Buffer.from([value])
},
// round 1
function * (value) {
function* g(value) {
yield makeBuffer(value[0].com)
yield makeBuffer(value[1].c, 512)
},
// round 2
function * (value) {
for (let i = 0; i < 2; i++) {
function* g(value) {
for (let i = 0; i < 2; i += 1) {
yield makeBuffer(value[i].c, 512)
yield makeBuffer(value[i].b_proof.pk.x)
yield makeBuffer(value[i].b_proof.pk.y)
@ -73,21 +75,21 @@ const signEncoders = [
}
},
// round 3
function * (value) {
function* g(value) {
yield makeBuffer(value)
},
// round 4
function * (value) {
function* g(value) {
yield makeBuffer(value.blind_factor)
yield makeBuffer(value.g_gamma_i.x)
yield makeBuffer(value.g_gamma_i.y)
},
// round 5
function * (value) {
function* g(value) {
yield makeBuffer(value.com)
},
// round 6
function * (value) {
function* g(value) {
yield makeBuffer(value[0].V_i.x)
yield makeBuffer(value[0].V_i.y)
yield makeBuffer(value[0].A_i.x)
@ -103,11 +105,11 @@ const signEncoders = [
yield makeBuffer(value[1].z2)
},
// round 7
function * (value) {
function* g(value) {
yield makeBuffer(value.com)
},
// round 8
function * (value) {
function* g(value) {
yield makeBuffer(value.u_i.x)
yield makeBuffer(value.u_i.y)
yield makeBuffer(value.t_i.x)
@ -115,23 +117,23 @@ const signEncoders = [
yield makeBuffer(value.blind_factor)
},
// round 9
function * (value) {
function* g(value) {
yield makeBuffer(value)
},
}
]
module.exports = function (isKeygen, round, value) {
function encode(isKeygen, round, value) {
const parsedValue = JSON.parse(value)
const roundNumber = parseInt(round[round.length - 1])
const roundNumber = parseInt(round[round.length - 1], 10)
const encoder = (isKeygen ? keygenEncoders : signEncoders)[roundNumber]
const generator = encoder(parsedValue)
const buffers = []
let next
while (true) {
next = generator.next()
if (next.done)
break
let next = generator.next()
while (!next.done) {
buffers.push(next.value)
next = generator.next()
}
return Buffer.concat(buffers)
}
module.exports = encode

View File

@ -1,44 +1,475 @@
const express = require('express')
const Web3 = require('web3')
const AsyncLock = require('async-lock')
const axios = require('axios')
const BN = require('bignumber.js')
const { utils } = require('ethers')
const ethers = require('ethers')
const { tokenAbi, bridgeAbi, sharedDbAbi } = require('./contractsAbi')
const {
Ok, Err, decodeStatus
} = require('./utils')
const encode = require('./encode')
const decode = require('./decode')
const { createSender, waitForReceipt } = require('./sendTx')
const logger = require('./logger')
const { publicKeyToAddress } = require('./crypto')
const { publicKeyToAddress, padZeros } = require('./crypto')
const {
HOME_RPC_URL, HOME_BRIDGE_ADDRESS, SIDE_RPC_URL, SIDE_SHARED_DB_ADDRESS, VALIDATOR_PRIVATE_KEY, HOME_CHAIN_ID,
SIDE_CHAIN_ID, HOME_TOKEN_ADDRESS, FOREIGN_URL, FOREIGN_ASSET
HOME_RPC_URL, HOME_BRIDGE_ADDRESS, SIDE_RPC_URL, SIDE_SHARED_DB_ADDRESS, VALIDATOR_PRIVATE_KEY,
HOME_TOKEN_ADDRESS, FOREIGN_URL, FOREIGN_ASSET
} = process.env
const abiSharedDb = require('./contracts_data/SharedDB.json').abi
const abiBridge = require('./contracts_data/Bridge.json').abi
const abiToken = require('./contracts_data/IERC20.json').abi
const homeWeb3 = new Web3(HOME_RPC_URL, null, { transactionConfirmationBlocks: 1 })
const sideWeb3 = new Web3(SIDE_RPC_URL, null, { transactionConfirmationBlocks: 1 })
const bridge = new homeWeb3.eth.Contract(abiBridge, HOME_BRIDGE_ADDRESS)
const token = new homeWeb3.eth.Contract(abiToken, HOME_TOKEN_ADDRESS)
const sharedDb = new sideWeb3.eth.Contract(abiSharedDb, SIDE_SHARED_DB_ADDRESS)
const validatorAddress = homeWeb3.eth.accounts.privateKeyToAccount(`0x${VALIDATOR_PRIVATE_KEY}`).address
const Action = {
CONFIRM_KEYGEN: 0,
CONFIRM_FUNDS_TRANSFER: 1,
CONFIRM_CLOSE_EPOCH: 2,
VOTE_START_VOTING: 3,
VOTE_ADD_VALIDATOR: 4,
VOTE_REMOVE_VALIDATOR: 5,
VOTE_CHANGE_THRESHOLD: 6,
VOTE_CHANGE_RANGE_SIZE: 7,
VOTE_CHANGE_CLOSE_EPOCH: 8,
VOTE_START_KEYGEN: 9,
VOTE_CANCEL_KEYGEN: 10,
TRANSFER: 11
}
const homeProvider = new ethers.providers.JsonRpcProvider(HOME_RPC_URL)
const sideProvider = new ethers.providers.JsonRpcProvider(SIDE_RPC_URL)
const homeWallet = new ethers.Wallet(VALIDATOR_PRIVATE_KEY, homeProvider)
const sideWallet = new ethers.Wallet(VALIDATOR_PRIVATE_KEY, sideProvider)
const token = new ethers.Contract(HOME_TOKEN_ADDRESS, tokenAbi, homeWallet)
const bridge = new ethers.Contract(HOME_BRIDGE_ADDRESS, bridgeAbi, homeWallet)
const sharedDb = new ethers.Contract(SIDE_SHARED_DB_ADDRESS, sharedDbAbi, sideWallet)
const validatorAddress = homeWallet.address
const httpClient = axios.create({ baseURL: FOREIGN_URL })
const lock = new AsyncLock()
let homeValidatorNonce
let sideValidatorNonce
let homeSender
let sideSender
const app = express()
app.use(express.json())
app.use(express.urlencoded({ extended: true }))
const votesProxyApp = express()
// Serialize writes to the side-chain shared db: holds the 'side' lock so only
// one query is in flight, sends via sideSender, and advances the local nonce
// only when the send consumed it.
function sideSendQuery(query) {
  return lock.acquire('side', async () => {
    logger.debug('Sending side query')
    const senderResponse = await sideSender({
      data: query,
      to: SIDE_SHARED_DB_ADDRESS,
      nonce: sideValidatorNonce
    })
    // NOTE(review): `true` appears to mean "nonce not consumed" — confirm
    // against the sendTx.js sender contract.
    if (senderResponse !== true) {
      sideValidatorNonce += 1
    }
    return senderResponse
  })
}
// GET /status — report the bridge's current epoch and contract status code.
async function status(req, res) {
  logger.debug('Status call')
  const [bridgeEpoch, bridgeStatus] = await Promise.all([bridge.epoch(), bridge.status()])
  res.send({ bridgeEpoch, bridgeStatus })
  logger.debug('Status end')
}
// POST /get — fetch another party's round data from the shared db.
// The key quadruple is (fromId, round, uuid, toId); a uuid starting with 'k'
// marks a keygen session (sender resolved by position in the next validator
// set), otherwise the sender is resolved via the signup registry.
async function get(req, res) {
  logger.debug('Get call, %o', req.body.key)
  const round = req.body.key.second
  const uuid = req.body.key.third
  let from
  if (uuid.startsWith('k')) {
    // Keygen party ids are 1-based positions in the next validator set.
    from = (await bridge.getNextValidators())[parseInt(req.body.key.first, 10) - 1]
  } else {
    const validators = await bridge.getValidators()
    from = await sharedDb.getSignupAddress(
      uuid,
      validators,
      parseInt(req.body.key.first, 10)
    )
  }
  const to = Number(req.body.key.fourth) // 0 if empty
  const key = ethers.utils.id(`${round}_${to}`)
  const data = await sharedDb.getData(from, ethers.utils.id(uuid), key)
  // Bare '0x' (length 2) means the peer has not published this round yet.
  if (data.length > 2) {
    logger.trace(`Received encoded data: ${data}`)
    const decoded = decode(uuid[0] === 'k', round, data)
    logger.trace('Decoded data: %o', decoded)
    res.send(Ok({
      key: req.body.key,
      value: decoded
    }))
  } else {
    // Delay the Err reply to throttle the client's polling loop.
    setTimeout(() => res.send(Err(null)), 1000)
  }
  logger.debug('Get end')
}
// POST /set — binary-encode this party's round payload and publish it to the
// shared db under keccak(uuid) / keccak(`${round}_${to}`).
async function set(req, res) {
  logger.debug('Set call')
  const round = req.body.key.second
  const uuid = req.body.key.third
  const to = Number(req.body.key.fourth)
  const key = ethers.utils.id(`${round}_${to}`)
  logger.trace('Received data: %o', req.body.value)
  // Compact JSON payload into the fixed binary layout for this round.
  const encoded = encode(uuid[0] === 'k', round, req.body.value)
  logger.trace(`Encoded data: ${encoded.toString('hex')}`)
  logger.trace(`Received data: ${req.body.value.length} bytes, encoded data: ${encoded.length} bytes`)
  const query = sharedDb.interface.functions.setData.encode([ethers.utils.id(uuid), key, encoded])
  await sideSendQuery(query)
  res.send(Ok(null))
  logger.debug('Set end')
}
// POST /signupkeygen — register this validator for the next epoch's keygen
// session and pick the first unused attempt id.
// Responds Err when this address is not in the next validator set, otherwise
// Ok with the chosen session uuid and this validator's 1-based party id.
async function signupKeygen(req, res) {
  logger.debug('SignupKeygen call')
  const epoch = await bridge.nextEpoch()
  const partyId = await bridge.getNextPartyId(validatorAddress)
  // Bail out before scanning attempts: a zero party id means this address is
  // not a next-epoch validator, so the shared-db lookups below would be wasted.
  if (partyId === 0) {
    res.send(Err({ message: 'Not a validator' }))
    logger.debug('Not a validator')
    return
  }
  logger.debug('Checking previous attempts')
  let attempt = 1
  let uuid
  // Find the first attempt whose round1 slot is still empty in the shared db
  // (bare '0x', i.e. length 2, means nothing was written for this uuid yet).
  while (true) {
    uuid = `k${epoch}_${attempt}`
    const data = await sharedDb.getData(validatorAddress, ethers.utils.id(uuid), ethers.utils.id('round1_0'))
    if (data.length === 2) {
      break
    }
    logger.trace(`Attempt ${attempt} is already used`)
    attempt += 1
  }
  logger.debug(`Using attempt ${attempt}`)
  res.send(Ok({
    uuid,
    number: partyId
  }))
  logger.debug('SignupKeygen end')
}
// POST /signupsign — join a signing session for the message hash in
// req.body.third. Scans attempt numbers until an unused signup slot is found,
// then signs up on the shared db; a reverted tx means another signup already
// exists, which is reported as number 0.
async function signupSign(req, res) {
  logger.debug('SignupSign call')
  const msgHash = req.body.third
  logger.debug('Checking previous attempts')
  let attempt = 1
  let uuid
  let hash
  // Find the first `${msgHash}_${attempt}` uuid that is not yet signed up.
  while (true) {
    uuid = `${msgHash}_${attempt}`
    hash = ethers.utils.id(uuid)
    const data = await sharedDb.isSignuped(hash)
    if (!data) {
      break
    }
    logger.trace(`Attempt ${attempt} is already used`)
    attempt += 1
  }
  logger.debug(`Using attempt ${attempt}`)
  const query = sharedDb.interface.functions.signup.encode([hash])
  const { txHash } = await sideSendQuery(query)
  const receipt = await waitForReceipt(SIDE_RPC_URL, txHash)
  // Already have signup
  if (receipt.status === false) {
    res.send(Ok({
      uuid: hash,
      number: 0
    }))
    logger.debug('Already have signup')
    return
  }
  // Signup succeeded: look up this validator's position in the session.
  const validators = await bridge.getValidators()
  const id = await sharedDb.getSignupNumber(hash, validators, validatorAddress)
  res.send(Ok({
    uuid: hash,
    number: id
  }))
  logger.debug('SignupSign end')
}
// Serialize one message parameter to its binary form:
// hex string -> raw bytes (optional 0x prefix stripped), number -> 2-byte
// big-endian hex, boolean -> single 0/1 byte, anything else -> null.
function encodeParam(param) {
  const kind = typeof param
  if (kind === 'string') {
    const hex = param.startsWith('0x') ? param.slice(2) : param
    return Buffer.from(hex, 'hex')
  }
  if (kind === 'number') {
    return Buffer.from(padZeros(param.toString(16), 4), 'hex')
  }
  if (kind === 'boolean') {
    return Buffer.from([param ? 1 : 0])
  }
  return null
}
// Build a signable message: a single action-type byte followed by each
// parameter serialized through encodeParam.
function buildMessage(type, ...params) {
  logger.debug(`${type}, %o`, params)
  const encodedParams = params.map(encodeParam)
  return Buffer.concat([Buffer.from([type]), ...encodedParams])
}
// Sign the raw message with the side-chain wallet and persist the signature
// in the shared db contract through the serialized side-query path.
async function processMessage(message) {
  const hexMessage = `0x${message.toString('hex')}`
  const signature = await sideWallet.signMessage(message)
  logger.debug('Adding signature to shared db contract')
  await sideSendQuery(sharedDb.interface.functions.addSignature.encode([hexMessage, signature]))
}
// POST /confirmKeygen — sign a confirmation of the freshly generated
// threshold public key (x, y) for the given epoch.
async function confirmKeygen(req, res) {
  logger.debug('Confirm keygen call')
  const { x, y, epoch } = req.body
  const paddedX = padZeros(x, 64)
  const paddedY = padZeros(y, 64)
  await processMessage(buildMessage(Action.CONFIRM_KEYGEN, epoch, paddedX, paddedY))
  res.send()
  logger.debug('Confirm keygen end')
}
// POST /confirmFundsTransfer — sign a confirmation that funds were moved
// to the new epoch's address.
async function confirmFundsTransfer(req, res) {
  logger.debug('Confirm funds transfer call')
  await processMessage(buildMessage(Action.CONFIRM_FUNDS_TRANSFER, req.body.epoch))
  res.send()
  logger.debug('Confirm funds transfer end')
}
// POST /confirmCloseEpoch — sign a confirmation that the epoch was closed.
async function confirmCloseEpoch(req, res) {
  logger.debug('Confirm close epoch call')
  await processMessage(buildMessage(Action.CONFIRM_CLOSE_EPOCH, req.body.epoch))
  res.send()
  logger.debug('Confirm close epoch end')
}
// GET /vote/startVoting — sign a vote to open the next-epoch voting process.
async function voteStartVoting(req, res) {
  logger.info('Voting for starting new epoch voting process')
  const epoch = await bridge.epoch()
  await processMessage(buildMessage(Action.VOTE_START_VOTING, epoch))
  res.send('Voted\n')
  logger.info('Voted successfully')
}
// GET /vote/startKeygen — sign a vote to start the next epoch's keygen.
async function voteStartKeygen(req, res) {
  logger.info('Voting for starting new epoch keygen')
  const epoch = await bridge.epoch()
  await processMessage(buildMessage(Action.VOTE_START_KEYGEN, epoch))
  res.send('Voted\n')
  logger.info('Voted successfully')
}
// GET /vote/cancelKeygen — sign a vote to cancel the in-progress keygen;
// keyed by the NEXT epoch, since that is the one being generated.
async function voteCancelKeygen(req, res) {
  logger.info('Voting for cancelling new epoch keygen')
  const epoch = await bridge.nextEpoch()
  await processMessage(buildMessage(Action.VOTE_CANCEL_KEYGEN, epoch))
  res.send('Voted\n')
  logger.info('Voted successfully')
}
// GET /vote/addValidator/:validator — sign a vote to add a validator.
// NOTE(review): when the address fails validation no response is sent and the
// request hangs — confirm whether a 400 reply is intended.
async function voteAddValidator(req, res) {
  if (ethers.utils.isHexString(req.params.validator, 20)) {
    logger.info('Voting for adding new validator')
    const epoch = await bridge.epoch()
    const message = buildMessage(
      Action.VOTE_ADD_VALIDATOR,
      epoch,
      req.params.validator,
      padZeros(req.attempt, 18)
    )
    await processMessage(message)
    res.send('Voted\n')
    logger.info('Voted successfully')
  }
}
// GET /vote/changeThreshold/:threshold — sign a vote to change the signing
// threshold. NOTE(review): invalid (non-numeric) input sends no response and
// the request hangs — confirm intended.
async function voteChangeThreshold(req, res) {
  if (/^[0-9]+$/.test(req.params.threshold)) {
    logger.info('Voting for changing threshold')
    const epoch = await bridge.epoch()
    const message = buildMessage(
      Action.VOTE_CHANGE_THRESHOLD,
      epoch,
      parseInt(req.params.threshold, 10),
      padZeros(req.attempt, 54)
    )
    await processMessage(message)
    res.send('Voted\n')
    logger.info('Voted successfully')
  }
}
// GET /vote/changeRangeSize/:rangeSize — sign a vote to change the block
// range size. NOTE(review): invalid input sends no response and the request
// hangs — confirm intended.
async function voteChangeRangeSize(req, res) {
  if (/^[0-9]+$/.test(req.params.rangeSize)) {
    logger.info('Voting for changing range size')
    const epoch = await bridge.epoch()
    const message = buildMessage(
      Action.VOTE_CHANGE_RANGE_SIZE,
      epoch,
      parseInt(req.params.rangeSize, 10),
      padZeros(req.attempt, 54)
    )
    await processMessage(message)
    res.send('Voted\n')
    logger.info('Voted successfully')
  }
}
// GET /vote/changeCloseEpoch/:closeEpoch — sign a vote to toggle the
// close-epoch flag; only the literal strings 'true'/'false' are accepted.
// NOTE(review): other values send no response and the request hangs.
async function voteChangeCloseEpoch(req, res) {
  if (req.params.closeEpoch === 'true' || req.params.closeEpoch === 'false') {
    logger.info('Voting for changing close epoch')
    const epoch = await bridge.epoch()
    const message = buildMessage(
      Action.VOTE_CHANGE_CLOSE_EPOCH,
      epoch,
      req.params.closeEpoch === 'true',
      padZeros(req.attempt, 56)
    )
    await processMessage(message)
    res.send('Voted\n')
    logger.info('Voted successfully')
  }
}
// GET /vote/removeValidator/:validator — sign a vote to remove a validator.
// NOTE(review): invalid addresses send no response and the request hangs —
// confirm intended (mirrors voteAddValidator).
async function voteRemoveValidator(req, res) {
  if (ethers.utils.isHexString(req.params.validator, 20)) {
    logger.info('Voting for removing validator')
    const epoch = await bridge.epoch()
    const message = buildMessage(
      Action.VOTE_REMOVE_VALIDATOR,
      epoch,
      req.params.validator,
      padZeros(req.attempt, 18)
    )
    await processMessage(message)
    res.send('Voted\n')
    logger.info('Voted successfully')
  }
}
// POST /transfer — sign a TRANSFER message for an exchange request
// (tx hash, recipient, value) in the given epoch. Invalid recipients are
// skipped silently; the caller receives an empty 200 either way.
async function transfer(req, res) {
  logger.info('Transfer start')
  const {
    hash, to, value, epoch
  } = req.body
  if (ethers.utils.isHexString(to, 20)) {
    logger.info(`Calling transfer to ${to}, 0x${value} tokens`)
    const message = buildMessage(Action.TRANSFER, epoch, hash, to, padZeros(value, 24))
    logger.info(`Message for sign: ${message.toString('hex')}`)
    await processMessage(message)
  }
  res.send()
  logger.info('Transfer end')
}
// Fetch the foreign account and map each asset symbol to its free balance.
// Any request or response-shape failure yields an empty map instead of throwing.
async function getForeignBalances(address) {
  try {
    const response = await httpClient.get(`/api/v1/account/${address}`)
    const balances = {}
    for (const entry of response.data.balances) {
      balances[entry.symbol] = entry.free
    }
    return balances
  } catch (e) {
    return {}
  }
}
// GET /info — aggregate a full bridge status snapshot: contract views,
// home token balance and foreign-chain balances for the threshold address.
// On any failure, replies with an error payload instead of throwing.
async function info(req, res) {
  logger.debug('Info start')
  try {
    // Destructuring order must match the Promise.all array below exactly.
    const [
      x, y, epoch, rangeSize, nextRangeSize, closeEpoch, nextCloseEpoch, epochStartBlock,
      foreignNonce, nextEpoch, threshold, nextThreshold, validators, nextValidators, bridgeStatus,
      homeBalance
    ] = await Promise.all([
      bridge.getX().then((value) => new BN(value).toString(16)),
      bridge.getY().then((value) => new BN(value).toString(16)),
      bridge.epoch(),
      bridge.getRangeSize(),
      bridge.getNextRangeSize(),
      bridge.getCloseEpoch(),
      bridge.getNextCloseEpoch(),
      bridge.getStartBlock(),
      bridge.getNonce(),
      bridge.nextEpoch(),
      bridge.getThreshold(),
      bridge.getNextThreshold(),
      bridge.getValidators(),
      bridge.getNextValidators(),
      bridge.status(),
      // Token balance scaled down by 18 decimals, floored to 8 places.
      token.balanceOf(HOME_BRIDGE_ADDRESS)
        .then((value) => parseFloat(new BN(value).dividedBy(10 ** 18).toFixed(8, 3)))
    ])
    // The foreign bridge address is derived from the threshold public key.
    const foreignAddress = publicKeyToAddress({
      x,
      y
    })
    const balances = await getForeignBalances(foreignAddress)
    const msg = {
      epoch,
      rangeSize,
      nextRangeSize,
      epochStartBlock,
      nextEpoch,
      threshold,
      nextThreshold,
      closeEpoch,
      nextCloseEpoch,
      homeBridgeAddress: HOME_BRIDGE_ADDRESS,
      foreignBridgeAddress: foreignAddress,
      foreignNonce,
      validators,
      nextValidators,
      homeBalance,
      // Missing balances coerce to 0 rather than NaN.
      foreignBalanceTokens: parseFloat(balances[FOREIGN_ASSET]) || 0,
      foreignBalanceNative: parseFloat(balances.BNB) || 0,
      bridgeStatus: decodeStatus(bridgeStatus)
    }
    logger.trace('%o', msg)
    res.send(msg)
  } catch (e) {
    logger.debug('%o', e)
    res.send({
      message: 'Something went wrong, resend request',
      error: e
    })
  }
  logger.debug('Info end')
}
app.get('/status', status)
app.post('/get', get)
app.post('/set', set)
app.post('/signupkeygen', signupKeygen)
@ -46,25 +477,35 @@ app.post('/signupsign', signupSign)
app.post('/confirmKeygen', confirmKeygen)
app.post('/confirmFundsTransfer', confirmFundsTransfer)
app.post('/confirmCloseEpoch', confirmCloseEpoch)
app.post('/transfer', transfer)
const votesProxyApp = express()
votesProxyApp.use(express.json())
votesProxyApp.use(express.urlencoded({ extended: true }))
votesProxyApp.get('/vote/startVoting', voteStartVoting)
votesProxyApp.get('/vote/startKeygen', voteStartKeygen)
votesProxyApp.get('/vote/cancelKeygen', voteCancelKeygen)
votesProxyApp.use('/vote', (req, res, next) => {
if (/^[0-9]+$/.test(req.query.attempt)) {
req.attempt = parseInt(req.query.attempt, 10).toString(16)
logger.debug(`Vote attempt 0x${req.attempt}`)
next()
} else if (!req.query.attempt) {
req.attempt = '0'
logger.debug('Vote attempt 0x00')
next()
}
})
votesProxyApp.get('/vote/addValidator/:validator', voteAddValidator)
votesProxyApp.get('/vote/removeValidator/:validator', voteRemoveValidator)
votesProxyApp.get('/vote/changeThreshold/:threshold', voteChangeThreshold)
votesProxyApp.get('/vote/changeRangeSize/:rangeSize', voteChangeRangeSize)
votesProxyApp.get('/vote/changeCloseEpoch/:closeEpoch', voteChangeCloseEpoch)
votesProxyApp.get('/info', info)
async function main () {
homeValidatorNonce = await homeWeb3.eth.getTransactionCount(validatorAddress)
sideValidatorNonce = await sideWeb3.eth.getTransactionCount(validatorAddress)
async function main() {
sideValidatorNonce = await sideWallet.getTransactionCount()
homeSender = await createSender(HOME_RPC_URL, VALIDATOR_PRIVATE_KEY)
sideSender = await createSender(SIDE_RPC_URL, VALIDATOR_PRIVATE_KEY)
logger.warn(`My validator address in home and side networks is ${validatorAddress}`)
@ -79,309 +520,3 @@ async function main () {
}
main()
function Ok (data) {
return { Ok: data }
}
function Err (data) {
return { Err: data }
}
async function get (req, res) {
logger.debug('Get call, %o', req.body.key)
const round = req.body.key.second
const uuid = req.body.key.third
let from
if (uuid.startsWith('k'))
from = (await bridge.methods.getNextValidators().call())[parseInt(req.body.key.first) - 1]
else {
const validators = await bridge.methods.getValidators().call()
from = await sharedDb.methods.getSignupAddress(uuid, validators, parseInt(req.body.key.first)).call()
}
const to = Number(req.body.key.fourth) // 0 if empty
const key = homeWeb3.utils.sha3(`${round}_${to}`)
const data = await sharedDb.methods.getData(from, sideWeb3.utils.sha3(uuid), key).call()
if (data.length > 2) {
logger.trace(`Received encoded data: ${data}`)
const decoded = decode(uuid[0] === 'k', round, data)
logger.trace('Decoded data: %o', decoded)
res.send(Ok({ key: req.body.key, value: decoded }))
} else {
setTimeout(() => res.send(Err(null)), 1000)
}
logger.debug('Get end')
}
async function set (req, res) {
logger.debug('Set call')
const round = req.body.key.second
const uuid = req.body.key.third
const to = Number(req.body.key.fourth)
const key = homeWeb3.utils.sha3(`${round}_${to}`)
logger.trace('Received data: %o', req.body.value)
const encoded = encode(uuid[0] === 'k', round, req.body.value)
logger.trace(`Encoded data: ${encoded.toString('hex')}`)
logger.trace(`Received data: ${req.body.value.length} bytes, encoded data: ${encoded.length} bytes`)
const query = sharedDb.methods.setData(sideWeb3.utils.sha3(uuid), key, encoded)
await sideSendQuery(query)
res.send(Ok(null))
logger.debug('Set end')
}
async function signupKeygen (req, res) {
logger.debug('SignupKeygen call')
const epoch = (await bridge.methods.nextEpoch().call()).toNumber()
const partyId = (await bridge.methods.getNextPartyId(validatorAddress).call()).toNumber()
if (partyId === 0) {
res.send(Err({ message: 'Not a validator' }))
logger.debug('Not a validator')
} else {
res.send(Ok({ uuid: `k${epoch}`, number: partyId }))
logger.debug('SignupKeygen end')
}
}
async function signupSign (req, res) {
logger.debug('SignupSign call')
const hash = sideWeb3.utils.sha3(`0x${req.body.third}`)
const query = sharedDb.methods.signupSign(hash)
const { txHash } = await sideSendQuery(query)
const receipt = await waitForReceipt(SIDE_RPC_URL, txHash)
// Already have signup
if (receipt.status === false) {
res.send(Ok({ uuid: hash, number: 0 }))
logger.debug('Already have signup')
return
}
const validators = await bridge.methods.getValidators().call()
const id = (await sharedDb.methods.getSignupNumber(hash, validators, validatorAddress).call()).toNumber()
res.send(Ok({ uuid: hash, number: id }))
logger.debug('SignupSign end')
}
async function confirmKeygen (req, res) {
logger.debug('Confirm keygen call')
const { x, y } = req.body[5]
const query = bridge.methods.confirmKeygen(`0x${x}`, `0x${y}`)
await homeSendQuery(query)
res.send()
logger.debug('Confirm keygen end')
}
async function confirmFundsTransfer (req, res) {
logger.debug('Confirm funds transfer call')
const query = bridge.methods.confirmFundsTransfer()
await homeSendQuery(query)
res.send()
logger.debug('Confirm funds transfer end')
}
// Serializes transactions to the side-chain shared db so the local
// sideValidatorNonce counter is incremented and used atomically per tx.
// FIX: the original acquired the 'home' lock (copy-paste from homeSendQuery),
// which needlessly serialized side-chain queries with home-chain queries even
// though the two chains use independent senders and nonce counters. Use a
// dedicated 'side' lock instead.
function sideSendQuery(query) {
  return lock.acquire('side', async () => {
    logger.debug('Sending side query')
    const encodedABI = query.encodeABI()
    return sideSender({
      data: encodedABI,
      to: SIDE_SHARED_DB_ADDRESS,
      nonce: sideValidatorNonce++
    })
  })
}
// Serializes transactions to the home bridge contract under the 'home' lock
// so homeValidatorNonce is incremented and consumed atomically per tx.
function homeSendQuery(query) {
  return lock.acquire('home', async () => {
    logger.debug('Sending home query')
    const payload = {
      data: query.encodeABI(),
      to: HOME_BRIDGE_ADDRESS,
      nonce: homeValidatorNonce++
    }
    return homeSender(payload)
  })
}
// Extracts the revert reason embedded in a node error message of the form
// ..."reason":"<text>"... Returns '' when no reason field is present.
function parseReason(message) {
  const match = /"reason":"(.*?)"/.exec(message)
  return match ? match[1] : ''
}
// Extracts the error text embedded in a node error message of the form
// ..."error":"<text>"... Returns '' when no error field is present.
function parseError(message) {
  const match = /"error":"(.*?)"/.exec(message)
  return match ? match[1] : ''
}
// Submits a vote transaction to the home bridge and reports the outcome to
// the HTTP caller.
//   query    — prepared contract call to send
//   req, res — express request/response (req kept for handler shape)
//   waitFlag — when true, waits for the receipt and retries with a larger
//              gas limit if the tx was reverted by running out of gas
// FIX: the original's catch block only logged the error and never answered
// the HTTP request, leaving the caller hanging forever; it now responds
// 'Failed\n' on any exception.
async function sendVote(query, req, res, waitFlag = false) {
  try {
    let { txHash, gasLimit } = await homeSendQuery(query)
    if (!txHash) {
      res.send('Failed\n')
      logger.info('Failed to vote')
      return
    }
    while (waitFlag) {
      const { status, gasUsed } = await waitForReceipt(HOME_RPC_URL, txHash)
      if (status === '0x1') {
        logger.debug('Receipt status is OK')
        break
      }
      // gasUsed === gasLimit is the signature of an out-of-gas revert.
      if (gasLimit === gasUsed) {
        logger.info('Sending vote failed due to out of gas revert, retrying with more gas')
        const nextTx = await homeSendQuery(query)
        txHash = nextTx.txHash
        gasLimit = nextTx.gasLimit
      } else {
        logger.warn(`Vote tx was reverted, txHash ${txHash}`)
        break
      }
    }
    res.send('Voted\n')
    logger.info('Voted successfully')
  } catch (e) {
    logger.debug(e)
    res.send('Failed\n')
  }
}
// Vote to begin the next-epoch voting process; waits for the tx receipt.
async function voteStartVoting(req, res) {
  logger.info('Voting for starting new epoch voting process')
  sendVote(bridge.methods.startVoting(), req, res, true)
}
// Vote to start the keygen phase for the next epoch.
async function voteStartKeygen(req, res) {
  logger.info('Voting for starting new epoch keygen')
  sendVote(bridge.methods.voteStartKeygen(), req, res)
}
// Vote to cancel the in-progress next-epoch keygen.
async function voteCancelKeygen(req, res) {
  logger.info('Voting for cancelling new epoch keygen')
  sendVote(bridge.methods.voteCancelKeygen(), req, res)
}
// Vote to add the validator given in the URL path to the next validator set.
async function voteAddValidator(req, res) {
  logger.info('Voting for adding new validator')
  sendVote(bridge.methods.voteAddValidator(req.params.validator), req, res)
}
// Vote to change the signing threshold to the value given in the URL path.
async function voteChangeThreshold(req, res) {
  logger.info('Voting for changing threshold')
  sendVote(bridge.methods.voteChangeThreshold(req.params.threshold), req, res)
}
// Vote to remove the validator given in the URL path; waits for the receipt.
async function voteRemoveValidator(req, res) {
  logger.info('Voting for removing validator')
  sendVote(bridge.methods.voteRemoveValidator(req.params.validator), req, res, true)
}
// Maps the numeric bridge status code from the contract to a readable name.
// FIX: the original fell through the switch and returned undefined for any
// unrecognized code; return an explicit 'unknown_state' marker instead
// (matching the newer proxy utils helper in this codebase).
function decodeStatus(status) {
  switch (status) {
    case 0:
      return 'ready'
    case 1:
      return 'voting'
    case 2:
      return 'keygen'
    case 3:
      return 'funds_transfer'
    default:
      return 'unknown_state'
  }
}
// Safely converts a BN-like value to a JS number; returns -1 when
// toNumber() throws (e.g. the value does not fit in a number).
function boundX(x) {
  let result
  try {
    result = x.toNumber()
  } catch (err) {
    result = -1
  }
  return result
}
// GET /info handler: aggregates home-bridge contract state, pending vote
// counters, and foreign-chain balances into one JSON status payload.
// On any failure it responds with an error envelope instead of throwing.
async function info (req, res) {
  logger.debug('Info start')
  try {
    // First batch: independent contract reads performed concurrently.
    // boundX maps BN values that do not fit a JS number to -1.
    const [ x, y, epoch, rangeSize, nextRangeSize, epochStartBlock, foreignNonce, nextEpoch, threshold, nextThreshold, validators, nextValidators, status, homeBalance ] = await Promise.all([
      bridge.methods.getX().call().then(x => new BN(x).toString(16)),
      bridge.methods.getY().call().then(x => new BN(x).toString(16)),
      bridge.methods.epoch().call().then(x => x.toNumber()),
      bridge.methods.getRangeSize().call().then(x => x.toNumber()),
      bridge.methods.getNextRangeSize().call().then(x => x.toNumber()),
      bridge.methods.getStartBlock().call().then(x => x.toNumber()),
      bridge.methods.getNonce().call().then(boundX),
      bridge.methods.nextEpoch().call().then(x => x.toNumber()),
      bridge.methods.getThreshold().call().then(x => x.toNumber()),
      bridge.methods.getNextThreshold().call().then(x => x.toNumber()),
      bridge.methods.getValidators().call(),
      bridge.methods.getNextValidators().call(),
      bridge.methods.status().call(),
      // Token balance scaled down by 18 decimals and truncated to 8 places.
      token.methods.balanceOf(HOME_BRIDGE_ADDRESS).call().then(x => parseFloat(new BN(x).dividedBy(10 ** 18).toFixed(8, 3)))
    ])
    // Second batch: vote counters keyed by sha3(packed(voteTypeId, nextEpoch)).
    // NOTE(review): the ids 1/2/7/8 presumably match vote-type constants in
    // the Bridge contract — confirm against the contract source.
    const [ confirmationsForFundsTransfer, votesForVoting, votesForKeygen, votesForCancelKeygen ] = await Promise.all([
      bridge.methods.votesCount(homeWeb3.utils.sha3(utils.solidityPack([ 'uint8', 'uint256' ], [ 1, nextEpoch ]))).call().then(boundX),
      bridge.methods.votesCount(homeWeb3.utils.sha3(utils.solidityPack([ 'uint8', 'uint256' ], [ 2, nextEpoch ]))).call().then(boundX),
      bridge.methods.votesCount(homeWeb3.utils.sha3(utils.solidityPack([ 'uint8', 'uint256' ], [ 7, nextEpoch ]))).call().then(boundX),
      bridge.methods.votesCount(homeWeb3.utils.sha3(utils.solidityPack([ 'uint8', 'uint256' ], [ 8, nextEpoch ]))).call().then(boundX)
    ])
    // The foreign (Binance chain) account is derived from the threshold key.
    const foreignAddress = publicKeyToAddress({ x, y })
    const balances = await getForeignBalances(foreignAddress)
    res.send({
      epoch,
      rangeSize,
      nextRangeSize,
      epochStartBlock,
      nextEpoch,
      threshold,
      nextThreshold,
      homeBridgeAddress: HOME_BRIDGE_ADDRESS,
      foreignBridgeAddress: foreignAddress,
      foreignNonce,
      validators,
      nextValidators,
      homeBalance,
      foreignBalanceTokens: parseFloat(balances[FOREIGN_ASSET]) || 0,
      foreignBalanceNative: parseFloat(balances['BNB']) || 0,
      bridgeStatus: decodeStatus(status),
      votesForVoting,
      votesForKeygen,
      votesForCancelKeygen,
      confirmationsForFundsTransfer
    })
  } catch (e) {
    res.send({ message: 'Something went wrong, resend request', error: e })
  }
  logger.debug('Info end')
}
// POST /transfer handler: relays an exchange transfer to the home bridge.
// Requests with an invalid recipient address are silently ignored; the
// response is always an empty 200.
async function transfer(req, res) {
  logger.info('Transfer start')
  const { hash, to, value } = req.body
  if (homeWeb3.utils.isAddress(to)) {
    logger.info(`Calling transfer to ${to}, ${value} tokens`)
    const hexValue = `0x${new BN(value).toString(16)}`
    await homeSendQuery(bridge.methods.transfer(hash, to, hexValue))
  }
  res.send()
  logger.info('Transfer end')
}
// Fetches the free balances of a Binance chain account as a
// { symbol: freeAmount } map. Resolves to {} on any request/parsing error.
async function getForeignBalances(address) {
  try {
    const res = await httpClient.get(`/api/v1/account/${address}`)
    const balances = {}
    res.data.balances.forEach((balance) => {
      balances[balance.symbol] = balance.free
    })
    return balances
  } catch (err) {
    return {}
  }
}

View File

@ -2,7 +2,6 @@
"name": "proxy",
"version": "0.0.1",
"dependencies": {
"web3": "1.0.0-beta.55",
"bech32": "1.1.3",
"express": "4.17.1",
"async-lock": "1.2.0",
@ -12,5 +11,8 @@
"ethers": "4.0.37",
"pino": "5.13.4",
"pino-pretty": "3.2.1"
},
"engines": {
"node": ">=10.6.0"
}
}

View File

@ -1,34 +1,31 @@
const Web3 = require('web3')
const axios = require('axios')
const ethers = require('ethers')
const BN = require('bignumber.js')
const logger = require('./logger')
const { delay, retry } = require('./wait')
const { GAS_LIMIT_FACTOR, MAX_GAS_LIMIT } = process.env
function sendRpcRequest (url, method, params) {
return axios.post(url, {
async function sendRpcRequest(url, method, params) {
logger.trace(`Request to ${url}, method ${method}, params %o`, params)
const response = await retry(() => axios.post(url, {
jsonrpc: '2.0',
method,
params,
id: 1
})
.then(res => res.data)
.catch(async e => {
logger.warn(`Request to ${url}, method ${method} failed, retrying`)
await new Promise(res => setTimeout(res, 1000))
return sendRpcRequest(url, method, params)
})
}))
logger.trace('Response %o', response.data)
return response.data
}
async function createSender (url, privateKey) {
const web3 = new Web3(url, null, { transactionConfirmationBlocks: 1 })
const signer = new ethers.utils.SigningKey(privateKey)
async function createSender(url, privateKey) {
const provider = new ethers.providers.JsonRpcProvider(url)
const wallet = new ethers.Wallet(privateKey, provider)
const chainId = await web3.eth.net.getId()
return async function (tx) {
tx = {
const { chainId } = await provider.getNetwork()
return async function send(tx) {
const newTx = {
data: tx.data,
to: tx.to,
nonce: tx.nonce,
@ -38,29 +35,30 @@ async function createSender (url, privateKey) {
}
try {
logger.trace(`Preparing and sending transaction %o on ${url}`, tx)
const estimate = await sendRpcRequest(url, 'eth_estimateGas', [ {
from: signer.address,
to: tx.to,
data: tx.data,
gasPrice: tx.gasPrice,
value: tx.value,
logger.trace(`Preparing and sending transaction %o on ${url}`, newTx)
const estimate = await sendRpcRequest(url, 'eth_estimateGas', [{
from: wallet.address,
to: newTx.to,
data: newTx.data,
gasPrice: newTx.gasPrice,
value: newTx.value,
gas: `0x${new BN(MAX_GAS_LIMIT).toString(16)}`
} ])
}])
if (estimate.error) {
logger.debug('Gas estimate failed %o', estimate.error)
return false
logger.debug('Gas estimate failed %o, skipping tx, reverting nonce', estimate.error)
return true
}
const gasLimit = BN.min(new BN(estimate.result, 16).multipliedBy(GAS_LIMIT_FACTOR), MAX_GAS_LIMIT)
tx.gasLimit = `0x${new BN(gasLimit).toString(16)}`
const gasLimit = BN.min(
new BN(estimate.result, 16).multipliedBy(GAS_LIMIT_FACTOR),
MAX_GAS_LIMIT
)
newTx.gasLimit = `0x${new BN(gasLimit).toString(16)}`
logger.trace(`Estimated gas to ${gasLimit}`)
const hash = web3.utils.sha3(ethers.utils.serializeTransaction(tx))
const signature = signer.signDigest(hash)
const signedTx = ethers.utils.serializeTransaction(tx, signature)
const signedTx = await wallet.sign(newTx)
const { result, error } = await sendRpcRequest(url, 'eth_sendRawTransaction', [ signedTx ])
const { result, error } = await sendRpcRequest(url, 'eth_sendRawTransaction', [signedTx])
// handle nonce error
// handle insufficient funds error
if (error) {
@ -68,7 +66,10 @@ async function createSender (url, privateKey) {
return false
}
return { txHash: result, gasLimit: tx.gasLimit }
return {
txHash: result,
gasLimit: newTx.gasLimit
}
} catch (e) {
logger.warn('Something failed, %o', e)
return false
@ -76,16 +77,20 @@ async function createSender (url, privateKey) {
}
}
async function waitForReceipt (url, txHash) {
async function waitForReceipt(url, txHash) {
const provider = new ethers.providers.JsonRpcProvider(url)
while (true) {
const { result, error } = await sendRpcRequest(url, 'eth_getTransactionReceipt', [ txHash ])
const receipt = await provider.getTransactionReceipt(txHash)
if (result === null || error) {
await new Promise(res => setTimeout(res, 1000))
} else {
return result
if (receipt) {
return receipt
}
await delay(1000)
}
}
module.exports = { createSender, waitForReceipt }
module.exports = {
createSender,
waitForReceipt
}

30
src/oracle/proxy/utils.js Normal file
View File

@ -0,0 +1,30 @@
// Wraps a successful payload in the { Ok } envelope used by the proxy API.
function Ok(data) {
  const envelope = { Ok: data }
  return envelope
}
// Wraps an error payload in the { Err } envelope used by the proxy API.
function Err(data) {
  const envelope = { Err: data }
  return envelope
}
// Maps the numeric bridge status code to a readable name; any value not in
// the table (including string-typed numbers) yields 'unknown_state'.
function decodeStatus(status) {
  const names = new Map([
    [0, 'ready'],
    [1, 'closing_epoch'],
    [2, 'voting'],
    [3, 'keygen'],
    [4, 'funds_transfer']
  ])
  // Map lookup uses SameValueZero, matching the original switch's strict
  // equality ('0' !== 0).
  return names.has(status) ? names.get(status) : 'unknown_state'
}
module.exports = {
Ok,
Err,
decodeStatus
}

View File

@ -0,0 +1,5 @@
FROM redis:5.0.5-alpine
COPY ./redis/redis.conf /usr/local/etc/redis/
CMD ["redis-server", "/usr/local/etc/redis/redis.conf"]

View File

@ -0,0 +1,8 @@
{
"extends": [
"../../../.eslintrc"
],
"rules": {
"no-console": 0
}
}

View File

@ -0,0 +1,3 @@
.dockerignore
Dockerfile
run.sh

View File

@ -3,5 +3,8 @@
"version": "0.0.1",
"dependencies": {
"ioredis": "4.14.1"
},
"engines": {
"node": ">=10.6.0"
}
}

View File

@ -12,7 +12,7 @@ redis.on('error', () => {
})
redis.on('connect', async () => {
await redis.set('homeBlock', parseInt(process.argv[2]))
await redis.set('homeBlock', parseInt(process.argv[2], 10))
await redis.save()
redis.disconnect()
})

View File

@ -0,0 +1,9 @@
{
"extends": [
"../../../.eslintrc"
],
"rules": {
"import/no-extraneous-dependencies": 0,
"node/no-extraneous-require": 0
}
}

View File

@ -1,31 +1,26 @@
const amqp = require('amqplib')
const logger = require('./logger')
function _connectRabbit (url) {
return amqp.connect(url).catch(() => {
logger.debug('Failed to connect to rabbitmqServer, reconnecting')
return new Promise(resolve =>
setTimeout(() => resolve(_connectRabbit(url)), 2000)
)
})
}
const { retry } = require('./wait')
async function connectRabbit(url) {
const connection = await _connectRabbit(url)
return await connection.createChannel()
logger.info('Connecting to RabbitMQ server')
return (await retry(() => amqp.connect(url))).createChannel()
}
async function assertQueue (channel, name) {
async function assertQueue(channel, name) {
const queue = await channel.assertQueue(name)
return {
name: queue.queue,
send: msg => channel.sendToQueue(queue.queue, Buffer.from(JSON.stringify(msg)), {
send: (msg) => channel.sendToQueue(queue.queue, Buffer.from(JSON.stringify(msg)), {
persistent: true
}),
get: consumer => channel.get(queue.queue, consumer),
consume: consumer => channel.consume(queue.queue, consumer)
get: (consumer) => channel.get(queue.queue, consumer),
consume: (consumer) => channel.consume(queue.queue, consumer)
}
}
module.exports = { connectRabbit, assertQueue }
module.exports = {
connectRabbit,
assertQueue
}

View File

@ -1,7 +1,27 @@
const crypto = require('crypto')
const bech32 = require('bech32')
function publicKeyToAddress ({ x, y }) {
// Left-pads string s with '0' characters until it is at least len long;
// strings already >= len are returned unchanged.
function padZeros(s, len) {
  return s.padStart(len, '0')
}
function sha256(bytes) {
return crypto.createHash('sha256')
.update(bytes)
.digest('hex')
}
function ripemd160(bytes) {
return crypto.createHash('ripemd160')
.update(bytes)
.digest('hex')
}
function publicKeyToAddress({ x, y }) {
const compact = (parseInt(y[y.length - 1], 16) % 2 ? '03' : '02') + padZeros(x, 64)
const sha256Hash = sha256(Buffer.from(compact, 'hex'))
const hash = ripemd160(Buffer.from(sha256Hash, 'hex'))
@ -9,18 +29,8 @@ function publicKeyToAddress ({ x, y }) {
return bech32.encode('tbnb', words)
}
function padZeros (s, len) {
while (s.length < len)
s = '0' + s
return s
module.exports = {
publicKeyToAddress,
padZeros,
sha256
}
function sha256 (bytes) {
return crypto.createHash('sha256').update(bytes).digest('hex')
}
function ripemd160 (bytes) {
return crypto.createHash('ripemd160').update(bytes).digest('hex')
}
module.exports = { publicKeyToAddress, padZeros, sha256 }

View File

@ -1,6 +1,7 @@
const Redis = require('ioredis')
const logger = require('./logger')
logger.info('Connecting to redis')
const redis = new Redis({
@ -14,7 +15,7 @@ redis.on('connect', () => {
logger.info('Connected to redis')
})
redis.on('error', e => {
redis.on('error', (e) => {
logger.warn('Redis error %o', e)
})

24
src/oracle/shared/wait.js Normal file
View File

@ -0,0 +1,24 @@
const logger = require('./logger')
// Returns a promise that resolves after ms milliseconds.
function delay(ms) {
  return new Promise((resolve) => setTimeout(resolve, ms))
}
// Awaits the promise produced by getPromise, retrying on rejection.
//   n     — max attempts; a negative value retries forever
//   sleep — milliseconds to wait between attempts
// Returns the first fulfilled value, or null when attempts are exhausted.
async function retry(getPromise, n = -1, sleep = 3000) {
  let attemptsLeft = n
  while (attemptsLeft !== 0) {
    try {
      return await getPromise()
    } catch (err) {
      attemptsLeft -= 1
      logger.debug(`Promise failed, retrying, ${attemptsLeft} attempts left`)
      await delay(sleep)
    }
  }
  return null
}
module.exports = {
delay,
retry
}

View File

@ -9,7 +9,7 @@ COPY ./tss-keygen/package.json /tss/
RUN npm install
COPY ./tss-keygen/keygen-entrypoint.sh ./tss-keygen/keygen.js ./shared/logger.js ./shared/amqp.js ./shared/crypto.js /tss/
COPY ./tss-keygen/keygen-entrypoint.sh ./tss-keygen/keygen.js ./shared/logger.js ./shared/amqp.js ./shared/crypto.js ./shared/wait.js /tss/
COPY --from=tss /tss/target/release/gg18_keygen_client /tss/

View File

@ -1,14 +1,156 @@
const exec = require('child_process')
const fs = require('fs')
const express = require('express')
const axios = require('axios')
const logger = require('./logger')
const { connectRabbit, assertQueue } = require('./amqp')
const { publicKeyToAddress } = require('./crypto')
const { delay } = require('./wait')
const { RABBITMQ_URL, PROXY_URL } = process.env
const KEYGEN_ATTEMPT_TIMEOUT = parseInt(process.env.KEYGEN_ATTEMPT_TIMEOUT, 10)
const KEYGEN_EPOCH_CHECK_INTERVAL = parseInt(process.env.KEYGEN_EPOCH_CHECK_INTERVAL, 10)
const KEYGEN_OK = 0
const KEYGEN_EPOCH_INTERRUPT = 1
const KEYGEN_FAILED = 2
const app = express()
const proxyClient = axios.create({ baseURL: PROXY_URL })
let channel
let currentKeygenEpoch = null
let ready = false
// Reports the generated threshold public key for the given epoch back to
// the proxy service.
async function confirmKeygen({ x, y }, epoch) {
  const payload = { x, y, epoch }
  await proxyClient.post('/confirmKeygen', payload)
}
// Writes the gg18 keygen parameters file. The contract threshold is
// decremented by one here — presumably the gg18 client counts the threshold
// differently than the bridge contract; confirm against the gg18 docs.
function writeParams(parties, threshold) {
  logger.debug('Writing params')
  const params = {
    parties: parties.toString(),
    threshold: (threshold - 1).toString()
  }
  fs.writeFileSync('./params', JSON.stringify(params))
}
// Best-effort kill of any running gg18 keygen client process; the
// '|| true' keeps execSync from throwing when no matching process exists.
function killKeygen() {
  exec.execSync('pkill gg18_keygen || true')
}
// GET /restart handler: manually aborts the in-progress keygen attempt by
// killing the gg18 client.
function restart(req, res) {
  logger.info('Manual cancelling current keygen attempt')
  killKeygen()
  res.send('Done')
}
// Runs one gg18 keygen attempt for the given epoch.
// Resolves KEYGEN_OK when the keys file appears after the process exits,
// KEYGEN_EPOCH_INTERRUPT when the attempt was killed because the bridge
// already moved past this epoch, and KEYGEN_FAILED otherwise.
function keygen(keysFile, epoch) {
  let restartTimeoutId
  let epochDaemonIntervalId
  let epochInterrupt
  return new Promise((resolve) => {
    const cmd = exec.execFile('./keygen-entrypoint.sh', [PROXY_URL, keysFile], (error) => {
      logger.trace('Keygen entrypoint exited, %o', error)
      // Stop the attempt watchdog and the epoch poller once the process exits.
      clearTimeout(restartTimeoutId)
      clearInterval(epochDaemonIntervalId)
      currentKeygenEpoch = null
      // Success is detected by the presence of the keys file, not exit code.
      if (fs.existsSync(keysFile)) {
        logger.info(`Finished keygen for epoch ${epoch}`)
        resolve(KEYGEN_OK)
      } else {
        logger.warn(`Keygen for epoch ${epoch} failed, will start new attempt`)
        resolve(epochInterrupt ? KEYGEN_EPOCH_INTERRUPT : KEYGEN_FAILED)
      }
    })
    cmd.stdout.on('data', (data) => {
      const str = data.toString()
      // Arm the attempt timeout only once all parties have signed up.
      if (str.includes('Got all party signups')) {
        restartTimeoutId = setTimeout(killKeygen, KEYGEN_ATTEMPT_TIMEOUT)
      }
      logger.debug(str)
    })
    cmd.stderr.on('data', (data) => logger.debug(data.toString()))
    // Kill keygen if keygen for current epoch is already confirmed
    epochDaemonIntervalId = setInterval(async () => {
      logger.info(`Checking if bridge has confirmations keygen for epoch ${epoch}`)
      const { bridgeEpoch, bridgeStatus } = (await proxyClient.get('/status')).data
      logger.trace(`Current bridge epoch: ${bridgeEpoch}, current bridge status: ${bridgeStatus}`)
      if (bridgeEpoch > epoch || bridgeStatus > 3) {
        logger.info(`Bridge has already confirmed keygen for epoch ${epoch}`)
        epochInterrupt = true
        // Additional delay, maybe keygen will eventually finish
        await delay(5000)
        killKeygen()
      }
    }, KEYGEN_EPOCH_CHECK_INTERVAL)
  })
}
// Queue consumer for new-epoch events: repeatedly attempts keygen until it
// succeeds (then confirms the key to the proxy) or is interrupted because
// the epoch moved on, and finally acks the message.
async function keygenConsumer(msg) {
  const { epoch, parties, threshold } = JSON.parse(msg.content)
  logger.info(`Consumed new epoch event, starting keygen for epoch ${epoch}`)
  const keysFile = `/keys/keys${epoch}.store`
  logger.info('Running ./keygen-entrypoint.sh')
  currentKeygenEpoch = epoch
  writeParams(parties, threshold)
  while (true) {
    const keygenResult = await keygen(keysFile, epoch)
    if (keygenResult === KEYGEN_OK) {
      // Element [5] of the keys-file array carries the shared public key.
      const publicKey = JSON.parse(fs.readFileSync(keysFile))[5]
      logger.warn(`Generated multisig account in binance chain: ${publicKeyToAddress(publicKey)}`)
      logger.info('Sending keys confirmation')
      await confirmKeygen(publicKey, epoch)
      break
    } else if (keygenResult === KEYGEN_EPOCH_INTERRUPT) {
      logger.warn('Keygen was interrupted by epoch daemon')
      break
    }
    // KEYGEN_FAILED: pause briefly, then start a new attempt.
    await delay(1000)
  }
  logger.info('Acking message')
  channel.ack(msg)
}
// Entry point: connects to RabbitMQ, waits until the /start endpoint flips
// the ready flag, then consumes keygen and keygen-cancellation events.
async function main() {
  channel = await connectRabbit(RABBITMQ_URL)
  logger.info('Connecting to epoch events queue')
  const keygenQueue = await assertQueue(channel, 'keygenQueue')
  const cancelKeygenQueue = await assertQueue(channel, 'cancelKeygenQueue')
  // Block until the operator hits GET /start.
  while (!ready) {
    await delay(1000)
  }
  // Process at most one queue message at a time.
  channel.prefetch(1)
  keygenQueue.consume(keygenConsumer)
  cancelKeygenQueue.consume(async (msg) => {
    const { epoch } = JSON.parse(msg.content)
    logger.info(`Consumed new cancel event for epoch ${epoch} keygen`)
    // Only kill the running keygen if it is for the cancelled epoch.
    if (currentKeygenEpoch === epoch) {
      logger.info('Cancelling current keygen')
      killKeygen()
    }
    channel.ack(msg)
  })
}
app.get('/restart', restart)
app.get('/start', (req, res) => {
logger.info('Ready to start')
ready = true
@ -16,64 +158,4 @@ app.get('/start', (req, res) => {
})
app.listen(8001, () => logger.debug('Listening on 8001'))
let currentKeygenEpoch = null
let ready = false
async function main () {
logger.info('Connecting to RabbitMQ server')
const channel = await connectRabbit(RABBITMQ_URL)
logger.info('Connecting to epoch events queue')
const keygenQueue = await assertQueue(channel, 'keygenQueue')
const cancelKeygenQueue = await assertQueue(channel, 'cancelKeygenQueue')
while (!ready) {
await new Promise(res => setTimeout(res, 1000))
}
channel.prefetch(1)
keygenQueue.consume(msg => {
const { epoch, parties, threshold } = JSON.parse(msg.content)
logger.info(`Consumed new epoch event, starting keygen for epoch ${epoch}`)
const keysFile = `/keys/keys${epoch}.store`
logger.info('Running ./keygen-entrypoint.sh')
currentKeygenEpoch = epoch
logger.debug('Writing params')
fs.writeFileSync('./params', JSON.stringify({ parties: parties.toString(), threshold: threshold.toString() }))
const cmd = exec.execFile('./keygen-entrypoint.sh', [ PROXY_URL, keysFile ], async () => {
currentKeygenEpoch = null
if (fs.existsSync(keysFile)) {
logger.info(`Finished keygen for epoch ${epoch}`)
const publicKey = JSON.parse(fs.readFileSync(keysFile))[5]
logger.warn(`Generated multisig account in binance chain: ${publicKeyToAddress(publicKey)}`)
logger.info('Sending keys confirmation')
await confirmKeygen(keysFile)
} else {
logger.warn(`Keygen for epoch ${epoch} failed`)
}
logger.debug('Ack for keygen message')
channel.ack(msg)
})
cmd.stdout.on('data', data => logger.debug(data.toString()))
cmd.stderr.on('data', data => logger.debug(data.toString()))
})
cancelKeygenQueue.consume(async msg => {
const { epoch } = JSON.parse(msg.content)
logger.info(`Consumed new cancel event for epoch ${epoch} keygen`)
if (currentKeygenEpoch === epoch) {
logger.info('Cancelling current keygen')
exec.execSync('pkill gg18_keygen || true')
}
channel.ack(msg)
})
}
main()
async function confirmKeygen (keysFile) {
exec.execSync(`curl -X POST -H "Content-Type: application/json" -d @"${keysFile}" "${PROXY_URL}/confirmKeygen"`, { stdio: 'pipe' })
}

View File

@ -3,9 +3,13 @@
"version": "0.0.1",
"dependencies": {
"amqplib": "0.5.3",
"axios": "0.19.0",
"bech32": "1.1.3",
"pino": "5.13.4",
"pino-pretty": "3.2.1",
"express": "4.17.1"
},
"engines": {
"node": ">=10.6.0"
}
}

View File

@ -10,7 +10,7 @@ COPY ./tss-sign/package.json /tss/
RUN npm install --no-optional
COPY ./tss-sign/sign-entrypoint.sh ./tss-sign/signer.js ./tss-sign/tx.js ./shared/logger.js ./shared/amqp.js ./shared/crypto.js /tss/
COPY ./tss-sign/sign-entrypoint.sh ./tss-sign/signer.js ./tss-sign/tx.js ./shared/logger.js ./shared/amqp.js ./shared/crypto.js ./shared/wait.js /tss/
COPY --from=tss /tss/target/release/gg18_sign_client /tss/

View File

@ -9,5 +9,8 @@
"express": "4.17.1",
"pino": "5.13.4",
"pino-pretty": "3.2.1"
},
"engines": {
"node": ">=10.6.0"
}
}

View File

@ -1,145 +1,41 @@
const exec = require('child_process')
const fs = require('fs')
const BN = require('bignumber.js')
const axios = require('axios')
const express = require('express')
const logger = require('./logger')
const { connectRabbit, assertQueue } = require('./amqp')
const { publicKeyToAddress, sha256 } = require('./crypto')
const { delay, retry } = require('./wait')
const Transaction = require('./tx')
const app = express()
app.get('/restart/:attempt', restart)
app.get('/start', (req, res) => {
logger.info('Ready to start')
ready = true
res.send()
})
app.listen(8001, () => logger.debug('Listening on 8001'))
const { RABBITMQ_URL, FOREIGN_URL, PROXY_URL, FOREIGN_ASSET } = process.env
const Transaction = require('./tx')
const axios = require('axios')
const {
RABBITMQ_URL, FOREIGN_URL, PROXY_URL, FOREIGN_ASSET
} = process.env
const SIGN_ATTEMPT_TIMEOUT = parseInt(process.env.SIGN_ATTEMPT_TIMEOUT, 10)
const SIGN_NONCE_CHECK_INTERVAL = parseInt(process.env.SIGN_NONCE_CHECK_INTERVAL, 10)
const SEND_TIMEOUT = parseInt(process.env.SEND_TIMEOUT, 10)
const httpClient = axios.create({ baseURL: FOREIGN_URL })
const proxyClient = axios.create({ baseURL: PROXY_URL })
const SIGN_OK = 0
const SIGN_NONCE_INTERRUPT = 1
const SIGN_FAILED = 2
let attempt
let nextAttempt = null
let cancelled
let ready = false
let exchangeQueue
let channel
async function main () {
logger.info('Connecting to RabbitMQ server')
channel = await connectRabbit(RABBITMQ_URL)
logger.info('Connecting to signature events queue')
exchangeQueue = await assertQueue(channel, 'exchangeQueue')
const signQueue = await assertQueue(channel, 'signQueue')
while (!ready) {
await new Promise(res => setTimeout(res, 1000))
}
channel.prefetch(1)
signQueue.consume(async msg => {
const data = JSON.parse(msg.content)
logger.info('Consumed sign event: %o', data)
const { nonce, epoch, newEpoch, parties, threshold } = data
const keysFile = `/keys/keys${epoch}.store`
const { address: from, publicKey } = getAccountFromFile(keysFile)
if (from === '') {
logger.info('No keys found, acking message')
channel.ack(msg)
return
}
const account = await getAccount(from)
logger.debug('Writing params')
fs.writeFileSync('./params', JSON.stringify({ parties: parties.toString(), threshold: threshold.toString() }))
attempt = 1
if (!newEpoch) {
const exchanges = await getExchangeMessages(nonce)
const exchangesData = exchanges.map(msg => JSON.parse(msg.content))
if (exchanges.length > 0 && account.sequence <= nonce) {
const recipients = exchangesData.map(({ value, recipient }) => ({ to: recipient, tokens: value }))
while (true) {
logger.info(`Building corresponding transfer transaction, nonce ${nonce}`)
const tx = new Transaction({
from,
accountNumber: account.account_number,
sequence: nonce,
recipients,
asset: FOREIGN_ASSET,
memo: `Attempt ${attempt}`
})
const hash = sha256(tx.getSignBytes())
logger.info(`Starting signature generation for transaction hash ${hash}`)
const done = await sign(keysFile, hash, tx, publicKey) && await waitForAccountNonce(from, nonce + 1)
if (done) {
exchanges.forEach(msg => channel.ack(msg))
break
}
attempt = nextAttempt ? nextAttempt : attempt + 1
logger.warn(`Sign failed, starting next attempt ${attempt}`)
nextAttempt = null
await new Promise(resolve => setTimeout(resolve, 1000))
}
}
} else if (account.sequence <= nonce) {
const newKeysFile = `/keys/keys${newEpoch}.store`
const { address: to } = getAccountFromFile(newKeysFile)
while (to !== '') {
logger.info(`Building corresponding transaction for transferring all funds, nonce ${nonce}, recipient ${to}`)
const tx = new Transaction({
from,
accountNumber: account.account_number,
sequence: nonce,
recipients: [ {
to,
tokens: account.balances.find(x => x.symbol === FOREIGN_ASSET).free,
bnbs: new BN(account.balances.find(x => x.symbol === 'BNB').free).minus(new BN(60000).div(10 ** 8)),
} ],
asset: FOREIGN_ASSET,
memo: `Attempt ${attempt}`
})
const hash = sha256(tx.getSignBytes())
logger.info(`Starting signature generation for transaction hash ${hash}`)
const done = await sign(keysFile, hash, tx, publicKey) && await waitForAccountNonce(from, nonce + 1)
if (done) {
await confirmFundsTransfer()
break
}
attempt = nextAttempt ? nextAttempt : attempt + 1
logger.warn(`Sign failed, starting next attempt ${attempt}`)
nextAttempt = null
await new Promise(resolve => setTimeout(resolve, 1000))
}
} else {
logger.debug('Tx has been already sent')
}
logger.info('Acking message')
channel.ack(msg)
})
}
main()
async function getExchangeMessages (nonce) {
async function getExchangeMessages(nonce) {
logger.debug('Getting exchange messages')
const messages = []
do {
while (true) {
const msg = await exchangeQueue.get()
if (msg === false) {
break
@ -151,51 +47,35 @@ async function getExchangeMessages (nonce) {
break
}
messages.push(msg)
} while (true)
}
logger.debug(`Found ${messages.length} messages`)
return messages
}
function sign (keysFile, hash, tx, publicKey) {
return new Promise(resolve => {
const cmd = exec.execFile('./sign-entrypoint.sh', [ PROXY_URL, keysFile, hash ], async (error) => {
if (fs.existsSync('signature')) {
logger.info('Finished signature generation')
const signature = JSON.parse(fs.readFileSync('signature'))
logger.debug('%o', signature)
// Best-effort kill of any running gg18 sign client process; the '|| true'
// keeps execSync from throwing when no matching process exists.
function killSigner() {
  exec.execSync('pkill gg18_sign || true')
}
logger.info('Building signed transaction')
const signedTx = tx.addSignature(publicKey, { r: signature[1], s: signature[3] })
// GET handler: manually aborts the in-progress sign attempt by killing the
// gg18 client and flags the current nonce wait as cancelled.
function restart(req, res) {
  logger.info('Manual cancelling current sign attempt')
  killSigner()
  cancelled = true
  res.send('Done')
}
logger.info('Sending transaction')
logger.debug(signedTx)
await sendTx(signedTx)
resolve(true)
} else if (error === null || error.code === 0) {
resolve(true)
} else {
logger.warn('Sign failed')
resolve(false)
}
})
cmd.stdout.on('data', data => logger.debug(data.toString()))
cmd.stderr.on('data', data => logger.debug(data.toString()))
// Notifies the proxy that the funds transfer for the given epoch completed.
async function confirmFundsTransfer(epoch) {
  await proxyClient.post('/confirmFundsTransfer', {
    epoch
  })
}
function restart (req, res) {
logger.info('Cancelling current sign')
nextAttempt = req.params.attempt
exec.execSync('pkill gg18_sign || true')
cancelled = true
res.send('Cancelled')
// Notifies the proxy that this validator confirms closing the given epoch.
async function confirmCloseEpoch(epoch) {
  await proxyClient.post('/confirmCloseEpoch', {
    epoch
  })
}
function confirmFundsTransfer () {
exec.execSync(`curl -X POST -H "Content-Type: application/json" "${PROXY_URL}/confirmFundsTransfer"`, { stdio: 'pipe' })
}
function getAccountFromFile (file) {
function getAccountFromFile(file) {
logger.debug(`Reading ${file}`)
if (!fs.existsSync(file)) {
logger.debug('No keys found, skipping')
@ -204,48 +84,272 @@ function getAccountFromFile (file) {
const publicKey = JSON.parse(fs.readFileSync(file))[5]
return {
address: publicKeyToAddress(publicKey),
publicKey: publicKey
publicKey
}
}
async function waitForAccountNonce (address, nonce) {
// Fetches Binance chain account data for the given address, retrying
// indefinitely on request failures. Callers read fields such as sequence,
// account_number and balances from the returned object.
async function getAccount(address) {
  logger.info(`Getting account ${address} data`)
  const response = await retry(() => httpClient.get(`/api/v1/account/${address}`))
  return response.data
}
// Reads the current multi-transfer fee from the chain API and returns twice
// that fee, scaled down by 8 decimals, as a BN.
async function getFee() {
  logger.info('Getting fees')
  const response = await retry(() => httpClient.get('/api/v1/fees'))
  // The fees endpoint returns a list; pick the entry carrying the
  // multi_transfer_fee field.
  const multiTransferFee = response.data.find((fee) => fee.multi_transfer_fee).multi_transfer_fee
  return new BN(multiTransferFee * 2).div(10 ** 8)
}
async function waitForAccountNonce(address, nonce) {
cancelled = false
logger.info(`Waiting for account ${address} to have nonce ${nonce}`)
while (!cancelled) {
const sequence = (await getAccount(address)).sequence
if (sequence >= nonce)
const { sequence } = await getAccount(address)
if (sequence >= nonce) {
break
await new Promise(resolve => setTimeout(resolve, 1000))
}
await delay(1000)
logger.debug('Waiting for needed account nonce')
}
logger.info('Account nonce is OK')
return !cancelled
}
function getAccount (address) {
logger.info(`Getting account ${address} data`)
return httpClient
.get(`/api/v1/account/${address}`)
.then(res => res.data)
.catch(() => {
logger.debug('Retrying')
return getAccount(address)
})
// Broadcasts a signed transaction to the Binance chain, retrying until the
// node accepts it. Returns the axios response on success, or true when the
// node reports the tx as already known.
// FIX: the original dereferenced err.response unconditionally; transport
// level failures (connection refused, timeouts) carry no response object and
// crashed the retry loop with a TypeError. Such errors are now retried too.
async function sendTx(tx) {
  while (true) {
    try {
      return await httpClient.post('/api/v1/broadcast?sync=true', tx, {
        headers: {
          'Content-Type': 'text/plain'
        }
      })
    } catch (err) {
      if (!err.response) {
        // No HTTP response at all — network-level failure, retry later.
        logger.info('Network error while broadcasting, retrying: %o', err.message)
        await delay(10000)
        continue
      }
      logger.trace('Error, response data %o', err.response.data)
      if (err.response.data.message.includes('Tx already exists in cache')) {
        logger.debug('Tx already exists in cache')
        return true
      }
      // The node embeds ' < ' in insufficient-funds messages.
      if (err.response.data.message.includes(' < ')) {
        logger.warn('Insufficient funds, waiting for funds')
        await delay(60000)
      } else {
        logger.info('Something failed, restarting: %o', err.response)
        await delay(10000)
      }
    }
  }
}
function sendTx (tx) {
return httpClient
.post(`/api/v1/broadcast?sync=true`, tx, {
headers: {
'Content-Type': 'text/plain'
/**
 * Runs the external threshold-signing entrypoint (sign-entrypoint.sh) for the
 * given transaction and resolves with SIGN_OK, SIGN_FAILED or
 * SIGN_NONCE_INTERRUPT. While the signer runs, a background interval cancels
 * it once the account nonce shows the tx is no longer needed.
 *
 * NOTE(review): this block as shown is corrupted by diff residue (see the
 * `.catch(...)` fragment below) and is not syntactically valid — it must be
 * reconciled against the repository before any behavioral change.
 *
 * @param keysFile      path to the epoch key-share file passed to the signer
 * @param tx            Transaction instance (provides getSignBytes/addSignature)
 * @param publicKey     threshold public key used to attach the signature
 * @param signerAddress account whose nonce is polled to detect completion
 */
function sign(keysFile, tx, publicKey, signerAddress) {
  let restartTimeoutId
  let nonceDaemonIntervalId
  let nonceInterrupt = false
  // Hash of the sign bytes is what the external signer actually signs
  const hash = sha256(tx.getSignBytes())
  logger.info(`Starting signature generation for transaction hash ${hash}`)
  return new Promise((resolve) => {
    const cmd = exec.execFile('./sign-entrypoint.sh', [PROXY_URL, keysFile, hash], async (error) => {
      logger.trace('Sign entrypoint exited, %o', error)
      // Stop the nonce watchdog and the restart timer once the signer exits
      clearInterval(nonceDaemonIntervalId)
      clearTimeout(restartTimeoutId)
      if (fs.existsSync('signature')) { // if signature was generated
        logger.info('Finished signature generation')
        // Signature file layout: indexes 1 and 3 hold r and s — presumably
        // produced by the external signer; verify against sign-entrypoint.sh
        const signature = JSON.parse(fs.readFileSync('signature'))
        logger.debug('%o', signature)
        logger.info('Building signed transaction')
        const signedTx = tx.addSignature(publicKey, {
          r: signature[1],
          s: signature[3]
        })
        logger.info('Sending transaction')
        logger.debug(signedTx)
        await sendTx(signedTx)
        // if nonce does not update in some time, cancel process, consider sign as failed
        // NOTE(review): `cancelled` is not declared in this function — it looks
        // like a module-level flag read by waitForAccountNonce; confirm.
        const sendTimeoutId = setTimeout(() => {
          cancelled = true
        }, SEND_TIMEOUT)
        const waitResponse = await waitForAccountNonce(signerAddress, tx.tx.sequence + 1)
        clearTimeout(sendTimeoutId)
        resolve(waitResponse ? SIGN_OK : SIGN_FAILED)
      } else if (error === null || error.code === 0) { // if was already enough parties
        // Another party completed the signature; just wait for the nonce bump
        const signTimeoutId = setTimeout(() => {
          cancelled = true
        }, SIGN_ATTEMPT_TIMEOUT)
        const waitResponse = await waitForAccountNonce(signerAddress, tx.tx.sequence + 1)
        clearTimeout(signTimeoutId)
        resolve(waitResponse ? SIGN_OK : SIGN_FAILED)
      } else if (error.code === 143) { // if process was killed
        logger.warn('Sign process was killed')
        resolve(nonceInterrupt ? SIGN_NONCE_INTERRUPT : SIGN_FAILED)
      } else if (error.code !== null && error.code !== 0) { // if process has failed
        logger.warn('Sign process has failed')
        resolve(SIGN_FAILED)
      } else {
        logger.warn('Unknown error state %o', error)
        resolve(SIGN_FAILED)
      }
    })
      // NOTE(review): the following .catch/if/else fragment appears to be diff
      // residue from an older sendTx implementation merged into this view; it
      // leaves the function unbalanced as shown and should not be trusted.
      .catch(err => {
        if (err.response.data.message.includes('Tx already exists in cache'))
          logger.debug('Tx already exists in cache')
        else {
          logger.info('Something failed, restarting: %o', err.response)
          return new Promise(resolve => setTimeout(() => resolve(sendTx(tx)), 1000))
          // Arm the restart timer only once the signer has gathered all parties
          cmd.stdout.on('data', (data) => {
            const str = data.toString()
            if (str.includes('Got all party ids')) {
              // NOTE(review): killSigner is defined elsewhere in this file
              restartTimeoutId = setTimeout(killSigner, SIGN_ATTEMPT_TIMEOUT)
            }
            logger.debug(str)
          })
          cmd.stderr.on('data', (data) => logger.debug(data.toString()))
          // Kill signer if current nonce is already processed at some time
          nonceDaemonIntervalId = setInterval(async () => {
            logger.info(`Checking if account ${signerAddress} has nonce ${tx.tx.sequence + 1}`)
            const { sequence } = await getAccount(signerAddress)
            if (sequence > tx.tx.sequence) {
              logger.info('Account already has needed nonce, cancelling current sign process')
              nonceInterrupt = true
              // Additional delay, maybe signer will eventually finish
              await delay(5000)
              killSigner()
            }
          }, SIGN_NONCE_CHECK_INTERVAL)
        })
}
/**
 * Returns the free balance of `asset` from a Binance Chain account object.
 *
 * @param account account payload containing a `balances` array of
 *                `{ symbol, free }` entries
 * @param asset   token symbol to look up (e.g. 'BNB')
 * @returns the `free` balance string, or '0' when the account holds no such
 *          asset — previously a missing asset made `.find()` return
 *          `undefined` and the `.free` access threw a TypeError
 */
function getAccountBalance(account, asset) {
  const token = account.balances.find((entry) => entry.symbol === asset)
  return token ? token.free : '0'
}
/**
 * Builds the Binance Chain transaction for a consumed sign event.
 * Three mutually exclusive shapes, chosen from `data`:
 *  - closeEpoch: an account-flags transaction (flags = 0x01)
 *  - newEpoch:   a transfer of all funds to the next epoch's address
 *  - otherwise:  a multi-send built from pending exchange messages
 *
 * @param from    sender address for the current epoch
 * @param account account payload (supplies account_number and balances)
 * @param data    sign event: { closeEpoch, newEpoch, nonce, ... }
 * @returns { tx, exchanges } — tx is null when the next epoch's keys are
 *          missing; exchanges is set only for the multi-send branch
 */
async function buildTx(from, account, data) {
  const { closeEpoch, newEpoch, nonce } = data
  const options = {
    from,
    accountNumber: account.account_number,
    sequence: nonce,
    asset: FOREIGN_ASSET
  }
  let exchanges
  if (closeEpoch) {
    logger.info(`Building corresponding account flags transaction, nonce ${nonce}`)
    options.flags = 0x01
  } else if (newEpoch) {
    const nextKeysFile = `/keys/keys${newEpoch}.store`
    const { address: to } = getAccountFromFile(nextKeysFile)
    if (to === '') {
      // Next epoch's keys are not generated yet — nothing to build
      return { tx: null }
    }
    logger.info(`Building corresponding transaction for transferring all funds, nonce ${nonce}, recipient ${to}`)
    const fee = await getFee()
    const tokens = getAccountBalance(account, FOREIGN_ASSET)
    const bnbs = new BN(getAccountBalance(account, 'BNB')).minus(fee)
    options.recipients = [{ to, tokens, bnbs }]
  } else {
    logger.info(`Building corresponding transfer transaction, nonce ${nonce}`)
    exchanges = await getExchangeMessages(nonce)
    options.recipients = exchanges
      .map((msg) => JSON.parse(msg.content))
      .map(({ value, recipient }) => ({ to: recipient, tokens: value }))
  }
  return {
    tx: new Transaction(options),
    exchanges
  }
}
/**
 * Persists signer parameters to the local `./params` file consumed by the
 * sign entrypoint. The stored threshold is `threshold - 1` — presumably the
 * external signer counts parties beyond the first; confirm against it.
 *
 * @param parties   total number of signing parties
 * @param threshold configured signing threshold
 */
function writeParams(parties, threshold) {
  logger.debug('Writing params')
  const params = {
    parties: parties.toString(),
    threshold: (threshold - 1).toString()
  }
  fs.writeFileSync('./params', JSON.stringify(params))
}
/**
 * RabbitMQ consumer for sign events. Builds the corresponding transaction,
 * retries signing until it succeeds (or is interrupted by a nonce bump),
 * performs the per-event confirmation, and finally acks the message.
 *
 * @param msg queue message whose JSON content carries
 *            { nonce, epoch, newEpoch, parties, threshold, closeEpoch }
 */
async function consumer(msg) {
  const data = JSON.parse(msg.content)
  logger.info('Consumed sign event: %o', data)
  const {
    nonce, epoch, newEpoch, parties, threshold, closeEpoch
  } = data
  // closeEpoch events carry the epoch to close instead of `epoch`
  const keysFile = `/keys/keys${epoch || closeEpoch}.store`
  const { address: from, publicKey } = getAccountFromFile(keysFile)
  if (from === '') {
    // This validator holds no key share for the epoch — nothing to sign
    logger.info('No keys found, acking message')
    channel.ack(msg)
    return
  }
  const account = await getAccount(from)
  if (nonce > account.sequence) {
    // NOTE(review): the log claims "already sent", but `nonce > sequence`
    // reads as the event nonce being AHEAD of the account — for an
    // already-used nonce one would expect `nonce < account.sequence`.
    // Confirm the intended comparison before changing anything.
    logger.debug('Tx has been already sent')
    logger.info('Acking message (skipped nonce)')
    channel.ack(msg)
    return
  }
  writeParams(parties, threshold)
  const { tx, exchanges } = await buildTx(from, account, data)
  // tx === null means the next epoch's keys are missing; skip straight to ack
  while (tx !== null) {
    const signResult = await sign(keysFile, tx, publicKey, from)
    if (signResult === SIGN_OK || signResult === SIGN_NONCE_INTERRUPT) {
      if (closeEpoch) {
        await confirmCloseEpoch(closeEpoch)
      } else if (newEpoch) {
        await confirmFundsTransfer(epoch)
      } else {
        // Multi-send branch: ack every exchange message folded into the tx
        // eslint-disable-next-line no-loop-func
        exchanges.forEach((exchangeMsg) => channel.ack(exchangeMsg))
      }
      break
    }
    logger.warn('Sign failed, starting next attempt')
    await delay(1000)
  }
  logger.info('Acking message')
  channel.ack(msg)
}
/**
 * Service entry point: connects to RabbitMQ, asserts the queues, waits for
 * the /start endpoint to flip the `ready` flag, then starts consuming sign
 * events one at a time (prefetch 1 keeps signing strictly sequential).
 * `channel` and `exchangeQueue` are module-level — presumably declared near
 * the top of this file; confirm.
 */
async function main() {
  channel = await connectRabbit(RABBITMQ_URL)
  logger.info('Connecting to signature events queue')
  exchangeQueue = await assertQueue(channel, 'exchangeQueue')
  const signQueue = await assertQueue(channel, 'signQueue')
  // Block until an operator hits /start
  while (!ready) {
    await delay(1000)
  }
  channel.prefetch(1)
  signQueue.consume(consumer)
}
// Control endpoints: /restart is handled by the module-level `restart`
// handler; /start flips the `ready` flag that main() polls before consuming.
app.get('/restart', restart)
app.get('/start', (req, res) => {
  logger.info('Ready to start')
  ready = true
  res.send()
})
// Start the control HTTP server, then kick off the consumer loop
app.listen(8001, () => logger.debug('Listening on 8001'))
main()

View File

@ -10,60 +10,86 @@ const { FOREIGN_CHAIN_ID } = process.env
const BNB_ASSET = 'BNB'
class Transaction {
constructor (options) {
const { from, accountNumber, sequence, recipients, asset, memo = '' } = options
constructor(options) {
const {
from, accountNumber, sequence, recipients, asset, memo = '', flags
} = options
const totalTokens = recipients.reduce((sum, { tokens }) => sum.plus(new BN(tokens || 0)), new BN(0))
const totalBnbs = recipients.reduce((sum, { bnbs }) => sum.plus(new BN(bnbs || 0)), new BN(0))
const senderCoins = []
if (asset && totalTokens.isGreaterThan(0)) {
senderCoins.push({
denom: asset,
amount: totalTokens.multipliedBy(10 ** 8).toNumber(),
})
}
if (totalBnbs.isGreaterThan(0)) {
senderCoins.push({
denom: BNB_ASSET,
amount: totalBnbs.multipliedBy(10 ** 8).toNumber(),
})
}
senderCoins.sort((a, b) => a.denom > b.denom)
let msg
if (flags) {
msg = {
from: crypto.decodeAddress(from),
flags,
msgType: 'NewOrderMsg' // until 'SetAccountFlagsMsg' is not available
}
const inputs = [ {
address: from,
coins: senderCoins
} ]
const outputs = recipients.map(({ to, tokens, bnbs }) => {
const receiverCoins = []
if (asset && tokens) {
receiverCoins.push({
this.signMsg = {
flags,
from
}
} else {
const totalTokens = recipients.reduce(
(sum, { tokens }) => sum.plus(new BN(tokens || 0)), new BN(0)
)
const totalBnbs = recipients.reduce(
(sum, { bnbs }) => sum.plus(new BN(bnbs || 0)), new BN(0)
)
const senderCoins = []
if (asset && totalTokens.isGreaterThan(0)) {
senderCoins.push({
denom: asset,
amount: new BN(tokens).multipliedBy(10 ** 8).toNumber(),
amount: totalTokens.multipliedBy(10 ** 8).toNumber()
})
}
if (bnbs) {
receiverCoins.push({
if (totalBnbs.isGreaterThan(0)) {
senderCoins.push({
denom: BNB_ASSET,
amount: new BN(bnbs).multipliedBy(10 ** 8).toNumber(),
amount: totalBnbs.multipliedBy(10 ** 8).toNumber()
})
}
receiverCoins.sort((a, b) => a.denom > b.denom)
return {
address: to,
coins: receiverCoins
senderCoins.sort((a, b) => a.denom > b.denom)
const inputs = [{
address: from,
coins: senderCoins
}]
const outputs = recipients.map(({ to, tokens, bnbs }) => {
const receiverCoins = []
if (asset && tokens) {
receiverCoins.push({
denom: asset,
amount: new BN(tokens).multipliedBy(10 ** 8).toNumber()
})
}
if (bnbs) {
receiverCoins.push({
denom: BNB_ASSET,
amount: new BN(bnbs).multipliedBy(10 ** 8).toNumber()
})
}
receiverCoins.sort((a, b) => a.denom > b.denom)
return {
address: to,
coins: receiverCoins
}
})
msg = {
inputs: inputs.map((x) => ({
...x,
address: crypto.decodeAddress(x.address)
})),
outputs: outputs.map((x) => ({
...x,
address: crypto.decodeAddress(x.address)
})),
msgType: 'MsgSend'
}
})
const msg = {
inputs: inputs.map((x) => ({...x, address: crypto.decodeAddress(x.address)})),
outputs: outputs.map((x) => ({...x, address: crypto.decodeAddress(x.address)})),
msgType: 'MsgSend'
}
this.signMsg = {
inputs,
outputs
this.signMsg = {
inputs,
outputs
}
}
this.tx = new TransactionBnc({
@ -72,30 +98,32 @@ class Transaction {
memo,
msg,
sequence,
type: msg.msgType,
type: msg.msgType
})
}
getSignBytes () {
getSignBytes() {
return this.tx.getSignBytes(this.signMsg)
}
addSignature (publicKey, signature) {
addSignature(publicKey, signature) {
const yLast = parseInt(publicKey.y[publicKey.y.length - 1], 16)
const n = new BN('FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141', 16)
const s = new BN(signature.s, 16)
if (s.gt(n.div(2))) {
logger.debug('Normalizing s')
// eslint-disable-next-line no-param-reassign
signature.s = n.minus(s).toString(16)
}
const publicKeyEncoded = Buffer.from('eb5ae98721' + (yLast % 2 ? '03' : '02') + padZeros(publicKey.x, 64), 'hex')
this.tx.signatures = [ {
const publicKeyEncoded = Buffer.from(`eb5ae98721${yLast % 2 ? '03' : '02'}${padZeros(publicKey.x, 64)}`, 'hex')
this.tx.signatures = [{
pub_key: publicKeyEncoded,
signature: Buffer.from(padZeros(signature.r, 64) + padZeros(signature.s, 64), 'hex'),
account_number: this.tx.account_number,
sequence: this.tx.sequence,
} ]
sequence: this.tx.sequence
}]
return this.tx.serialize()
.replace(/ce6dc043/, 'bea6e301') // until 'SetAccountFlagsMsg' is not available
}
}

View File

@ -0,0 +1,8 @@
{
"extends": [
"../../.eslintrc"
],
"rules": {
"no-console": 0
}
}

View File

@ -1 +0,0 @@
FOREIGN_PRIVATE_KEY=0000000000000000000000000000000000000000000000000000000000000000

View File

@ -0,0 +1,4 @@
.dockerignore
Dockerfile
run.sh
.env.*

View File

@ -1,2 +1,2 @@
FOREIGN_URL=https://testnet-dex.binance.org/
FOREIGN_ASSET=KFT-94F
FOREIGN_URL=http://http-api:8000
FOREIGN_ASSET=DEV-9BA

View File

@ -3,5 +3,8 @@
"version": "0.0.1",
"dependencies": {
"axios": "0.19.0"
},
"engines": {
"node": ">=10.6.0"
}
}

View File

@ -9,4 +9,8 @@ TARGET_NETWORK=${TARGET_NETWORK:=development}
docker build -t binance-balance . > /dev/null
docker run --rm --env-file ".env.$TARGET_NETWORK" binance-balance $@
if [[ "$TARGET_NETWORK" == "development" ]]; then
docker run --rm --network binance_net --env-file ".env.$TARGET_NETWORK" binance-balance $@
else
docker run --rm --env-file ".env.$TARGET_NETWORK" binance-balance $@
fi

View File

@ -5,10 +5,14 @@ const { FOREIGN_URL, FOREIGN_ASSET } = process.env
const address = process.argv[2]
const httpClient = axios.create({ baseURL: FOREIGN_URL })
httpClient
.get(`/api/v1/account/${address}`)
.then(res => {
console.log(`BNB: ${parseFloat(res.data.balances.find(x => x.symbol === 'BNB').free)}`)
console.log(`${FOREIGN_ASSET}: ${parseFloat(res.data.balances.find(x => x.symbol === FOREIGN_ASSET).free)}`)
})
.catch(console.log)
function main() {
httpClient
.get(`/api/v1/account/${address}`)
.then((res) => {
console.log(`BNB: ${parseFloat(res.data.balances.find((token) => token.symbol === 'BNB').free)}`)
console.log(`${FOREIGN_ASSET}: ${parseFloat(res.data.balances.find((token) => token.symbol === FOREIGN_ASSET).free)}`)
})
.catch(console.log)
}
main()

View File

@ -0,0 +1,4 @@
.dockerignore
Dockerfile
run.sh
.env.*

View File

@ -1,4 +1,4 @@
FOREIGN_URL=https://testnet-dex.binance.org/
FOREIGN_ASSET=KFT-94F
FOREIGN_URL=http://http-api:8000
FOREIGN_ASSET=DEV-9BA
#FOREIGN_PRIVATE_KEY is taken from src/test-services/.keys.development
FOREIGN_PRIVATE_KEY=dd5ec5a7abe9d1fff21170ae591085f000fc6fd9ca0107fe047593f44e328e40

View File

@ -3,5 +3,8 @@
"version": "0.0.1",
"dependencies": {
"@binance-chain/javascript-sdk": "2.14.4"
},
"engines": {
"node": ">=10.6.0"
}
}

View File

@ -9,4 +9,8 @@ TARGET_NETWORK=${TARGET_NETWORK:=development}
docker build -t binance-send . > /dev/null
docker run --rm --env-file ".env.$TARGET_NETWORK" --env-file "../.keys.$TARGET_NETWORK" -e "PRIVATE_KEY=$PRIVATE_KEY" binance-send $@
if [[ "$TARGET_NETWORK" == "development" ]]; then
docker run --rm --network binance_net --env-file ".env.$TARGET_NETWORK" -e "PRIVATE_KEY=$PRIVATE_KEY" binance-send $@
else
docker run --rm --env-file ".env.$TARGET_NETWORK" --env-file "../.keys.$TARGET_NETWORK" -e "PRIVATE_KEY=$PRIVATE_KEY" binance-send $@
fi

View File

@ -6,7 +6,7 @@ const PRIVATE_KEY = process.env.PRIVATE_KEY || FOREIGN_PRIVATE_KEY
const client = new Bnc(FOREIGN_URL)
async function main () {
async function main() {
client.chooseNetwork('testnet')
await client.setPrivateKey(PRIVATE_KEY)
@ -40,13 +40,14 @@ async function main () {
receipt = await client.multiSend(from, outputs, 'funding')
} else {
console.log(`From ${from} to ${to}, ${tokens} ${FOREIGN_ASSET}'`)
receipt = await client.transfer(from, to, tokens, FOREIGN_ASSET, 'exchange')
receipt = await client.transfer(from, to, tokens, FOREIGN_ASSET, '')
}
if (receipt.status === 200)
if (receipt.status === 200) {
console.log(receipt.result[0].hash)
else
} else {
console.log(receipt)
}
}
main()

View File

@ -0,0 +1,4 @@
.dockerignore
Dockerfile
run.sh
.env.*

View File

@ -1,2 +1,2 @@
HOME_RPC_URL=https://kovan.infura.io/v3/5d7bd94c50ed43fab1cb8e74f58678b0
HOME_TOKEN_ADDRESS=0x57d2533B640cfb58f8f1F69C14c089968Da9fdFc
HOME_TOKEN_ADDRESS=0x7c7daEf752C80A6d229D4a642B9336ceCd7e26b0

View File

@ -6,6 +6,6 @@ COPY package.json /test/
RUN npm install
COPY testGetEthereumBalance.js IERC20.json /test/
COPY testGetEthereumBalance.js /test/
ENTRYPOINT ["node", "testGetEthereumBalance.js"]

Some files were not shown because too many files have changed in this diff Show More