2019-07-07 12:58:35 -07:00
|
|
|
const axios = require('axios')
|
|
|
|
const BN = require('bignumber.js')
|
|
|
|
const fs = require('fs')
|
2019-07-15 12:41:02 -07:00
|
|
|
const { computeAddress } = require('ethers').utils
|
2019-07-07 12:58:35 -07:00
|
|
|
|
2019-10-06 03:36:29 -07:00
|
|
|
const logger = require('./logger')
|
2019-10-08 10:45:28 -07:00
|
|
|
const redis = require('./db')
|
|
|
|
const { publicKeyToAddress } = require('./crypto')
|
2019-11-03 02:54:34 -08:00
|
|
|
const { delay, retry } = require('./wait')
|
2019-11-07 06:06:12 -08:00
|
|
|
const { connectRabbit, assertQueue } = require('./amqp')
|
2019-10-06 03:36:29 -07:00
|
|
|
|
2019-11-07 06:06:12 -08:00
|
|
|
// Required environment configuration:
//   FOREIGN_URL   - base URL of the foreign (Binance Chain) API node
//   PROXY_URL     - base URL of the local proxy service that receives /transfer posts
//   FOREIGN_ASSET - asset symbol of incoming transfers to watch
//   RABBITMQ_URL  - AMQP connection string
const {
  FOREIGN_URL, PROXY_URL, FOREIGN_ASSET, RABBITMQ_URL
} = process.env

// Delay between polls when there is nothing to fetch — presumably milliseconds
// (passed straight to delay()); TODO confirm units against deployment config.
const FOREIGN_FETCH_INTERVAL = parseInt(process.env.FOREIGN_FETCH_INTERVAL, 10)
// Offset subtracted from the foreign node's reported block time in getBlockTime().
const FOREIGN_FETCH_BLOCK_TIME_OFFSET = parseInt(process.env.FOREIGN_FETCH_BLOCK_TIME_OFFSET, 10)
// Upper bound on the length of a single [startTime, endTime] fetch interval.
const FOREIGN_FETCH_MAX_TIME_INTERVAL = parseInt(process.env.FOREIGN_FETCH_MAX_TIME_INTERVAL, 10)

// Pre-configured HTTP clients for the foreign chain API and the local proxy.
const foreignHttpClient = axios.create({ baseURL: FOREIGN_URL })
const proxyHttpClient = axios.create({ baseURL: PROXY_URL })

// AMQP channel and queue handle, assigned once by initialize().
let channel
let epochTimeIntervalsQueue
|
|
|
/**
 * Derives the validator's foreign (Binance Chain) address for the given epoch
 * from the generated keys file.
 *
 * Entry [5] of the keys store is assumed to hold the shared public key —
 * TODO confirm against the keygen component that writes these files.
 *
 * @param {number} epoch - epoch number identifying the keys file
 * @returns {string|null} the derived address, or null when the keys file is
 *   missing or unreadable (the validator is not part of this epoch)
 */
function getForeignAddress(epoch) {
  const keysFile = `/keys/keys${epoch}.store`

  try {
    const keysStore = JSON.parse(fs.readFileSync(keysFile))
    return publicKeyToAddress(keysStore[5])
  } catch (e) {
    // Deliberate best-effort: a missing/corrupt keys file means "not in epoch".
    return null
  }
}
|
|
|
|
|
2019-11-03 02:54:34 -08:00
|
|
|
/**
 * Fetches a single transaction from the foreign chain API by hash,
 * retrying transient failures.
 *
 * @param {string} hash - transaction hash (without the 0x prefix)
 * @returns the `tx.value` payload of the JSON-formatted transaction
 */
async function getTx(hash) {
  const options = {
    params: {
      format: 'json'
    }
  }
  const { data } = await retry(() => foreignHttpClient.get(`/api/v1/tx/${hash}`, options))
  return data.tx.value
}
|
|
|
|
|
2019-11-03 02:54:34 -08:00
|
|
|
/**
 * Queries the foreign node for its latest block time.
 *
 * @returns {number} block timestamp in ms since the epoch, minus the
 *   configured FOREIGN_FETCH_BLOCK_TIME_OFFSET (presumably compensating for
 *   indexing lag — TODO confirm)
 */
async function getBlockTime() {
  const { data } = await retry(() => foreignHttpClient.get('/api/v1/time'))
  return Date.parse(data.block_time) - FOREIGN_FETCH_BLOCK_TIME_OFFSET
}
|
|
|
|
|
2019-11-07 06:06:12 -08:00
|
|
|
/**
 * Fetches incoming FOREIGN_ASSET transfer transactions received by `address`
 * within the [startTime, endTime] window, retrying transient API failures.
 *
 * @param {string} address - foreign address to query
 * @param {number} startTime - window start (ms timestamp)
 * @param {number} endTime - window end (ms timestamp)
 * @returns array of transactions as returned in the API response's `tx` field
 */
async function fetchNewTransactions(address, startTime, endTime) {
  logger.debug('Fetching new transactions')
  const params = {
    address,
    side: 'RECEIVE',
    txAsset: FOREIGN_ASSET,
    txType: 'TRANSFER',
    startTime,
    endTime
  }

  logger.trace('Transactions fetch params %o', params)
  const response = await retry(() => foreignHttpClient.get('/api/v1/transactions', { params }))
  return response.data.tx
}
|
2019-11-03 02:54:34 -08:00
|
|
|
|
2019-11-07 06:06:12 -08:00
|
|
|
/**
 * Drains the epoch time intervals queue to build the next fetch window.
 *
 * Consumes messages until one of the stop conditions hits, accumulating the
 * largest safe endTime:
 *  - queue empty                          -> return what was accumulated
 *  - message for a different epoch        -> requeue it and stop
 *  - message carries startTime            -> persist it to redis and stop
 *  - prolongedTime too far ahead (beyond FOREIGN_FETCH_MAX_TIME_INTERVAL or
 *    past the latest foreign block time)  -> requeue it and stop
 *
 * Note: startTime comes back from redis as a string; the interval comparison
 * below relies on JS numeric coercion in the subtraction.
 *
 * @returns {{epoch: ?number, startTime: ?(string|number), endTime: ?number}}
 *   any field may be null when no usable interval was found
 */
async function fetchTimeIntervalsQueue() {
  let epoch = null
  let startTime = null
  let endTime = null
  const lastBncBlockTime = await getBlockTime()
  logger.trace(`Binance last block timestamp ${lastBncBlockTime}`)

  while (true) {
    const msg = await epochTimeIntervalsQueue.get()
    if (msg === false) {
      // Queue exhausted
      break
    }
    const data = JSON.parse(msg.content)
    logger.trace('Consumed time interval event %o', data)

    if (epoch !== null && epoch !== data.epoch) {
      logger.warn('Two consequently events have different epochs, should not be like this')
      channel.nack(msg, false, true)
      break
    }

    if (data.startTime) {
      // Epoch start marker: record the new epoch's start time and stop.
      logger.trace('Set foreign time', data)
      await redis.set(`foreignTime${data.epoch}`, data.startTime)
      channel.ack(msg)
      break
    }

    if (epoch === null) {
      // First message: pick up the epoch and its persisted start time.
      epoch = data.epoch
      startTime = await redis.get(`foreignTime${epoch}`)
      logger.trace(`Retrieved epoch ${epoch} and start time ${startTime} from redis`)
      if (startTime === null) {
        logger.warn(`Empty foreign time for epoch ${epoch}`)
      }
    }

    if (data.prolongedTime - startTime < FOREIGN_FETCH_MAX_TIME_INTERVAL
      && data.prolongedTime < lastBncBlockTime) {
      // Safe to extend the window; keep consuming to extend it further.
      endTime = data.prolongedTime
      channel.ack(msg)
    } else {
      logger.trace('Requeuing current queue message')
      channel.nack(msg, false, true)
      break
    }
  }

  return {
    epoch,
    startTime,
    endTime
  }
}
|
|
|
|
|
2019-11-01 11:43:25 -07:00
|
|
|
/**
 * One-time setup: opens the RabbitMQ connection and asserts the queue of
 * epoch time intervals, storing both in module-level state for the loop.
 */
async function initialize() {
  channel = await connectRabbit(RABBITMQ_URL)

  logger.info('Connecting to epoch time intervals queue')
  epochTimeIntervalsQueue = await assertQueue(channel, 'epochTimeIntervalsQueue')
}
|
|
|
|
|
2019-11-03 02:54:34 -08:00
|
|
|
// One iteration of the watcher: obtain the next fetch window, fetch incoming
// transfers for it, relay each non-funding transfer to the proxy, then
// checkpoint progress in redis by advancing foreignTime for the epoch.
async function loop() {
  const { epoch, startTime, endTime } = await fetchTimeIntervalsQueue()

  // No usable window yet — back off and retry.
  if (!startTime || !endTime) {
    logger.debug('Nothing to fetch')
    await delay(FOREIGN_FETCH_INTERVAL)
    return
  }

  const address = getForeignAddress(epoch)

  // No keys file for this epoch: this validator is not a participant, but the
  // checkpoint is still advanced so the interval is not re-examined.
  if (!address) {
    logger.debug('Validator is not included in current epoch')
    await redis.set(`foreignTime${epoch}`, endTime)
    await delay(FOREIGN_FETCH_INTERVAL)
    return
  }

  const transactions = await fetchNewTransactions(address, startTime, endTime)

  if (transactions.length === 0) {
    logger.debug('Found 0 new transactions')
    await redis.set(`foreignTime${epoch}`, endTime)
    await delay(FOREIGN_FETCH_INTERVAL)
    return
  }

  logger.info(`Found ${transactions.length} new transactions`)
  logger.trace('%o', transactions)

  // Iterated in reverse, sequentially — presumably the API returns newest
  // first and the proxy must see transfers in chronological order; confirm
  // against the /api/v1/transactions ordering before changing this.
  for (let i = transactions.length - 1; i >= 0; i -= 1) {
    const tx = transactions[i]
    if (tx.memo !== 'funding') {
      // The sender's address is recovered from the signature's public key on
      // the full transaction record.
      const publicKeyEncoded = (await getTx(tx.txHash)).signatures[0].pub_key.value
      await proxyHttpClient.post('/transfer', {
        to: computeAddress(Buffer.from(publicKeyEncoded, 'base64')),
        // Scale to 18-decimal integer units.
        value: new BN(tx.value)
          .multipliedBy(10 ** 18)
          .integerValue(),
        hash: `0x${tx.txHash}`
      })
    }
  }
  // Checkpoint only after every transfer was posted; a crash mid-loop will
  // re-fetch this window on restart.
  await redis.set(`foreignTime${epoch}`, endTime)
}
|
|
|
|
|
2019-11-03 02:54:34 -08:00
|
|
|
/**
 * Entry point: run one-time initialization, then poll forever.
 * Never resolves; it can only reject if initialize() or loop() throws.
 */
async function main() {
  await initialize()

  for (;;) {
    await loop()
  }
}
|
|
|
|
|
|
|
|
main()
|