2014-05-29 11:52:31 -07:00
var fs = require ( 'fs' ) ;
2014-03-09 19:31:58 -07:00
var redis = require ( 'redis' ) ;
2014-03-11 18:56:19 -07:00
var async = require ( 'async' ) ;
2014-03-09 19:31:58 -07:00
var Stratum = require ( 'stratum-pool' ) ;
2014-04-26 15:24:06 -07:00
var util = require ( 'stratum-pool/lib/util.js' ) ;
2014-03-09 19:31:58 -07:00
2014-03-11 18:56:19 -07:00
2014-03-09 19:31:58 -07:00
module . exports = function ( logger ) {
var poolConfigs = JSON . parse ( process . env . pools ) ;
2014-03-30 16:04:54 -07:00
var enabledPools = [ ] ;
2014-03-09 19:31:58 -07:00
Object . keys ( poolConfigs ) . forEach ( function ( coin ) {
2014-03-30 16:04:54 -07:00
var poolOptions = poolConfigs [ coin ] ;
2014-05-02 14:59:46 -07:00
if ( poolOptions . paymentProcessing &&
poolOptions . paymentProcessing . enabled )
2014-03-30 16:04:54 -07:00
enabledPools . push ( coin ) ;
2014-03-09 19:31:58 -07:00
} ) ;
2014-03-30 16:04:54 -07:00
async . filter ( enabledPools , function ( coin , callback ) {
SetupForPool ( logger , poolConfigs [ coin ] , function ( setupResults ) {
2016-11-27 11:27:08 -08:00
callback ( null , setupResults ) ;
2014-03-30 16:04:54 -07:00
} ) ;
2016-11-27 11:27:08 -08:00
} , function ( err , results ) {
results . forEach ( function ( coin ) {
2014-03-09 19:31:58 -07:00
2014-03-30 16:04:54 -07:00
var poolOptions = poolConfigs [ coin ] ;
2014-05-02 14:59:46 -07:00
var processingConfig = poolOptions . paymentProcessing ;
2014-03-30 16:04:54 -07:00
var logSystem = 'Payments' ;
var logComponent = coin ;
2014-03-09 19:31:58 -07:00
2014-03-30 16:04:54 -07:00
logger . debug ( logSystem , logComponent , 'Payment processing setup to run every '
+ processingConfig . paymentInterval + ' second(s) with daemon ('
+ processingConfig . daemon . user + '@' + processingConfig . daemon . host + ':' + processingConfig . daemon . port
2014-05-02 14:59:46 -07:00
+ ') and redis (' + poolOptions . redis . host + ':' + poolOptions . redis . port + ')' ) ;
2014-03-09 19:31:58 -07:00
2014-03-30 16:04:54 -07:00
} ) ;
} ) ;
} ;
function SetupForPool ( logger , poolOptions , setupFinished ) {
2014-03-09 19:31:58 -07:00
2014-03-26 14:08:34 -07:00
2014-03-30 16:04:54 -07:00
var coin = poolOptions . coin . name ;
2014-05-02 14:59:46 -07:00
var processingConfig = poolOptions . paymentProcessing ;
2014-03-09 19:31:58 -07:00
2014-03-22 23:16:06 -07:00
var logSystem = 'Payments' ;
var logComponent = coin ;
2017-03-08 20:06:39 -08:00
var opidCount = 0 ;
2014-03-22 23:16:06 -07:00
2014-05-06 19:30:31 -07:00
var daemon = new Stratum . daemon . interface ( [ processingConfig . daemon ] , function ( severity , message ) {
logger [ severity ] ( logSystem , logComponent , message ) ;
} ) ;
2014-05-02 14:59:46 -07:00
var redisClient = redis . createClient ( poolOptions . redis . port , poolOptions . redis . host ) ;
2014-03-09 19:31:58 -07:00
2014-05-02 14:59:46 -07:00
var magnitude ;
var minPaymentSatoshis ;
var coinPrecision ;
2014-03-09 19:31:58 -07:00
2014-05-02 14:59:46 -07:00
var paymentInterval ;
2014-03-30 16:04:54 -07:00
2016-11-27 11:27:08 -08:00
// Confirms the daemon wallet owns the configured pool (coinbase) address.
// Calls back with a truthy error on failure, or nothing on success.
function validateAddress(callback) {
    daemon.cmd('validateaddress', [poolOptions.address], function (result) {
        if (result.error) {
            logger.error(logSystem, logComponent, 'Error with payment processing daemon ' + JSON.stringify(result.error));
            return callback(true);
        }
        if (!result.response || !result.response.ismine) {
            logger.error(logSystem, logComponent,
                'Daemon does not own pool address - payment processing can not be done with this daemon, '
                + JSON.stringify(result.response));
            return callback(true);
        }
        callback();
    }, true);
}
// Confirms the daemon wallet owns the configured payout t-address.
// Calls back with a truthy error on failure, or nothing on success.
function validateTAddress(callback) {
    daemon.cmd('validateaddress', [poolOptions.tAddress], function (result) {
        if (result.error) {
            logger.error(logSystem, logComponent, 'Error with payment processing daemon ' + JSON.stringify(result.error));
            return callback(true);
        }
        if (!result.response || !result.response.ismine) {
            logger.error(logSystem, logComponent,
                'Daemon does not own pool address - payment processing can not be done with this daemon, '
                + JSON.stringify(result.response));
            return callback(true);
        }
        callback();
    }, true);
}
// Confirms the daemon wallet owns the configured shielded z-address
// (uses z_validateaddress rather than validateaddress).
// Calls back with a truthy error on failure, or nothing on success.
function validateZAddress(callback) {
    daemon.cmd('z_validateaddress', [poolOptions.zAddress], function (result) {
        if (result.error) {
            logger.error(logSystem, logComponent, 'Error with payment processing daemon ' + JSON.stringify(result.error));
            return callback(true);
        }
        if (!result.response || !result.response.ismine) {
            logger.error(logSystem, logComponent,
                'Daemon does not own pool address - payment processing can not be done with this daemon, '
                + JSON.stringify(result.response));
            return callback(true);
        }
        callback();
    }, true);
}
// Detects the coin's satoshi magnitude by counting the decimal digits in a
// raw getbalance response (parsed from the unprocessed JSON string so no
// precision is lost), then derives minPaymentSatoshis and coinPrecision.
function getBalance(callback) {
    daemon.cmd('getbalance', [], function (result) {
        if (result.error) {
            return callback(true);
        }
        try {
            // e.g. '{"result":0.12345678,...' -> "12345678" -> magnitude 1e8
            var decimalPart = result.data.split('result":')[1].split(',')[0].split('.')[1];
            magnitude = parseInt('10' + new Array(decimalPart.length).join('0'));
            minPaymentSatoshis = parseInt(processingConfig.minimumPayment * magnitude);
            coinPrecision = magnitude.toString().length - 1;
        }
        catch (e) {
            logger.error(logSystem, logComponent, 'Error detecting number of satoshis in a coin, cannot do payment processing. Tried parsing: ' + result.data);
            return callback(true);
        }
        return callback();
    }, true, true);
}
// Final callback for the setup tasks: abort on any validation error,
// otherwise (re)arm the recurring payment timer and run a first pass.
function asyncComplete(err) {
    if (err) {
        setupFinished(false);
        return;
    }
    // Re-arming: clear any previously scheduled payment interval first.
    if (paymentInterval) {
        clearInterval(paymentInterval);
    }
    // The original wrapped processPayments in a catch-and-rethrow, which is
    // behaviorally identical to calling it directly.
    paymentInterval = setInterval(processPayments, processingConfig.paymentInterval * 1000);
    // Kick off one payment pass shortly after startup.
    setTimeout(processPayments, 100);
    setupFinished(true);
}
2014-03-30 16:04:54 -07:00
2016-12-18 18:28:59 -08:00
// Validate all configured addresses and probe coin precision in parallel,
// then let asyncComplete arm the payment timer.
var setupTasks = [validateAddress, validateTAddress, validateZAddress, getBalance];
async.parallel(setupTasks, asyncComplete);
2014-03-09 19:31:58 -07:00
2016-11-27 14:54:08 -08:00
// get t_address coin balance
// Sums unspent outputs for a t-address (or for the whole wallet when addr
// is null), excluding any outputs belonging to notAddr. Calls back with
// the balance in satoshis as a toFixed(8) string.
function listUnspent(addr, notAddr, minConf, displayBool, callback) {
    var args;
    var label;
    if (addr !== null) {
        args = [minConf, 99999999, [addr]];
        label = addr;
    } else {
        label = 'Payout wallet';
        args = [minConf, 99999999];
    }
    daemon.cmd('listunspent', args, function (result) {
        //Check if payments failed because wallet doesn't have enough coins to pay for tx fees
        if (result.error) {
            logger.error(logSystem, logComponent, 'Error trying to get t-addr [' + label + '] balance with RPC listunspent.'
                + JSON.stringify(result.error));
            // NOTE(review): the callback is replaced with a no-op before it
            // is invoked, so the error is never delivered to the caller.
            // Preserved from the original — confirm whether this is intended.
            callback = function () { };
            callback(true);
        }
        else {
            var tBalance = 0;
            if (result[0].response != null && result[0].response.length > 0) {
                for (var i = 0, len = result[0].response.length; i < len; i++) {
                    if (result[0].response[i].address !== notAddr) {
                        tBalance += result[0].response[i].amount * magnitude;
                    }
                }
            }
            if (displayBool === true) {
                logger.special(logSystem, logComponent, label + ' balance of ' + (tBalance / magnitude).toFixed(8));
            }
            callback(null, tBalance.toFixed(8));
        }
    });
}
// get z_address coin balance
// Reads the balance of a shielded z-address via z_getbalance and calls
// back with the value converted to satoshis as a toFixed(8) string.
function listUnspentZ(addr, minConf, displayBool, callback) {
    daemon.cmd('z_getbalance', [addr, minConf], function (result) {
        //Check if payments failed because wallet doesn't have enough coins to pay for tx fees
        if (result[0].error) {
            logger.error(logSystem, logComponent, 'Error trying to get coin balance with RPC z_getbalance.' + JSON.stringify(result[0].error));
            // NOTE(review): as elsewhere, the callback is neutralized before
            // the error call, so callers never observe this failure.
            callback = function () { };
            callback(true);
        }
        else {
            var zBalance = (result[0].response != null) ? result[0].response : 0;
            if (displayBool === true) {
                logger.special(logSystem, logComponent, addr.substring(0, 14) + '...' + addr.substring(addr.length - 14) + ' balance: ' + (zBalance).toFixed(8));
            }
            callback(null, (zBalance * magnitude).toFixed(8));
        }
    });
}
//send t_address balance to z_address
// Shields the coinbase t-address balance into the pool z-address.
// The first parameter doubles as an error flag when this is used as the
// callback of a balance lookup: `true` means the lookup failed, so bail.
function sendTToZ(callback, tBalance) {
    if (callback === true)
        return;
    // Not enough to cover the flat 10000-satoshi fee allowance.
    if ((tBalance - 10000) < 0)
        return;
    // do not allow more than a single z_sendmany operation at a time
    if (opidCount > 0) {
        logger.warning(logSystem, logComponent, 'sendTToZ is waiting, too many z_sendmany operations already in progress.');
        return;
    }
    var amount = balanceRound((tBalance - 10000) / magnitude);
    var params = [poolOptions.address, [{'address': poolOptions.zAddress, 'amount': amount}]];
    daemon.cmd('z_sendmany', params, function (result) {
        //Check if payments failed because wallet doesn't have enough coins to pay for tx fees
        if (result.error) {
            logger.error(logSystem, logComponent, 'Error trying to shield mined balance ' + JSON.stringify(result.error));
            callback = function () { };
            callback(true);
        }
        else {
            opidCount++;
            logger.special(logSystem, logComponent, 'Shield mined balance ' + amount);
            callback = function () { };
            callback(null);
        }
    });
}
2017-03-08 20:06:39 -08:00
2016-11-27 14:54:08 -08:00
// send z_address balance to t_address
// Unshields funds from the pool z-address back to the payout t-address,
// capped at 100 coins per operation. The first parameter doubles as an
// error flag when used as a balance-lookup callback (see sendTToZ).
function sendZToT(callback, zBalance) {
    if (callback === true)
        return;
    // Not enough to cover the flat 10000-satoshi fee allowance.
    if ((zBalance - 10000) < 0)
        return;
    // do not allow more than a single z_sendmany operation at a time
    if (opidCount > 0) {
        logger.warning(logSystem, logComponent, 'sendZToT is waiting, too many z_sendmany operations already in progress.');
        return;
    }
    var amount = balanceRound((zBalance - 10000) / magnitude);
    // no more than 100 ZEC at a time
    if (amount > 100.0)
        amount = 100.0;
    var params = [poolOptions.zAddress, [{'address': poolOptions.tAddress, 'amount': amount}]];
    daemon.cmd('z_sendmany', params, function (result) {
        //Check if payments failed because wallet doesn't have enough coins to pay for tx fees
        if (result.error) {
            logger.error(logSystem, logComponent, 'Error trying to send z_address coin balance to payout t_address.'
                + JSON.stringify(result.error));
            callback = function () { };
            callback(true);
        }
        else {
            opidCount++;
            logger.special(logSystem, logComponent, 'Unshield funds for payout ' + amount);
            callback = function () { };
            callback(null);
        }
    });
}
2017-03-08 20:06:39 -08:00
// Polls zcashd (getmininginfo, then getinfo) and caches network stats
// (blocks, difficulty, sol/s, connection count) into the coin's redis
// `<coin>:stats` hash.
function cacheZCashNetworkStats() {
    var params = null;
    daemon.cmd('getmininginfo', params, function (result) {
        if (result.error) {
            logger.error(logSystem, logComponent, 'Error getting stats from zcashd'
                + JSON.stringify(result.error));
        } else {
            logger.special(logSystem, logComponent, "Updating " + logComponent + " network stats...");
            var coin = logComponent;
            var miningStatCommands = [
                ['hset', coin + ':stats', 'networkBlocks', result[0].response.blocks],
                ['hset', coin + ':stats', 'networkDiff', result[0].response.difficulty],
                ['hset', coin + ':stats', 'networkSols', result[0].response.networksolps]
            ];
            redisClient.multi(miningStatCommands).exec(function (error, results) {
                if (error) {
                    logger.error(logSystem, logComponent, 'Could not update zcash stats to redis ' + JSON.stringify(error));
                    return;
                }
            });
        }
        // getinfo runs regardless of the getmininginfo outcome
        // (preserved from the original control flow).
        daemon.cmd('getinfo', params, function (result) {
            if (result.error) {
                logger.error(logSystem, logComponent, 'Error getting stats from zcashd'
                    + JSON.stringify(result.error));
            } else {
                var coin = logComponent;
                var infoStatCommands = [
                    ['hset', coin + ':stats', 'networkConnections', result[0].response.connections]
                ];
                redisClient.multi(infoStatCommands).exec(function (error, results) {
                    if (error) {
                        logger.error(logSystem, logComponent, 'Could not update zcash stats to redis ' + JSON.stringify(error));
                        return;
                    }
                });
            }
        });
    });
}
2016-11-27 14:54:08 -08:00
// run coinbase coin transfers every x minutes
// Alternate wallet maintenance each tick: shield the coinbase t-balance on
// one tick, unshield z-balance for payouts on the next — never both at
// once, since concurrent z_sendmany operations fail on the daemon.
var intervalState = 0; // do not send ZtoT and TtoZ and same time, this results in operation failed!
var interval = poolOptions.walletInterval * 60 * 1000; // run every x minutes
setInterval(function () {
    intervalState++;
    if (intervalState === 1) {
        listUnspent(poolOptions.address, null, 1, false, sendTToZ);
    } else {
        listUnspentZ(poolOptions.zAddress, 1, false, sendZToT);
        //listUnspent(null, poolOptions.address, 1, true, function (){});
        intervalState = 0;
    }
    // update zcash stats
    cacheZCashNetworkStats();
}, interval);
2017-03-08 20:06:39 -08:00
// check operation statuses every x seconds
// Poll z_getoperationstatus, consume finished z_sendmany operations via
// z_getoperationresult, and keep opidCount in sync so shield/unshield
// operations stay gated to one at a time.
var opid_interval = poolOptions.walletInterval * 1000;
setInterval(function () {
    var drainFinishedOperations = function (ops) {
        ops.forEach(function (op, i) {
            if (op.status == "success" || op.status == "failed") {
                // Fetch (and thereby clear) the result of a finished op.
                daemon.cmd('z_getoperationresult', [[op.id]], function (result) {
                    if (result.error) {
                        logger.warning(logSystem, logComponent, 'Unable to get payment operation id result ' + JSON.stringify(result));
                    }
                    if (result.response) {
                        if (opidCount > 0) {
                            opidCount = 0;
                        }
                        if (op.status == "failed") {
                            if (op.error) {
                                logger.error(logSystem, logComponent, "Payment operation failed " + op.id + " " + op.error.code + ", " + op.error.message);
                            } else {
                                logger.error(logSystem, logComponent, "Payment operation failed " + op.id);
                            }
                        } else {
                            logger.special(logSystem, logComponent, 'Payment operation success ' + op.id + ' txid: ' + op.result.txid);
                        }
                    }
                }, true, true);
            } else if (op.status == "executing") {
                // Log the in-progress operation once and mark it active.
                if (opidCount == 0) {
                    opidCount++;
                    logger.special(logSystem, logComponent, 'Payment operation in progress ' + op.id);
                }
            }
        });
    };
    daemon.cmd('z_getoperationstatus', null, function (result) {
        if (result.error) {
            logger.warning(logSystem, logComponent, 'Unable to get operation ids for clearing.');
        }
        if (result.response) {
            drainFinishedOperations(result.response);
        }
    }, true, true);
}, opid_interval);
2014-03-09 19:31:58 -07:00
2014-05-02 14:59:46 -07:00
// Converts integer satoshis to a float coin amount at coinPrecision digits.
var satoshisToCoins = function (satoshis) {
    var asCoins = (satoshis / magnitude).toFixed(coinPrecision);
    return parseFloat(asCoins);
};
2014-05-07 10:58:56 -07:00
/**
 * Converts a coin amount to satoshis.
 * Fix: the original returned `coins * magnitude` directly, which leaves
 * floating-point residue (e.g. 0.1 * 1e8 === 10000000.000000002) even though
 * balances are handled as integral satoshis downstream; round to the
 * nearest whole satoshi so stored balances stay exact.
 */
var coinsToSatoshies = function (coins) {
    return Math.round(coins * magnitude);
};
2017-03-08 20:06:39 -08:00
// Rounds a coin amount to 8 decimal places (1-satoshi resolution) and
// returns it as a number rather than a string.
function balanceRound(number) {
    var wholeSatoshis = Math.round(number * 100000000);
    return parseFloat((wholeSatoshis / 100000000).toFixed(8));
}
2014-03-26 23:56:36 -07:00
/* Deal with numbers in smallest possible units (satoshis) as much as possible. This greatly helps with accuracy
when rounding and whatnot. When we are storing numbers for only humans to see, store in whole coin units. */
2014-03-12 15:33:29 -07:00
2014-03-11 18:56:19 -07:00
var processPayments = function ( ) {
2014-03-27 15:29:43 -07:00
var startPaymentProcess = Date . now ( ) ;
2014-05-02 14:59:46 -07:00
var timeSpentRPC = 0 ;
var timeSpentRedis = 0 ;
2014-03-11 18:56:19 -07:00
2014-05-02 14:59:46 -07:00
var startTimeRedis ;
var startTimeRPC ;
var startRedisTimer = function ( ) { startTimeRedis = Date . now ( ) } ;
var endRedisTimer = function ( ) { timeSpentRedis += Date . now ( ) - startTimeRedis } ;
var startRPCTimer = function ( ) { startTimeRPC = Date . now ( ) ; } ;
var endRPCTimer = function ( ) { timeSpentRPC += Date . now ( ) - startTimeRedis } ;
async . waterfall ( [
2014-04-04 18:02:52 -07:00
2014-03-12 15:33:29 -07:00
/* Call redis to get an array of rounds - which are coinbase transactions and block heights from submitted
blocks. */
2017-03-08 20:06:39 -08:00
function ( callback ) {
2014-05-02 14:59:46 -07:00
startRedisTimer ( ) ;
redisClient . multi ( [
2014-05-09 16:43:11 -07:00
[ 'hgetall' , coin + ':balances' ] ,
[ 'smembers' , coin + ':blocksPending' ]
2014-05-02 14:59:46 -07:00
] ) . exec ( function ( error , results ) {
endRedisTimer ( ) ;
2014-03-11 20:47:14 -07:00
2014-03-11 18:56:19 -07:00
if ( error ) {
2014-04-04 18:02:52 -07:00
logger . error ( logSystem , logComponent , 'Could not get blocks from redis ' + JSON . stringify ( error ) ) ;
2014-05-02 14:59:46 -07:00
callback ( true ) ;
2014-03-11 18:56:19 -07:00
return ;
}
2014-05-02 14:59:46 -07:00
var workers = { } ;
for ( var w in results [ 0 ] ) {
2014-05-29 15:56:36 -07:00
workers [ w ] = { balance : coinsToSatoshies ( parseFloat ( results [ 0 ] [ w ] ) ) } ;
2014-03-11 18:56:19 -07:00
}
2014-05-02 14:59:46 -07:00
var rounds = results [ 1 ] . map ( function ( r ) {
2014-03-12 17:09:12 -07:00
var details = r . split ( ':' ) ;
2014-03-22 19:08:33 -07:00
return {
2014-04-02 12:01:05 -07:00
blockHash : details [ 0 ] ,
2014-03-27 15:29:43 -07:00
txHash : details [ 1 ] ,
height : details [ 2 ] ,
2014-03-22 19:08:33 -07:00
serialized : r
} ;
2014-03-11 18:56:19 -07:00
} ) ;
2014-03-12 17:09:12 -07:00
2014-05-02 14:59:46 -07:00
callback ( null , workers , rounds ) ;
2014-03-11 18:56:19 -07:00
} ) ;
} ,
2017-03-08 20:06:39 -08:00
2016-12-27 08:42:48 -08:00
2014-03-12 15:33:29 -07:00
/* Does a batch rpc call to daemon with all the transaction hashes to see if they are confirmed yet.
It also adds the block reward amount to the round object - which the daemon also gives us. */
2014-05-02 14:59:46 -07:00
function ( workers , rounds , callback ) {
2014-03-11 18:56:19 -07:00
2017-03-08 20:06:39 -08:00
// first verify block confirmations by block hash
var batchRPCcommand2 = rounds . map ( function ( r ) {
return [ 'getblock' , [ r . blockHash ] ] ;
2014-03-12 17:09:12 -07:00
} ) ;
2017-03-08 20:06:39 -08:00
// guarantee a response for batchRPCcommand2
batchRPCcommand2 . push ( [ 'getblockcount' ] ) ;
2014-05-02 14:59:46 -07:00
startRPCTimer ( ) ;
2017-03-08 20:06:39 -08:00
daemon . batchCmd ( batchRPCcommand2 , function ( error , blockDetails ) {
2014-05-02 14:59:46 -07:00
endRPCTimer ( ) ;
2017-03-08 20:06:39 -08:00
// error getting block info by hash?
if ( error || ! blockDetails ) {
logger . error ( logSystem , logComponent , 'Check finished - daemon rpc error with batch getblock '
2014-05-02 14:59:46 -07:00
+ JSON . stringify ( error ) ) ;
callback ( true ) ;
2014-03-11 20:47:14 -07:00
return ;
}
2017-03-08 20:06:39 -08:00
// update confirmations in redis for pending blocks
var confirmsUpdate = blockDetails . map ( function ( b ) {
if ( b . result != null && b . result . confirmations > 0 ) {
//if (b.result.confirmations > 100) {
// return ['hdel', logComponent + ':blocksPendingConfirms', b.result.hash];
//}
return [ 'hset' , logComponent + ':blocksPendingConfirms' , b . result . hash , b . result . confirmations ] ;
2014-03-27 11:57:56 -07:00
}
2017-03-08 20:06:39 -08:00
return null ;
} ) ;
// filter nulls, last item is always null...
confirmsUpdate = confirmsUpdate . filter ( function ( val ) { return val !== null ; } ) ;
// guarantee at least one redis update
if ( confirmsUpdate . length < 1 )
confirmsUpdate . push ( [ 'hset' , logComponent + ':blocksPendingConfirms' , 0 , 0 ] ) ;
startRedisTimer ( ) ;
redisClient . multi ( confirmsUpdate ) . exec ( function ( error , updated ) {
endRedisTimer ( ) ;
if ( error ) {
logger . error ( logSystem , logComponent , 'failed to update pending block confirmations'
+ JSON . stringify ( error ) ) ;
callback ( true ) ;
2014-05-02 14:59:46 -07:00
return ;
2014-03-15 17:58:28 -07:00
}
2017-03-08 20:06:39 -08:00
// check for invalid blocks by block hash
blockDetails . forEach ( function ( block , i ) {
// this is just the response from getblockcount
if ( i === blockDetails . length - 1 ) {
return ;
}
// help track duplicate or invalid blocks by block hash
if ( block && block . result && block . result . hash ) {
// find the round for this block hash
for ( var k = 0 ; k < rounds . length ; k ++ ) {
if ( rounds [ k ] . blockHash == block . result . hash ) {
var round = rounds [ k ] ;
var dupFound = false ;
// duplicate, invalid, kicked, orphaned blocks will have negative confirmations
if ( block . result . confirmations < 0 ) {
// check if this is an invalid duplicate
// we need to kick invalid duplicates now, as this will cause a double payout...
for ( var d = 0 ; d < rounds . length ; d ++ ) {
if ( rounds [ d ] . height == block . result . height && rounds [ d ] . blockHash != block . result . hash ) {
logger . warning ( logSystem , logComponent , 'Kicking invalid duplicate block ' + round . height + ' > ' + round . blockHash ) ;
dupFound = true ;
// kick this round now, its completely invalid!
var kickNow = [ ] ;
kickNow . push ( [ 'smove' , coin + ':blocksPending' , coin + ':blocksDuplicate' , round . serialized ] ) ;
startRedisTimer ( ) ;
redisClient . multi ( kickNow ) . exec ( function ( error , kicked ) {
endRedisTimer ( ) ;
if ( error ) {
logger . error ( logSystem , logComponent , 'Error could not kick invalid duplicate block ' + JSON . stringify ( kicked ) ) ;
}
} ) ;
// filter the duplicate out now, just in case we are actually paying this time around...
rounds = rounds . filter ( function ( item ) { return item . txHash != round . txHash ; } ) ;
}
}
// unknown reason why this block failed, possible orphan or kicked soon
// not sure if we should take any action or just wait it out...
if ( ! dupFound ) {
logger . warning ( logSystem , logComponent , 'Daemon reports negative confirmations ' + block . result . confirmations + ' for block: ' + round . height + ' > ' + round . blockHash ) ;
}
}
}
}
}
} ) ;
// now check block transaction ids
var batchRPCcommand = rounds . map ( function ( r ) {
return [ 'gettransaction' , [ r . txHash ] ] ;
} ) ;
// guarantee a response for batchRPCcommand
batchRPCcommand . push ( [ 'getaccount' , [ poolOptions . address ] ] ) ;
startRPCTimer ( ) ;
daemon . batchCmd ( batchRPCcommand , function ( error , txDetails ) {
endRPCTimer ( ) ;
if ( error || ! txDetails ) {
logger . error ( logSystem , logComponent , 'Check finished - daemon rpc error with batch gettransactions '
+ JSON . stringify ( error ) ) ;
callback ( true ) ;
return ;
}
var addressAccount = "" ;
// check for transaction errors and generated coins
txDetails . forEach ( function ( tx , i ) {
if ( i === txDetails . length - 1 ) {
addressAccount = tx . result ;
return ;
}
var round = rounds [ i ] ;
if ( tx . error && tx . error . code === - 5 ) {
logger . warning ( logSystem , logComponent , 'Daemon reports invalid transaction: ' + round . txHash ) ;
round . category = 'kicked' ;
return ;
}
else if ( ! tx . result . details || ( tx . result . details && tx . result . details . length === 0 ) ) {
logger . warning ( logSystem , logComponent , 'Daemon reports no details for transaction: ' + round . txHash ) ;
round . category = 'kicked' ;
return ;
}
else if ( tx . error || ! tx . result ) {
logger . error ( logSystem , logComponent , 'Odd error with gettransaction ' + round . txHash + ' '
+ JSON . stringify ( tx ) ) ;
return ;
}
2014-05-02 14:59:46 -07:00
2017-03-08 20:06:39 -08:00
var generationTx = tx . result . details . filter ( function ( tx ) {
return tx . address === poolOptions . address ;
} ) [ 0 ] ;
2014-05-02 14:59:46 -07:00
2017-03-08 20:06:39 -08:00
if ( ! generationTx && tx . result . details . length === 1 ) {
generationTx = tx . result . details [ 0 ] ;
}
2014-05-02 14:59:46 -07:00
2017-03-08 20:06:39 -08:00
if ( ! generationTx ) {
logger . error ( logSystem , logComponent , 'Missing output details to pool address for transaction '
+ round . txHash ) ;
return ;
}
2014-03-15 17:58:28 -07:00
2017-03-08 20:06:39 -08:00
round . category = generationTx . category ;
if ( round . category === 'generate' ) {
round . reward = generationTx . amount - 0.0004 || generationTx . value - 0.0004 ; // TODO: Adjust fees to be dynamic
}
} ) ;
var canDeleteShares = function ( r ) {
for ( var i = 0 ; i < rounds . length ; i ++ ) {
var compareR = rounds [ i ] ;
if ( ( compareR . height === r . height )
&& ( compareR . category !== 'kicked' )
&& ( compareR . category !== 'orphan' )
&& ( compareR . serialized !== r . serialized ) ) {
return false ;
}
}
return true ;
} ;
//Filter out all rounds that are immature (not confirmed or orphaned yet)
rounds = rounds . filter ( function ( r ) {
switch ( r . category ) {
case 'orphan' :
case 'kicked' :
r . canDeleteShares = canDeleteShares ( r ) ;
case 'generate' :
return true ;
default :
return false ;
}
} ) ;
2014-05-02 14:59:46 -07:00
2017-03-08 20:06:39 -08:00
// check if we have enough tAddress funds to send payments
var totalOwed = 0 ;
for ( var i = 0 ; i < rounds . length ; i ++ ) {
totalOwed = totalOwed + ( rounds [ i ] . reward * magnitude ) - 4000 ; // TODO: make tx fees dynamic
2014-05-07 00:11:27 -07:00
}
2017-03-08 20:06:39 -08:00
listUnspent ( null , poolOptions . address , 1 , false , function ( error , tBalance ) {
if ( tBalance < totalOwed ) {
logger . error ( logSystem , logComponent , ( tBalance / magnitude ) . toFixed ( 8 ) + ' is not enough payment funds to process ' + ( totalOwed / magnitude ) . toFixed ( 8 ) + ' of payments. (Possibly due to pending txs)' ) ;
return callback ( true ) ;
}
else {
// zcash daemon does not support account feature
addressAccount = "" ;
callback ( null , workers , rounds , addressAccount ) ;
}
} )
2014-03-22 19:08:33 -07:00
2017-03-08 20:06:39 -08:00
} ) ;
2014-03-11 18:56:19 -07:00
} ) ;
} ) ;
} ,
2014-03-11 20:47:14 -07:00
2014-03-12 15:33:29 -07:00
/* Does a batch redis call to get shares contributed to each round. Then calculates the reward
amount owed to each miner for each round. */
2014-05-02 14:59:46 -07:00
function ( workers , rounds , addressAccount , callback ) {
2014-03-11 18:56:19 -07:00
2014-03-12 17:09:12 -07:00
var shareLookups = rounds . map ( function ( r ) {
2014-05-09 16:43:11 -07:00
return [ 'hgetall' , coin + ':shares:round' + r . height ]
2014-03-12 17:09:12 -07:00
} ) ;
2014-03-12 15:33:29 -07:00
2014-05-02 14:59:46 -07:00
startRedisTimer ( ) ;
2014-03-12 17:09:12 -07:00
redisClient . multi ( shareLookups ) . exec ( function ( error , allWorkerShares ) {
2014-05-02 14:59:46 -07:00
endRedisTimer ( ) ;
2014-03-11 18:56:19 -07:00
if ( error ) {
2014-05-02 14:59:46 -07:00
callback ( 'Check finished - redis error with multi get rounds share' ) ;
2014-03-11 18:56:19 -07:00
return ;
}
2014-03-11 20:47:14 -07:00
2014-03-22 19:08:33 -07:00
rounds . forEach ( function ( round , i ) {
var workerShares = allWorkerShares [ i ] ;
2014-03-12 15:33:29 -07:00
2014-03-27 15:29:43 -07:00
if ( ! workerShares ) {
logger . error ( logSystem , logComponent , 'No worker shares for round: '
2014-04-02 12:01:05 -07:00
+ round . height + ' blockHash: ' + round . blockHash ) ;
2014-03-27 15:29:43 -07:00
return ;
2014-03-11 20:47:14 -07:00
}
2017-03-08 20:06:39 -08:00
2014-03-27 15:29:43 -07:00
switch ( round . category ) {
2014-05-07 00:11:27 -07:00
case 'kicked' :
2014-03-27 15:29:43 -07:00
case 'orphan' :
2014-05-02 14:59:46 -07:00
round . workerShares = workerShares ;
2014-03-27 15:29:43 -07:00
break ;
case 'generate' :
/* We found a confirmed block! Now get the reward for it and calculate how much
we owe each miner based on the shares they submitted during that block round. */
2014-05-02 14:59:46 -07:00
var reward = parseInt ( round . reward * magnitude ) ;
2014-03-27 15:29:43 -07:00
var totalShares = Object . keys ( workerShares ) . reduce ( function ( p , c ) {
2014-04-24 13:53:06 -07:00
return p + parseFloat ( workerShares [ c ] )
2014-03-27 15:29:43 -07:00
} , 0 ) ;
2014-05-02 14:59:46 -07:00
for ( var workerAddress in workerShares ) {
var percent = parseFloat ( workerShares [ workerAddress ] ) / totalShares ;
2014-03-27 15:29:43 -07:00
var workerRewardTotal = Math . floor ( reward * percent ) ;
2014-05-02 14:59:46 -07:00
var worker = workers [ workerAddress ] = ( workers [ workerAddress ] || { } ) ;
2017-03-08 20:06:39 -08:00
worker . totalShares = ( worker . totalShares || 0 ) + parseFloat ( workerShares [ workerAddress ] ) ;
2014-05-02 14:59:46 -07:00
worker . reward = ( worker . reward || 0 ) + workerRewardTotal ;
2014-03-27 15:29:43 -07:00
}
break ;
2014-03-22 19:08:33 -07:00
}
} ) ;
2014-03-12 17:09:12 -07:00
2014-05-02 14:59:46 -07:00
callback ( null , workers , rounds , addressAccount ) ;
2014-03-11 20:47:14 -07:00
} ) ;
} ,
2014-03-11 21:01:33 -07:00
2014-03-11 20:47:14 -07:00
/* Calculate if any payments are ready to be sent and trigger their sending.
Get the balance difference for each address and pass it along as an object of latest balances, such as
{worker1: balance1, worker2: balance2}.
When sending the balance, the difference should be -1 * the amount they had in the db;
if not sending the balance, the difference should be +(the amount they earned this round).
*/
2014-05-02 14:59:46 -07:00
function (workers, rounds, addressAccount, callback) {

    // Attempts to pay every miner whose accumulated balance meets the
    // minimum-payment threshold via a single `sendmany` RPC. If the wallet
    // cannot cover the tx fees (RPC error -6) it recurses with a slightly
    // higher withholdPercent, shaving rewards until the send fits.
    var trySend = function (withholdPercent) {
        // coin amounts handed to sendmany, keyed by payout address
        var addressAmounts = {};
        // per-address running totals, used only for the minimum-payment check
        var minerTotals = {};
        // satoshis actually being sent in this attempt
        var totalSent = 0;
        var totalShares = 0;

        // total up miner's balances
        for (var w in workers) {
            var worker = workers[w];
            totalShares += (worker.totalShares || 0)
            worker.balance = worker.balance || 0;
            worker.reward = worker.reward || 0;
            // pending satoshis converted to coins, minus the withheld fraction
            var toSend = balanceRound(satoshisToCoins(Math.floor((worker.balance + worker.reward) * (1 - withholdPercent))));
            // the worker key is split on '.'; the part before the first dot
            // is resolved to the miner's payout address (presumably
            // "address.rigName" naming -- TODO confirm against stratum config)
            var address = worker.address = (worker.address || getProperAddress(w.split('.')[0]));
            if (minerTotals[address] != null && minerTotals[address] > 0) {
                minerTotals[address] = balanceRound(minerTotals[address] + toSend);
            } else {
                minerTotals[address] = toSend;
            }
        }

        // now process each workers balance, and pay the miner
        for (var w in workers) {
            var worker = workers[w];
            worker.balance = worker.balance || 0;
            worker.reward = worker.reward || 0;
            var toSend = Math.floor((worker.balance + worker.reward) * (1 - withholdPercent));
            var address = worker.address = (worker.address || getProperAddress(w.split('.')[0]));
            // if miners total is enough, go ahead and add this worker balance
            if (minerTotals[address] >= satoshisToCoins(minPaymentSatoshis)) {
                // NOTE: toSend/totalSent are in satoshis; worker.sent and
                // addressAmounts are in coins
                totalSent += toSend;
                worker.sent = balanceRound(satoshisToCoins(toSend));
                // negative change: the stored balance is being paid out
                worker.balanceChange = Math.min(worker.balance, toSend) * -1;
                // multiple workers may have same address, add them up
                if (addressAmounts[address] != null && addressAmounts[address] > 0) {
                    addressAmounts[address] = balanceRound(addressAmounts[address] + worker.sent);
                } else {
                    addressAmounts[address] = worker.sent;
                }
            }
            else {
                // below threshold: carry this round's earnings forward as balance
                worker.balanceChange = Math.max(toSend - worker.balance, 0);
                worker.sent = 0;
            }
        }

        // if no payouts...continue to next set of callbacks
        if (Object.keys(addressAmounts).length === 0) {
            callback(null, workers, rounds);
            return;
        }

        /*
        var undoPaymentsOnError = function (workers) {
            totalSent = 0;
            // TODO, set round.category to immature, to attempt to pay again
            // we did not send anything to any workers
            for (var w in workers) {
                var worker = workers[w];
                if (worker.sent > 0) {
                    worker.balanceChange = 0;
                    worker.sent = 0;
                }
            }
        };
        */

        // perform the sendmany operation
        daemon.cmd('sendmany', ["", addressAmounts], function (result) {
            // check for failed payments, there are many reasons
            if (result.error && result.error.code === -6) {
                // -6: wallet cannot cover amounts + fees; withhold 1% more
                // from rewards and retry the whole send
                var higherPercent = withholdPercent + 0.01;
                logger.warning(logSystem, logComponent, 'Not enough funds to cover the tx fees for sending out payments, decreasing rewards by '
                    + (higherPercent * 100) + '% and retrying');
                trySend(higherPercent);
            }
            else if (result.error && result.error.code === -5) {
                // invalid address specified in addressAmounts array
                logger.error(logSystem, logComponent, 'Error sending payments ' + result.error.message);
                //undoPaymentsOnError(workers);
                callback(true);
                return;
            }
            else if (result.error && result.error.message != null) {
                // unknown error from daemon
                logger.error(logSystem, logComponent, 'Error sending payments ' + result.error.message);
                //undoPaymentsOnError(workers);
                callback(true);
                return;
            }
            else if (result.error) {
                // some other unknown error
                logger.error(logSystem, logComponent, 'Error sending payments ' + JSON.stringify(result.error));
                //undoPaymentsOnError(workers);
                callback(true);
                return;
            }
            else {
                // make sure sendmany gives us back a txid
                var txid = null;
                if (result.response) {
                    txid = result.response;
                }
                if (txid != null) {
                    // it worked, congrats on your pools payout ;)
                    logger.special(logSystem, logComponent, 'Sent ' + (totalSent / magnitude).toFixed(8)
                        + ' to ' + Object.keys(addressAmounts).length + ' miners; txid: ' + txid);
                    if (withholdPercent > 0) {
                        logger.warning(logSystem, logComponent, 'Had to withhold ' + (withholdPercent * 100)
                            + '% of reward from miners to cover transaction fees. '
                            + 'Fund pool wallet with coins to prevent this from happening');
                    }
                    // save payments data to redis: a zset entry keyed by
                    // timestamp holding [summary, block heights, per-address amounts]
                    var paymentBlocks = rounds.map(function (r) {
                        return parseInt(r.height);
                    });
                    var paymentsUpdate = [];
                    var paymentsData = [{ txid: txid, paid: balanceRound(totalSent / magnitude), shares: totalShares, miners: Object.keys(addressAmounts).length }, { blocks: paymentBlocks }, addressAmounts];
                    paymentsUpdate.push(['zadd', logComponent + ':payments', Date.now(), JSON.stringify(paymentsData)]);
                    startRedisTimer();
                    redisClient.multi(paymentsUpdate).exec(function (error, payments) {
                        endRedisTimer();
                        if (error) {
                            logger.error(logSystem, logComponent, 'Error redis save payments data ' + JSON.stringify(payments));
                        }
                        // a redis failure here is only logged; the coins were
                        // already sent, so processing continues regardless
                        callback(null, workers, rounds);
                    });
                } else {
                    // no txid means we cannot tell whether coins moved; halt
                    // the payment interval to avoid possible double-payouts
                    clearInterval(paymentInterval);
                    logger.error(logSystem, logComponent, 'Error RPC sendmany did not return txid '
                        + JSON.stringify(result) + 'Disabling payment processing to prevent possible double-payouts.');
                    callback(true);
                    return;
                }
            }
        }, true, true);
    };

    trySend(0);
},
function ( workers , rounds , callback ) {
var totalPaid = 0 ;
var balanceUpdateCommands = [ ] ;
var workerPayoutsCommand = [ ] ;
for ( var w in workers ) {
var worker = workers [ w ] ;
if ( worker . balanceChange !== 0 ) {
balanceUpdateCommands . push ( [
2014-05-07 10:58:56 -07:00
'hincrbyfloat' ,
2014-05-09 16:43:11 -07:00
coin + ':balances' ,
2014-05-02 14:59:46 -07:00
w ,
2017-03-08 20:06:39 -08:00
balanceRound ( satoshisToCoins ( worker . balanceChange ) )
2014-05-02 14:59:46 -07:00
] ) ;
2014-03-20 15:25:59 -07:00
}
2014-05-02 14:59:46 -07:00
if ( worker . sent !== 0 ) {
2017-03-08 20:06:39 -08:00
workerPayoutsCommand . push ( [ 'hincrbyfloat' , coin + ':payouts' , w , balanceRound ( worker . sent ) ] ) ;
totalPaid = balanceRound ( totalPaid + worker . sent ) ;
2014-03-20 15:25:59 -07:00
}
2014-05-02 14:59:46 -07:00
}
2014-03-20 15:25:59 -07:00
2014-05-02 14:59:46 -07:00
var movePendingCommands = [ ] ;
var roundsToDelete = [ ] ;
var orphanMergeCommands = [ ] ;
2014-03-20 15:25:59 -07:00
2014-05-07 00:11:27 -07:00
var moveSharesToCurrent = function ( r ) {
var workerShares = r . workerShares ;
2017-03-08 20:06:39 -08:00
if ( workerShares != null ) {
Object . keys ( workerShares ) . forEach ( function ( worker ) {
orphanMergeCommands . push ( [ 'hincrby' , coin + ':shares:roundCurrent' , worker , workerShares [ worker ] ] ) ;
} ) ;
}
2014-05-07 00:11:27 -07:00
} ;
2014-05-02 14:59:46 -07:00
rounds . forEach ( function ( r ) {
switch ( r . category ) {
2014-05-07 00:11:27 -07:00
case 'kicked' :
2014-05-09 16:43:11 -07:00
movePendingCommands . push ( [ 'smove' , coin + ':blocksPending' , coin + ':blocksKicked' , r . serialized ] ) ;
2014-05-02 14:59:46 -07:00
case 'orphan' :
2014-05-09 16:43:11 -07:00
movePendingCommands . push ( [ 'smove' , coin + ':blocksPending' , coin + ':blocksOrphaned' , r . serialized ] ) ;
2014-05-07 00:11:27 -07:00
if ( r . canDeleteShares ) {
moveSharesToCurrent ( r ) ;
2014-05-09 16:43:11 -07:00
roundsToDelete . push ( coin + ':shares:round' + r . height ) ;
2014-05-07 00:11:27 -07:00
}
return ;
2014-05-02 14:59:46 -07:00
case 'generate' :
2014-05-09 16:43:11 -07:00
movePendingCommands . push ( [ 'smove' , coin + ':blocksPending' , coin + ':blocksConfirmed' , r . serialized ] ) ;
2016-12-18 20:11:55 -08:00
roundsToDelete . push ( coin + ':shares:round' + r . height ) ;
2014-05-07 00:11:27 -07:00
return ;
2014-05-02 14:59:46 -07:00
}
} ) ;
2014-03-20 15:25:59 -07:00
2014-05-02 14:59:46 -07:00
var finalRedisCommands = [ ] ;
2014-03-22 19:08:33 -07:00
2014-05-02 14:59:46 -07:00
if ( movePendingCommands . length > 0 )
finalRedisCommands = finalRedisCommands . concat ( movePendingCommands ) ;
2014-03-22 19:08:33 -07:00
2014-05-02 14:59:46 -07:00
if ( orphanMergeCommands . length > 0 )
finalRedisCommands = finalRedisCommands . concat ( orphanMergeCommands ) ;
2014-03-20 15:25:59 -07:00
2014-05-02 14:59:46 -07:00
if ( balanceUpdateCommands . length > 0 )
finalRedisCommands = finalRedisCommands . concat ( balanceUpdateCommands ) ;
2014-03-22 19:08:33 -07:00
2014-05-02 14:59:46 -07:00
if ( workerPayoutsCommand . length > 0 )
finalRedisCommands = finalRedisCommands . concat ( workerPayoutsCommand ) ;
2014-04-02 13:43:58 -07:00
2014-05-02 14:59:46 -07:00
if ( roundsToDelete . length > 0 )
finalRedisCommands . push ( [ 'del' ] . concat ( roundsToDelete ) ) ;
2014-03-22 19:08:33 -07:00
2014-05-02 14:59:46 -07:00
if ( totalPaid !== 0 )
2017-03-08 20:06:39 -08:00
finalRedisCommands . push ( [ 'hincrbyfloat' , coin + ':stats' , 'totalPaid' , balanceRound ( totalPaid ) ] ) ;
2014-03-22 19:08:33 -07:00
2014-05-02 14:59:46 -07:00
if ( finalRedisCommands . length === 0 ) {
callback ( ) ;
return ;
}
2014-03-09 19:31:58 -07:00
2014-05-02 14:59:46 -07:00
startRedisTimer ( ) ;
redisClient . multi ( finalRedisCommands ) . exec ( function ( error , results ) {
endRedisTimer ( ) ;
2014-04-02 03:56:13 -07:00
if ( error ) {
2014-05-02 14:59:46 -07:00
clearInterval ( paymentInterval ) ;
logger . error ( logSystem , logComponent ,
'Payments sent but could not update redis. ' + JSON . stringify ( error )
+ ' Disabling payment processing to prevent possible double-payouts. The redis commands in '
+ coin + '_finalRedisCommands.txt must be ran manually' ) ;
fs . writeFile ( coin + '_finalRedisCommands.txt' , JSON . stringify ( finalRedisCommands ) , function ( err ) {
logger . error ( 'Could not write finalRedisCommands.txt, you are fucked.' ) ;
} ) ;
2014-04-02 03:56:13 -07:00
}
2014-05-02 14:59:46 -07:00
callback ( ) ;
2014-04-02 03:56:13 -07:00
} ) ;
2014-03-11 18:56:19 -07:00
}
2014-03-27 15:29:43 -07:00
2014-05-02 14:59:46 -07:00
], function () {

    // Final waterfall callback: runs once all payment steps complete (or one
    // of them errored). Logs how long the whole interval took, broken down
    // into time spent in redis and in daemon RPC calls.
    var paymentProcessTime = Date.now() - startPaymentProcess;

    logger.debug(logSystem, logComponent, 'Finished interval - time spent: '
        + paymentProcessTime + 'ms total, ' + timeSpentRedis + 'ms redis, '
        + timeSpentRPC + 'ms daemon RPC');

});
2014-03-11 18:56:19 -07:00
} ;
2014-04-26 15:24:06 -07:00
// Normalize a worker identifier into a payable address. A 40-character
// value is treated as a raw hex hash and converted into a proper address
// using the pool address' version byte; anything else is assumed to
// already be a valid address and is returned unchanged.
var getProperAddress = function (address) {
    var looksLikeRawHash = (address.length === 40);
    return looksLikeRawHash
        ? util.addressFromEx(poolOptions.address, address)
        : address;
};
2014-03-20 16:05:13 -07:00
2014-05-03 09:29:31 -07:00
}