2014-05-29 11:52:31 -07:00
var fs = require ( 'fs' ) ;
2017-05-05 21:00:28 -07:00
var request = require ( 'request' ) ;
2014-05-29 11:52:31 -07:00
2014-03-09 19:31:58 -07:00
var redis = require ( 'redis' ) ;
2014-03-11 18:56:19 -07:00
var async = require ( 'async' ) ;
2014-03-09 19:31:58 -07:00
var Stratum = require ( 'stratum-pool' ) ;
2014-04-26 15:24:06 -07:00
var util = require ( 'stratum-pool/lib/util.js' ) ;
2014-03-09 19:31:58 -07:00
module . exports = function ( logger ) {
var poolConfigs = JSON . parse ( process . env . pools ) ;
2014-03-30 16:04:54 -07:00
var enabledPools = [ ] ;
2014-03-09 19:31:58 -07:00
Object . keys ( poolConfigs ) . forEach ( function ( coin ) {
2014-03-30 16:04:54 -07:00
var poolOptions = poolConfigs [ coin ] ;
2014-05-02 14:59:46 -07:00
if ( poolOptions . paymentProcessing &&
poolOptions . paymentProcessing . enabled )
2014-03-30 16:04:54 -07:00
enabledPools . push ( coin ) ;
2014-03-09 19:31:58 -07:00
} ) ;
2014-03-30 16:04:54 -07:00
async . filter ( enabledPools , function ( coin , callback ) {
SetupForPool ( logger , poolConfigs [ coin ] , function ( setupResults ) {
2016-11-27 11:27:08 -08:00
callback ( null , setupResults ) ;
2014-03-30 16:04:54 -07:00
} ) ;
2016-11-27 11:27:08 -08:00
} , function ( err , results ) {
results . forEach ( function ( coin ) {
2014-03-09 19:31:58 -07:00
2014-03-30 16:04:54 -07:00
var poolOptions = poolConfigs [ coin ] ;
2014-05-02 14:59:46 -07:00
var processingConfig = poolOptions . paymentProcessing ;
2014-03-30 16:04:54 -07:00
var logSystem = 'Payments' ;
var logComponent = coin ;
2014-03-09 19:31:58 -07:00
2017-05-25 13:40:53 -07:00
logger . debug ( logSystem , logComponent , 'Payment processing setup with daemon ('
2014-03-30 16:04:54 -07:00
+ processingConfig . daemon . user + '@' + processingConfig . daemon . host + ':' + processingConfig . daemon . port
2017-05-25 13:40:53 -07:00
+ ') and redis (' + poolOptions . redis . host + ':' + poolOptions . redis . port + ')' ) ;
2014-03-30 16:04:54 -07:00
} ) ;
} ) ;
} ;
function SetupForPool ( logger , poolOptions , setupFinished ) {
2014-03-09 19:31:58 -07:00
2014-03-26 14:08:34 -07:00
2014-03-30 16:04:54 -07:00
var coin = poolOptions . coin . name ;
2014-05-02 14:59:46 -07:00
var processingConfig = poolOptions . paymentProcessing ;
2014-03-09 19:31:58 -07:00
2014-03-22 23:16:06 -07:00
var logSystem = 'Payments' ;
var logComponent = coin ;
2017-08-11 19:56:18 -07:00
2017-03-08 20:06:39 -08:00
var opidCount = 0 ;
2017-08-11 19:56:18 -07:00
var opids = [ ] ;
2017-05-05 22:41:54 -07:00
// zcash team recommends 10 confirmations for safety from orphaned blocks
2017-05-25 13:40:53 -07:00
var minConfShield = Math . max ( ( processingConfig . minConf || 10 ) , 1 ) ; // Don't allow 0 conf transactions.
2017-05-13 22:52:26 -07:00
var minConfPayout = Math . max ( ( processingConfig . minConf || 10 ) , 1 ) ;
2017-05-13 23:09:56 -07:00
if ( minConfPayout < 10 ) {
2017-05-25 13:40:53 -07:00
logger . warning ( logSystem , logComponent , logComponent + 'minConf of 10 is recommended to reduce chances of payments being orphaned.' ) ;
2017-05-13 22:52:26 -07:00
}
2017-04-18 18:27:54 -07:00
2017-05-25 13:40:53 -07:00
// minimum paymentInterval of 60 seconds
var paymentIntervalSecs = Math . max ( ( processingConfig . paymentInterval || 180 ) , 60 ) ;
if ( parseInt ( processingConfig . paymentInterval ) < 180 ) {
logger . warning ( logSystem , logComponent , 'paymentInterval of 180 seconds recommended to reduce the RPC work queue.' ) ;
}
var maxBlocksPerPayment = Math . max ( processingConfig . maxBlocksPerPayment || 3 , 1 ) ;
2017-05-03 22:20:08 -07:00
// pplnt - pay per last N time shares
var pplntEnabled = processingConfig . paymentMode === "pplnt" || false ;
var pplntTimeQualify = processingConfig . pplnt || 0.51 ; // 51%
2017-05-05 21:00:28 -07:00
var getMarketStats = poolOptions . coin . getMarketStats === true ;
2017-04-01 17:04:04 -07:00
var requireShielding = poolOptions . coin . requireShielding === true ;
2017-04-08 13:21:42 -07:00
var fee = parseFloat ( poolOptions . coin . txfee ) || parseFloat ( 0.0004 ) ;
2017-04-06 15:23:37 -07:00
2017-04-20 12:31:13 -07:00
logger . debug ( logSystem , logComponent , logComponent + ' requireShielding: ' + requireShielding ) ;
2017-05-13 22:56:39 -07:00
logger . debug ( logSystem , logComponent , logComponent + ' minConf: ' + minConfShield ) ;
2017-04-20 12:31:13 -07:00
logger . debug ( logSystem , logComponent , logComponent + ' payments txfee reserve: ' + fee ) ;
2017-04-19 19:48:24 -07:00
logger . debug ( logSystem , logComponent , logComponent + ' maxBlocksPerPayment: ' + maxBlocksPerPayment ) ;
2017-05-03 22:20:08 -07:00
logger . debug ( logSystem , logComponent , logComponent + ' PPLNT: ' + pplntEnabled + ', time period: ' + pplntTimeQualify ) ;
2014-03-22 23:16:06 -07:00
2014-05-06 19:30:31 -07:00
var daemon = new Stratum . daemon . interface ( [ processingConfig . daemon ] , function ( severity , message ) {
logger [ severity ] ( logSystem , logComponent , message ) ;
} ) ;
2014-05-02 14:59:46 -07:00
var redisClient = redis . createClient ( poolOptions . redis . port , poolOptions . redis . host ) ;
2017-07-01 17:19:08 -07:00
// redis auth if enabled
2017-07-01 17:16:44 -07:00
redisClient . auth ( poolOptions . redis . password ) ;
2014-05-02 14:59:46 -07:00
var magnitude ;
var minPaymentSatoshis ;
var coinPrecision ;
2014-03-09 19:31:58 -07:00
2014-05-02 14:59:46 -07:00
var paymentInterval ;
2014-03-30 16:04:54 -07:00
2016-11-27 11:27:08 -08:00
function validateAddress ( callback ) {
daemon . cmd ( 'validateaddress' , [ poolOptions . address ] , function ( result ) {
if ( result . error ) {
logger . error ( logSystem , logComponent , 'Error with payment processing daemon ' + JSON . stringify ( result . error ) ) ;
callback ( true ) ;
}
else if ( ! result . response || ! result . response . ismine ) {
logger . error ( logSystem , logComponent ,
'Daemon does not own pool address - payment processing can not be done with this daemon, '
+ JSON . stringify ( result . response ) ) ;
callback ( true ) ;
}
else {
callback ( )
}
2016-11-29 19:47:57 -08:00
} , true ) ;
2016-12-01 16:05:46 -08:00
}
function validateTAddress ( callback ) {
2016-11-29 19:47:57 -08:00
daemon . cmd ( 'validateaddress' , [ poolOptions . tAddress ] , function ( result ) {
if ( result . error ) {
logger . error ( logSystem , logComponent , 'Error with payment processing daemon ' + JSON . stringify ( result . error ) ) ;
callback ( true ) ;
}
else if ( ! result . response || ! result . response . ismine ) {
logger . error ( logSystem , logComponent ,
'Daemon does not own pool address - payment processing can not be done with this daemon, '
+ JSON . stringify ( result . response ) ) ;
callback ( true ) ;
}
else {
callback ( )
}
} , true ) ;
2016-12-01 16:05:46 -08:00
}
function validateZAddress ( callback ) {
2016-11-29 19:47:57 -08:00
daemon . cmd ( 'z_validateaddress' , [ poolOptions . zAddress ] , function ( result ) {
if ( result . error ) {
logger . error ( logSystem , logComponent , 'Error with payment processing daemon ' + JSON . stringify ( result . error ) ) ;
callback ( true ) ;
}
else if ( ! result . response || ! result . response . ismine ) {
logger . error ( logSystem , logComponent ,
'Daemon does not own pool address - payment processing can not be done with this daemon, '
+ JSON . stringify ( result . response ) ) ;
callback ( true ) ;
}
else {
callback ( )
}
2016-11-27 11:27:08 -08:00
} , true ) ;
}
function getBalance ( callback ) {
daemon . cmd ( 'getbalance' , [ ] , function ( result ) {
if ( result . error ) {
return callback ( true ) ;
}
try {
var d = result . data . split ( 'result":' ) [ 1 ] . split ( ',' ) [ 0 ] . split ( '.' ) [ 1 ] ;
magnitude = parseInt ( '10' + new Array ( d . length ) . join ( '0' ) ) ;
minPaymentSatoshis = parseInt ( processingConfig . minimumPayment * magnitude ) ;
coinPrecision = magnitude . toString ( ) . length - 1 ;
}
catch ( e ) {
logger . error ( logSystem , logComponent , 'Error detecting number of satoshis in a coin, cannot do payment processing. Tried parsing: ' + result . data ) ;
return callback ( true ) ;
}
callback ( ) ;
} , true , true ) ;
}
function asyncComplete ( err ) {
2014-03-30 16:04:54 -07:00
if ( err ) {
setupFinished ( false ) ;
return ;
}
2017-03-08 20:06:39 -08:00
if ( paymentInterval ) {
clearInterval ( paymentInterval ) ;
}
2017-05-30 15:15:34 -07:00
paymentInterval = setInterval ( processPayments , paymentIntervalSecs * 1000 ) ;
//setTimeout(processPayments, 100);
2014-03-30 16:04:54 -07:00
setupFinished ( true ) ;
2016-11-27 11:27:08 -08:00
}
2014-03-30 16:04:54 -07:00
2017-04-12 11:47:51 -07:00
if ( requireShielding === true ) {
async . parallel ( [ validateAddress , validateTAddress , validateZAddress , getBalance ] , asyncComplete ) ;
} else {
async . parallel ( [ validateAddress , validateTAddress , getBalance ] , asyncComplete ) ;
}
2016-11-27 14:54:08 -08:00
//get t_address coinbalance
2016-12-27 22:08:31 -08:00
function listUnspent ( addr , notAddr , minConf , displayBool , callback ) {
if ( addr !== null ) {
2017-03-08 20:06:39 -08:00
var args = [ minConf , 99999999 , [ addr ] ] ;
2016-12-27 22:08:31 -08:00
} else {
2017-03-08 20:06:39 -08:00
addr = 'Payout wallet' ;
var args = [ minConf , 99999999 ] ;
2016-12-27 22:08:31 -08:00
}
daemon . cmd ( 'listunspent' , args , function ( result ) {
2017-05-30 15:15:34 -07:00
if ( ! result || result . error || result [ 0 ] . error ) {
logger . error ( logSystem , logComponent , 'Error with RPC call listunspent ' + addr + ' ' + JSON . stringify ( result [ 0 ] . error ) ) ;
2016-11-27 19:20:15 -08:00
callback = function ( ) { } ;
2016-11-27 14:54:08 -08:00
callback ( true ) ;
}
else {
2017-05-25 13:40:53 -07:00
var tBalance = parseFloat ( 0 ) ;
2017-03-08 20:06:39 -08:00
if ( result [ 0 ] . response != null && result [ 0 ] . response . length > 0 ) {
for ( var i = 0 , len = result [ 0 ] . response . length ; i < len ; i ++ ) {
2017-05-25 13:40:53 -07:00
if ( result [ 0 ] . response [ i ] . address && result [ 0 ] . response [ i ] . address !== notAddr ) {
tBalance += parseFloat ( result [ 0 ] . response [ i ] . amount || 0 ) ;
2017-03-08 20:06:39 -08:00
}
2016-12-27 22:08:31 -08:00
}
2017-05-25 13:40:53 -07:00
tBalance = coinsRound ( tBalance ) ;
2016-11-27 14:54:08 -08:00
}
2016-12-27 08:42:48 -08:00
if ( displayBool === true ) {
2017-05-25 13:40:53 -07:00
logger . special ( logSystem , logComponent , addr + ' balance of ' + tBalance ) ;
2016-12-27 08:42:48 -08:00
}
2017-05-25 13:40:53 -07:00
callback ( null , coinsToSatoshies ( tBalance ) ) ;
2016-11-27 14:54:08 -08:00
}
} ) ;
}
// get z_address coinbalance
2016-12-27 08:42:48 -08:00
function listUnspentZ ( addr , minConf , displayBool , callback ) {
daemon . cmd ( 'z_getbalance' , [ addr , minConf ] , function ( result ) {
2017-05-25 13:40:53 -07:00
if ( ! result || result . error || result [ 0 ] . error ) {
2017-05-30 15:15:34 -07:00
logger . error ( logSystem , logComponent , 'Error with RPC call z_getbalance ' + addr + ' ' + JSON . stringify ( result [ 0 ] . error ) ) ;
2016-11-27 19:20:15 -08:00
callback = function ( ) { } ;
2016-11-27 14:54:08 -08:00
callback ( true ) ;
}
else {
2017-05-25 13:40:53 -07:00
var zBalance = parseFloat ( 0 ) ;
2017-03-08 20:06:39 -08:00
if ( result [ 0 ] . response != null ) {
2017-05-25 13:40:53 -07:00
zBalance = coinsRound ( result [ 0 ] . response ) ;
2017-03-08 20:06:39 -08:00
}
2016-12-27 08:42:48 -08:00
if ( displayBool === true ) {
2017-03-08 20:06:39 -08:00
logger . special ( logSystem , logComponent , addr . substring ( 0 , 14 ) + '...' + addr . substring ( addr . length - 14 ) + ' balance: ' + ( zBalance ) . toFixed ( 8 ) ) ;
2016-12-27 08:42:48 -08:00
}
2017-05-25 13:40:53 -07:00
callback ( null , coinsToSatoshies ( zBalance ) ) ;
2016-11-27 14:54:08 -08:00
}
} ) ;
}
//send t_address balance to z_address
function sendTToZ ( callback , tBalance ) {
2016-11-27 19:20:15 -08:00
if ( callback === true )
return ;
2017-05-25 13:40:53 -07:00
if ( tBalance === NaN ) {
logger . error ( logSystem , logComponent , 'tBalance === NaN for sendTToZ' ) ;
return ;
}
if ( ( tBalance - 10000 ) <= 0 )
2016-11-27 19:20:15 -08:00
return ;
2017-04-06 15:23:37 -07:00
2017-03-08 20:06:39 -08:00
// do not allow more than a single z_sendmany operation at a time
if ( opidCount > 0 ) {
logger . warning ( logSystem , logComponent , 'sendTToZ is waiting, too many z_sendmany operations already in progress.' ) ;
return ;
}
2017-04-06 15:23:37 -07:00
2017-05-03 22:20:08 -07:00
var amount = satoshisToCoins ( tBalance - 10000 ) ;
2017-03-08 20:06:39 -08:00
var params = [ poolOptions . address , [ { 'address' : poolOptions . zAddress , 'amount' : amount } ] ] ;
daemon . cmd ( 'z_sendmany' , params ,
2016-11-27 14:54:08 -08:00
function ( result ) {
//Check if payments failed because wallet doesn't have enough coins to pay for tx fees
2017-05-30 15:15:34 -07:00
if ( ! result || result . error || result [ 0 ] . error || ! result [ 0 ] . response ) {
logger . error ( logSystem , logComponent , 'Error trying to shield balance ' + amount + ' ' + JSON . stringify ( result [ 0 ] . error ) ) ;
2016-11-27 14:54:08 -08:00
callback = function ( ) { } ;
callback ( true ) ;
}
else {
2017-08-11 19:56:18 -07:00
var opid = ( result . response || result [ 0 ] . response ) ;
2017-03-08 20:06:39 -08:00
opidCount ++ ;
2017-08-11 19:56:18 -07:00
opids . push ( opid ) ;
logger . special ( logSystem , logComponent , 'Shield balance ' + amount + ' ' + opid ) ;
2016-11-27 14:54:08 -08:00
callback = function ( ) { } ;
callback ( null ) ;
}
}
) ;
}
2017-04-06 15:23:37 -07:00
2016-11-27 14:54:08 -08:00
// send z_address balance to t_address
function sendZToT ( callback , zBalance ) {
2016-11-27 19:20:15 -08:00
if ( callback === true )
return ;
2017-05-25 13:40:53 -07:00
if ( zBalance === NaN ) {
logger . error ( logSystem , logComponent , 'zBalance === NaN for sendZToT' ) ;
return ;
}
if ( ( zBalance - 10000 ) <= 0 )
2016-11-27 19:20:15 -08:00
return ;
2017-03-08 20:06:39 -08:00
// do not allow more than a single z_sendmany operation at a time
if ( opidCount > 0 ) {
logger . warning ( logSystem , logComponent , 'sendZToT is waiting, too many z_sendmany operations already in progress.' ) ;
return ;
}
2017-04-06 15:23:37 -07:00
2017-05-03 22:20:08 -07:00
var amount = satoshisToCoins ( zBalance - 10000 ) ;
// unshield no more than 100 ZEC at a time
2017-03-08 20:06:39 -08:00
if ( amount > 100.0 )
amount = 100.0 ;
2017-04-06 15:23:37 -07:00
2017-03-08 20:06:39 -08:00
var params = [ poolOptions . zAddress , [ { 'address' : poolOptions . tAddress , 'amount' : amount } ] ] ;
daemon . cmd ( 'z_sendmany' , params ,
2016-11-27 14:54:08 -08:00
function ( result ) {
//Check if payments failed because wallet doesn't have enough coins to pay for tx fees
2017-05-30 15:15:34 -07:00
if ( ! result || result . error || result [ 0 ] . error || ! result [ 0 ] . response ) {
logger . error ( logSystem , logComponent , 'Error trying to send z_address coin balance to payout t_address.' + JSON . stringify ( result [ 0 ] . error ) ) ;
2016-11-27 14:54:08 -08:00
callback = function ( ) { } ;
callback ( true ) ;
}
else {
2017-08-11 19:56:18 -07:00
var opid = ( result . response || result [ 0 ] . response ) ;
2017-03-08 20:06:39 -08:00
opidCount ++ ;
2017-08-11 19:56:18 -07:00
opids . push ( opid ) ;
logger . special ( logSystem , logComponent , 'Unshield funds for payout ' + amount + ' ' + opid ) ;
2016-11-27 14:54:08 -08:00
callback = function ( ) { } ;
callback ( null ) ;
}
}
) ;
}
2017-05-05 21:00:28 -07:00
function cacheMarketStats ( ) {
var marketStatsUpdate = [ ] ;
2017-06-03 22:32:41 -07:00
var coin = logComponent . replace ( '_testnet' , '' ) . toLowerCase ( ) ;
if ( coin == 'zen' )
coin = 'zencash' ;
2017-05-05 21:00:28 -07:00
request ( 'https://api.coinmarketcap.com/v1/ticker/' + coin + '/' , function ( error , response , body ) {
if ( error ) {
2017-05-30 15:15:34 -07:00
logger . error ( logSystem , logComponent , 'Error with http request to https://api.coinmarketcap.com/ ' + JSON . stringify ( error ) ) ;
2017-05-05 21:00:28 -07:00
return ;
}
if ( response && response . statusCode ) {
if ( response . statusCode == 200 ) {
if ( body ) {
var data = JSON . parse ( body ) ;
if ( data . length > 0 ) {
2017-06-03 22:32:41 -07:00
marketStatsUpdate . push ( [ 'hset' , logComponent + ':stats' , 'coinmarketcap' , JSON . stringify ( data ) ] ) ;
2017-05-05 21:00:28 -07:00
redisClient . multi ( marketStatsUpdate ) . exec ( function ( err , results ) {
if ( err ) {
2017-05-30 15:15:34 -07:00
logger . error ( logSystem , logComponent , 'Error with redis during call to cacheMarketStats() ' + JSON . stringify ( error ) ) ;
2017-05-05 21:00:28 -07:00
return ;
}
} ) ;
}
}
} else {
2017-05-30 15:15:34 -07:00
logger . error ( logSystem , logComponent , 'Error, unexpected http status code during call to cacheMarketStats() ' + JSON . stringify ( response . statusCode ) ) ;
2017-05-05 21:00:28 -07:00
}
}
} ) ;
}
2017-05-03 22:20:08 -07:00
2017-04-01 17:04:04 -07:00
function cacheNetworkStats ( ) {
2017-03-08 20:06:39 -08:00
var params = null ;
daemon . cmd ( 'getmininginfo' , params ,
2017-05-30 15:15:34 -07:00
function ( result ) {
if ( ! result || result . error || result [ 0 ] . error || ! result [ 0 ] . response ) {
logger . error ( logSystem , logComponent , 'Error with RPC call getmininginfo ' + JSON . stringify ( result [ 0 ] . error ) ) ;
2017-04-01 17:04:04 -07:00
return ;
2017-05-30 15:15:34 -07:00
}
var coin = logComponent ;
var finalRedisCommands = [ ] ;
if ( result [ 0 ] . response . blocks !== null ) {
finalRedisCommands . push ( [ 'hset' , coin + ':stats' , 'networkBlocks' , result [ 0 ] . response . blocks ] ) ;
}
if ( result [ 0 ] . response . difficulty !== null ) {
finalRedisCommands . push ( [ 'hset' , coin + ':stats' , 'networkDiff' , result [ 0 ] . response . difficulty ] ) ;
}
if ( result [ 0 ] . response . networkhashps !== null ) {
finalRedisCommands . push ( [ 'hset' , coin + ':stats' , 'networkSols' , result [ 0 ] . response . networkhashps ] ) ;
2017-03-08 20:06:39 -08:00
}
2017-04-06 15:23:37 -07:00
2017-04-01 17:04:04 -07:00
daemon . cmd ( 'getnetworkinfo' , params ,
2017-03-08 20:06:39 -08:00
function ( result ) {
2017-05-30 15:15:34 -07:00
if ( ! result || result . error || result [ 0 ] . error || ! result [ 0 ] . response ) {
logger . error ( logSystem , logComponent , 'Error with RPC call getnetworkinfo ' + JSON . stringify ( result [ 0 ] . error ) ) ;
2017-04-01 17:04:04 -07:00
return ;
2017-03-08 20:06:39 -08:00
}
2017-05-30 15:15:34 -07:00
if ( result [ 0 ] . response . connections !== null ) {
finalRedisCommands . push ( [ 'hset' , coin + ':stats' , 'networkConnections' , result [ 0 ] . response . connections ] ) ;
}
if ( result [ 0 ] . response . version !== null ) {
finalRedisCommands . push ( [ 'hset' , coin + ':stats' , 'networkVersion' , result [ 0 ] . response . version ] ) ;
}
if ( result [ 0 ] . response . subversion !== null ) {
finalRedisCommands . push ( [ 'hset' , coin + ':stats' , 'networkSubVersion' , result [ 0 ] . response . subversion ] ) ;
}
if ( result [ 0 ] . response . protocolversion !== null ) {
finalRedisCommands . push ( [ 'hset' , coin + ':stats' , 'networkProtocolVersion' , result [ 0 ] . response . protocolversion ] ) ;
}
if ( finalRedisCommands . length <= 0 )
return ;
2017-04-01 17:04:04 -07:00
redisClient . multi ( finalRedisCommands ) . exec ( function ( error , results ) {
if ( error ) {
2017-05-30 15:15:34 -07:00
logger . error ( logSystem , logComponent , 'Error with redis during call to cacheNetworkStats() ' + JSON . stringify ( error ) ) ;
2017-04-01 17:04:04 -07:00
return ;
}
} ) ;
2017-03-08 20:06:39 -08:00
}
2017-04-01 17:04:04 -07:00
) ;
2017-03-08 20:06:39 -08:00
}
) ;
}
2017-04-06 15:23:37 -07:00
2017-05-25 13:40:53 -07:00
// run shielding process every x minutes
2017-05-05 22:41:54 -07:00
var shieldIntervalState = 0 ; // do not send ZtoT and TtoZ and same time, this results in operation failed!
2017-05-25 13:40:53 -07:00
var shielding _interval = Math . max ( parseInt ( poolOptions . walletInterval || 1 ) , 1 ) * 60 * 1000 ; // run every x minutes
2017-05-05 22:41:54 -07:00
// shielding not required for some equihash coins
if ( requireShielding === true ) {
var shieldInterval = setInterval ( function ( ) {
shieldIntervalState ++ ;
switch ( shieldIntervalState ) {
2017-04-01 17:04:04 -07:00
case 1 :
listUnspent ( poolOptions . address , null , minConfShield , false , sendTToZ ) ;
break ;
default :
listUnspentZ ( poolOptions . zAddress , minConfShield , false , sendZToT ) ;
2017-05-05 22:41:54 -07:00
shieldIntervalState = 0 ;
2017-04-01 17:04:04 -07:00
break ;
}
2017-05-05 22:41:54 -07:00
} , shielding _interval ) ;
}
// stats caching every 58 seconds
var stats _interval = 58 * 1000 ;
var statsInterval = setInterval ( function ( ) {
2017-04-01 17:04:04 -07:00
// update network stats using coin daemon
cacheNetworkStats ( ) ;
2017-05-05 21:00:28 -07:00
// update market stats using coinmarketcap
if ( getMarketStats === true ) {
cacheMarketStats ( ) ;
}
2017-05-05 22:41:54 -07:00
} , stats _interval ) ;
2017-04-06 15:23:37 -07:00
2017-05-05 22:41:54 -07:00
// check operation statuses every 57 seconds
var opid _interval = 57 * 1000 ;
2017-04-01 17:04:04 -07:00
// shielding not required for some equihash coins
if ( requireShielding === true ) {
2017-05-03 22:20:08 -07:00
var checkOpids = function ( ) {
2017-05-30 15:15:34 -07:00
clearTimeout ( opidTimeout ) ;
2017-05-03 22:20:08 -07:00
var checkOpIdSuccessAndGetResult = function ( ops ) {
var batchRPC = [ ] ;
2017-08-15 09:12:45 -07:00
// if there are no op-ids
if ( ops . length == 0 ) {
// and we think there is
if ( opidCount !== 0 ) {
// clear them!
opidCount = 0 ;
opids = [ ] ;
logger . warning ( logSystem , logComponent , 'Clearing operation ids due to empty result set.' ) ;
}
}
2017-05-03 22:20:08 -07:00
ops . forEach ( function ( op , i ) {
2017-05-30 15:15:34 -07:00
// check operation id status
2017-05-03 22:20:08 -07:00
if ( op . status == "success" || op . status == "failed" ) {
2017-05-30 15:15:34 -07:00
// clear operation id result
2017-08-11 19:56:18 -07:00
var opid _index = opids . indexOf ( op . id ) ;
if ( opid _index > - 1 ) {
// clear operation id count
batchRPC . push ( [ 'z_getoperationresult' , [ [ op . id ] ] ] ) ;
opidCount -- ;
opids . splice ( opid _index , 1 ) ;
2017-03-08 20:06:39 -08:00
}
2017-05-30 15:15:34 -07:00
// log status to console
2017-05-03 22:20:08 -07:00
if ( op . status == "failed" ) {
if ( op . error ) {
logger . error ( logSystem , logComponent , "Shielding operation failed " + op . id + " " + op . error . code + ", " + op . error . message ) ;
2017-03-08 20:06:39 -08:00
} else {
2017-05-03 22:20:08 -07:00
logger . error ( logSystem , logComponent , "Shielding operation failed " + op . id ) ;
2017-03-08 20:06:39 -08:00
}
2017-05-03 22:20:08 -07:00
} else {
logger . special ( logSystem , logComponent , 'Shielding operation success ' + op . id + ' txid: ' + op . result . txid ) ;
}
} else if ( op . status == "executing" ) {
2017-08-15 09:12:45 -07:00
logger . special ( logSystem , logComponent , 'Shielding operation in progress ' + op . id ) ;
2017-03-08 20:06:39 -08:00
}
2017-05-03 22:20:08 -07:00
} ) ;
2017-05-30 15:15:34 -07:00
// if there are no completed operations
2017-05-03 22:20:08 -07:00
if ( batchRPC . length <= 0 ) {
2017-05-30 15:15:34 -07:00
opidTimeout = setTimeout ( checkOpids , opid _interval ) ;
2017-05-03 22:20:08 -07:00
return ;
2017-03-08 20:06:39 -08:00
}
2017-05-30 15:15:34 -07:00
// clear results for completed operations
2017-05-03 22:20:08 -07:00
daemon . batchCmd ( batchRPC , function ( error , results ) {
if ( error || ! results ) {
2017-05-30 15:15:34 -07:00
opidTimeout = setTimeout ( checkOpids , opid _interval ) ;
logger . error ( logSystem , logComponent , 'Error with RPC call z_getoperationresult ' + JSON . stringify ( error ) ) ;
2017-05-03 22:20:08 -07:00
return ;
}
2017-05-30 15:15:34 -07:00
// check result execution_secs vs pool_config
2017-05-03 22:20:08 -07:00
results . forEach ( function ( result , i ) {
2017-07-19 18:41:18 -07:00
if ( result . result [ i ] && parseFloat ( result . result [ i ] . execution _secs || 0 ) > shielding _interval ) {
2017-05-30 15:15:34 -07:00
logger . warning ( logSystem , logComponent , 'Warning, walletInverval shorter than opid execution time of ' + result . result [ i ] . execution _secs + ' secs.' ) ;
}
2017-05-03 22:20:08 -07:00
} ) ;
2017-05-30 15:15:34 -07:00
// keep checking operation ids
opidTimeout = setTimeout ( checkOpids , opid _interval ) ;
2017-05-03 22:20:08 -07:00
} ) ;
} ;
2017-05-30 15:15:34 -07:00
// check for completed operation ids
2017-05-03 22:20:08 -07:00
daemon . cmd ( 'z_getoperationstatus' , null , function ( result ) {
2017-05-30 15:15:34 -07:00
var err = false ;
if ( result . error ) {
err = true ;
logger . error ( logSystem , logComponent , 'Error with RPC call z_getoperationstatus ' + JSON . stringify ( result . error ) ) ;
} else if ( result . response ) {
checkOpIdSuccessAndGetResult ( result . response ) ;
} else {
err = true ;
logger . error ( logSystem , logComponent , 'No response from z_getoperationstatus RPC call.' ) ;
}
if ( err === true ) {
opidTimeout = setTimeout ( checkOpids , opid _interval ) ;
2017-08-11 19:56:18 -07:00
if ( opidCount !== 0 ) {
2017-05-30 15:15:34 -07:00
opidCount = 0 ;
2017-08-11 19:56:18 -07:00
opids = [ ] ;
2017-05-30 15:15:34 -07:00
logger . warning ( logSystem , logComponent , 'Clearing operation ids due to RPC call errors.' ) ;
}
2017-05-25 13:40:53 -07:00
}
2017-05-03 22:20:08 -07:00
} , true , true ) ;
}
2017-05-30 15:15:34 -07:00
var opidTimeout = setTimeout ( checkOpids , opid _interval ) ;
2017-05-03 22:20:08 -07:00
}
function roundTo ( n , digits ) {
if ( digits === undefined ) {
digits = 0 ;
}
var multiplicator = Math . pow ( 10 , digits ) ;
n = parseFloat ( ( n * multiplicator ) . toFixed ( 11 ) ) ;
var test = ( Math . round ( n ) / multiplicator ) ;
return + ( test . toFixed ( digits ) ) ;
2017-04-01 17:04:04 -07:00
}
2014-03-09 19:31:58 -07:00
2014-05-02 14:59:46 -07:00
var satoshisToCoins = function ( satoshis ) {
2017-05-03 22:20:08 -07:00
return roundTo ( ( satoshis / magnitude ) , coinPrecision ) ;
2014-03-26 23:56:36 -07:00
} ;
2014-05-07 10:58:56 -07:00
var coinsToSatoshies = function ( coins ) {
2017-04-08 13:12:56 -07:00
return Math . round ( coins * magnitude ) ;
2014-05-07 10:58:56 -07:00
} ;
2017-05-03 22:20:08 -07:00
function coinsRound ( number ) {
return roundTo ( number , coinPrecision ) ;
2017-03-08 20:06:39 -08:00
}
2017-04-18 18:27:54 -07:00
function checkForDuplicateBlockHeight ( rounds , height ) {
var count = 0 ;
for ( var i = 0 ; i < rounds . length ; i ++ ) {
if ( rounds [ i ] . height == height )
count ++ ;
}
return count > 1 ;
}
2014-03-26 23:56:36 -07:00
    /* Deal with numbers in smallest possible units (satoshis) as much as possible. This greatly helps with accuracy
       when rounding and whatnot. When we are storing numbers for only humans to see, store in whole coin units. */
2014-03-12 15:33:29 -07:00
2014-03-11 18:56:19 -07:00
var processPayments = function ( ) {
2014-03-27 15:29:43 -07:00
var startPaymentProcess = Date . now ( ) ;
2014-05-02 14:59:46 -07:00
var timeSpentRPC = 0 ;
var timeSpentRedis = 0 ;
2014-03-11 18:56:19 -07:00
2014-05-02 14:59:46 -07:00
var startTimeRedis ;
var startTimeRPC ;
var startRedisTimer = function ( ) { startTimeRedis = Date . now ( ) } ;
var endRedisTimer = function ( ) { timeSpentRedis += Date . now ( ) - startTimeRedis } ;
var startRPCTimer = function ( ) { startTimeRPC = Date . now ( ) ; } ;
var endRPCTimer = function ( ) { timeSpentRPC += Date . now ( ) - startTimeRedis } ;
async . waterfall ( [
2017-05-03 22:20:08 -07:00
/ *
Step 1 - build workers and rounds objects from redis
* removes duplicate block submissions from redis
* /
2017-04-06 15:23:37 -07:00
function ( callback ) {
2014-05-02 14:59:46 -07:00
startRedisTimer ( ) ;
redisClient . multi ( [
2014-05-09 16:43:11 -07:00
[ 'hgetall' , coin + ':balances' ] ,
[ 'smembers' , coin + ':blocksPending' ]
2014-05-02 14:59:46 -07:00
] ) . exec ( function ( error , results ) {
endRedisTimer ( ) ;
2014-03-11 18:56:19 -07:00
if ( error ) {
2014-04-04 18:02:52 -07:00
logger . error ( logSystem , logComponent , 'Could not get blocks from redis ' + JSON . stringify ( error ) ) ;
2014-05-02 14:59:46 -07:00
callback ( true ) ;
2014-03-11 18:56:19 -07:00
return ;
}
2017-05-03 22:20:08 -07:00
// build workers object from :balances
2014-05-02 14:59:46 -07:00
var workers = { } ;
for ( var w in results [ 0 ] ) {
2014-05-29 15:56:36 -07:00
workers [ w ] = { balance : coinsToSatoshies ( parseFloat ( results [ 0 ] [ w ] ) ) } ;
2014-03-11 18:56:19 -07:00
}
2017-05-03 22:20:08 -07:00
// build rounds object from :blocksPending
2014-05-02 14:59:46 -07:00
var rounds = results [ 1 ] . map ( function ( r ) {
2014-03-12 17:09:12 -07:00
var details = r . split ( ':' ) ;
2014-03-22 19:08:33 -07:00
return {
2014-04-02 12:01:05 -07:00
blockHash : details [ 0 ] ,
2014-03-27 15:29:43 -07:00
txHash : details [ 1 ] ,
height : details [ 2 ] ,
2017-04-18 18:27:54 -07:00
minedby : details [ 3 ] ,
duplicate : false ,
2014-03-22 19:08:33 -07:00
serialized : r
} ;
2014-03-11 18:56:19 -07:00
} ) ;
2017-08-02 19:26:32 -07:00
/* sort rounds by block hieght to pay in order */
rounds . sort ( function ( a , b ) {
return a . height - b . height ;
} ) ;
2017-04-18 18:27:54 -07:00
// find duplicate blocks by height
// this can happen when two or more solutions are submitted at the same block height
var duplicateFound = false ;
for ( var i = 0 ; i < rounds . length ; i ++ ) {
if ( checkForDuplicateBlockHeight ( rounds , rounds [ i ] . height ) === true ) {
rounds [ i ] . duplicate = true ;
duplicateFound = true ;
}
}
// handle duplicates if needed
if ( duplicateFound ) {
var dups = rounds . filter ( function ( round ) { return round . duplicate ; } ) ;
2017-04-19 19:05:11 -07:00
logger . warning ( logSystem , logComponent , 'Duplicate pending blocks found: ' + JSON . stringify ( dups ) ) ;
2017-04-18 18:27:54 -07:00
// attempt to find the invalid duplicates
var rpcDupCheck = dups . map ( function ( r ) {
return [ 'getblock' , [ r . blockHash ] ] ;
} ) ;
startRPCTimer ( ) ;
daemon . batchCmd ( rpcDupCheck , function ( error , blocks ) {
endRPCTimer ( ) ;
if ( error || ! blocks ) {
logger . error ( logSystem , logComponent , 'Error with duplicate block check rpc call getblock ' + JSON . stringify ( error ) ) ;
return ;
2017-04-19 19:05:11 -07:00
}
2017-04-18 18:27:54 -07:00
// look for the invalid duplicate block
2017-04-19 14:19:44 -07:00
var validBlocks = { } ; // hashtable for unique look up
var invalidBlocks = [ ] ; // array for redis work
2017-04-18 18:27:54 -07:00
blocks . forEach ( function ( block , i ) {
if ( block && block . result ) {
// invalid duplicate submit blocks have negative confirmations
if ( block . result . confirmations < 0 ) {
2017-04-19 14:19:44 -07:00
logger . warning ( logSystem , logComponent , 'Remove invalid duplicate block ' + block . result . height + ' > ' + block . result . hash ) ;
// move from blocksPending to blocksDuplicate...
2017-04-18 18:27:54 -07:00
invalidBlocks . push ( [ 'smove' , coin + ':blocksPending' , coin + ':blocksDuplicate' , dups [ i ] . serialized ] ) ;
} else {
2017-04-19 14:19:44 -07:00
// block must be valid, make sure it is unique
if ( validBlocks . hasOwnProperty ( dups [ i ] . blockHash ) ) {
// not unique duplicate block
logger . warning ( logSystem , logComponent , 'Remove non-unique duplicate block ' + block . result . height + ' > ' + block . result . hash ) ;
// move from blocksPending to blocksDuplicate...
invalidBlocks . push ( [ 'smove' , coin + ':blocksPending' , coin + ':blocksDuplicate' , dups [ i ] . serialized ] ) ;
} else {
// keep unique valid block
validBlocks [ dups [ i ] . blockHash ] = dups [ i ] . serialized ;
logger . debug ( logSystem , logComponent , 'Keep valid duplicate block ' + block . result . height + ' > ' + block . result . hash ) ;
}
2017-04-18 18:27:54 -07:00
}
}
} ) ;
// filter out all duplicates to prevent double payments
rounds = rounds . filter ( function ( round ) { return ! round . duplicate ; } ) ;
// if we detected the invalid duplicates, move them
if ( invalidBlocks . length > 0 ) {
// move invalid duplicate blocks in redis
startRedisTimer ( ) ;
redisClient . multi ( invalidBlocks ) . exec ( function ( error , kicked ) {
endRedisTimer ( ) ;
if ( error ) {
logger . error ( logSystem , logComponent , 'Error could not move invalid duplicate blocks in redis ' + JSON . stringify ( error ) ) ;
}
// continue payments normally
callback ( null , workers , rounds ) ;
} ) ;
} else {
// notify pool owner that we are unable to find the invalid duplicate blocks, manual intervention required...
2017-04-18 19:03:36 -07:00
logger . error ( logSystem , logComponent , 'Unable to detect invalid duplicate blocks, duplicate block payments on hold.' ) ;
2017-04-18 18:27:54 -07:00
// continue payments normally
callback ( null , workers , rounds ) ;
}
} ) ;
} else {
// no duplicates, continue payments normally
callback ( null , workers , rounds ) ;
}
2014-03-11 18:56:19 -07:00
} ) ;
} ,
2017-04-06 15:23:37 -07:00
2016-12-27 08:42:48 -08:00
2017-05-03 22:20:08 -07:00
/ *
Step 2 - check if mined block coinbase tx are ready for payment
* adds block reward to rounds object
* adds block confirmations count to rounds object
* updates confirmation counts in redis
* /
2014-05-02 14:59:46 -07:00
function ( workers , rounds , callback ) {
2017-05-03 22:20:08 -07:00
// get pending block tx details
var batchRPCcommand = rounds . map ( function ( r ) {
return [ 'gettransaction' , [ r . txHash ] ] ;
2014-03-12 17:09:12 -07:00
} ) ;
2017-05-03 22:20:08 -07:00
// get account address (not implemented at this time)
batchRPCcommand . push ( [ 'getaccount' , [ poolOptions . address ] ] ) ;
2017-04-06 15:23:37 -07:00
2014-05-02 14:59:46 -07:00
startRPCTimer ( ) ;
2017-05-03 22:20:08 -07:00
daemon . batchCmd ( batchRPCcommand , function ( error , txDetails ) {
2014-05-02 14:59:46 -07:00
endRPCTimer ( ) ;
2017-05-03 22:20:08 -07:00
if ( error || ! txDetails ) {
logger . error ( logSystem , logComponent , 'Check finished - daemon rpc error with batch gettransactions ' + JSON . stringify ( error ) ) ;
2014-05-02 14:59:46 -07:00
callback ( true ) ;
2014-03-11 20:47:14 -07:00
return ;
}
2017-04-06 15:23:37 -07:00
2017-05-03 22:20:08 -07:00
var confirmsUpdate = [ ] ;
var addressAccount = "" ;
// check for transaction errors and generated coins
txDetails . forEach ( function ( tx , i ) {
if ( i === txDetails . length - 1 ) {
if ( tx . result && tx . result . toString ( ) . length > 0 ) {
addressAccount = tx . result . toString ( ) ;
2017-03-08 22:15:10 -08:00
}
2017-05-03 22:20:08 -07:00
return ;
2014-03-27 11:57:56 -07:00
}
2017-05-03 22:20:08 -07:00
var round = rounds [ i ] ;
// look for transaction errors
if ( tx . error && tx . error . code === - 5 ) {
logger . warning ( logSystem , logComponent , 'Daemon reports invalid transaction: ' + round . txHash ) ;
round . category = 'kicked' ;
2014-05-02 14:59:46 -07:00
return ;
2014-03-15 17:58:28 -07:00
}
2017-05-03 22:20:08 -07:00
else if ( ! tx . result . details || ( tx . result . details && tx . result . details . length === 0 ) ) {
logger . warning ( logSystem , logComponent , 'Daemon reports no details for transaction: ' + round . txHash ) ;
round . category = 'kicked' ;
return ;
}
else if ( tx . error || ! tx . result ) {
logger . error ( logSystem , logComponent , 'Odd error with gettransaction ' + round . txHash + ' ' + JSON . stringify ( tx ) ) ;
return ;
}
// get the coin base generation tx
var generationTx = tx . result . details . filter ( function ( tx ) {
return tx . address === poolOptions . address ;
} ) [ 0 ] ;
if ( ! generationTx && tx . result . details . length === 1 ) {
generationTx = tx . result . details [ 0 ] ;
}
if ( ! generationTx ) {
logger . error ( logSystem , logComponent , 'Missing output details to pool address for transaction ' + round . txHash ) ;
return ;
}
// get transaction category for round
round . category = generationTx . category ;
round . confirmations = parseInt ( ( tx . result . confirmations || 0 ) ) ;
// get reward for newly generated blocks
if ( round . category === 'generate' ) {
round . reward = coinsRound ( parseFloat ( generationTx . amount || generationTx . value ) ) ;
}
// update confirmations in redis
confirmsUpdate . push ( [ 'hset' , coin + ':blocksPendingConfirms' , round . blockHash , round . confirmations ] ) ;
} ) ;
2017-04-06 15:23:37 -07:00
2017-05-03 22:20:08 -07:00
var canDeleteShares = function ( r ) {
for ( var i = 0 ; i < rounds . length ; i ++ ) {
var compareR = rounds [ i ] ;
if ( ( compareR . height === r . height )
&& ( compareR . category !== 'kicked' )
&& ( compareR . category !== 'orphan' )
&& ( compareR . serialized !== r . serialized ) ) {
return false ;
2017-03-08 20:06:39 -08:00
}
2017-05-03 22:20:08 -07:00
}
return true ;
} ;
2017-04-06 15:23:37 -07:00
2017-08-02 19:26:32 -07:00
// only pay max blocks at a time
2017-05-03 22:20:08 -07:00
var payingBlocks = 0 ;
rounds = rounds . filter ( function ( r ) {
switch ( r . category ) {
case 'orphan' :
case 'kicked' :
r . canDeleteShares = canDeleteShares ( r ) ;
return true ;
case 'generate' :
payingBlocks ++ ;
2017-08-03 14:43:31 -07:00
return ( payingBlocks <= maxBlocksPerPayment ) ;
2017-04-18 18:27:54 -07:00
2017-05-03 22:20:08 -07:00
default :
return false ;
}
} ) ;
2014-05-02 14:59:46 -07:00
2017-05-03 22:20:08 -07:00
// TODO: make tx fees dynamic
var feeSatoshi = coinsToSatoshies ( fee ) ;
// calculate what the pool owes its miners
var totalOwed = parseInt ( 0 ) ;
for ( var i = 0 ; i < rounds . length ; i ++ ) {
// only pay generated blocks, not orphaned or kicked
if ( rounds [ i ] . category == 'generate' ) {
totalOwed = totalOwed + coinsToSatoshies ( rounds [ i ] . reward ) - feeSatoshi ;
}
}
2017-04-06 15:23:37 -07:00
2017-05-03 22:20:08 -07:00
var notAddr = null ;
if ( requireShielding === true ) {
notAddr = poolOptions . address ;
}
// update confirmations for pending blocks in redis
if ( confirmsUpdate . length > 0 ) {
startRedisTimer ( ) ;
redisClient . multi ( confirmsUpdate ) . exec ( function ( error , result ) {
endRedisTimer ( ) ;
if ( error ) {
logger . error ( logSystem , logComponent , 'Error could not update confirmations for pending blocks in redis ' + JSON . stringify ( error ) ) ;
return callback ( true ) ;
2014-05-07 00:11:27 -07:00
}
2017-05-03 22:20:08 -07:00
// check if we have enough tAddress funds to begin payment processing
2017-04-01 17:04:04 -07:00
listUnspent ( null , notAddr , minConfPayout , false , function ( error , tBalance ) {
if ( error ) {
2017-04-18 18:27:54 -07:00
logger . error ( logSystem , logComponent , 'Error checking pool balance before processing payments.' ) ;
2017-04-06 15:23:37 -07:00
return callback ( true ) ;
2017-04-01 17:04:04 -07:00
} else if ( tBalance < totalOwed ) {
2017-05-03 22:20:08 -07:00
logger . error ( logSystem , logComponent , 'Insufficient funds (' + satoshisToCoins ( tBalance ) + ') to process payments (' + satoshisToCoins ( totalOwed ) + ') for ' + payingBlocks + ' blocks; possibly waiting for txs.' ) ;
2017-03-08 20:06:39 -08:00
return callback ( true ) ;
}
2017-05-03 22:20:08 -07:00
// account feature not implemented at this time
addressAccount = "" ;
2017-05-03 23:41:00 -07:00
// begin payments for generated coins
2017-05-03 22:20:08 -07:00
callback ( null , workers , rounds , addressAccount ) ;
} ) ;
2017-03-08 20:06:39 -08:00
} ) ;
2017-05-03 22:20:08 -07:00
} else {
// no pending blocks, need to find a block!
return callback ( true ) ;
}
} )
2014-03-11 18:56:19 -07:00
} ,
2014-03-11 20:47:14 -07:00
2017-05-03 22:20:08 -07:00
/ *
Step 3 - lookup shares in redis and calculate rewards
* /
2014-05-02 14:59:46 -07:00
function ( workers , rounds , addressAccount , callback ) {
2017-05-03 22:20:08 -07:00
// pplnt times lookup
var timeLookups = rounds . map ( function ( r ) {
return [ 'hgetall' , coin + ':shares:times' + r . height ]
2014-03-12 17:09:12 -07:00
} ) ;
2014-05-02 14:59:46 -07:00
startRedisTimer ( ) ;
2017-05-03 22:20:08 -07:00
redisClient . multi ( timeLookups ) . exec ( function ( error , allWorkerTimes ) {
2014-05-02 14:59:46 -07:00
endRedisTimer ( ) ;
2014-03-11 18:56:19 -07:00
if ( error ) {
2017-05-03 22:20:08 -07:00
callback ( 'Check finished - redis error with multi get rounds time' ) ;
2014-03-11 18:56:19 -07:00
return ;
}
2017-05-03 22:20:08 -07:00
var shareLookups = rounds . map ( function ( r ) {
return [ 'hgetall' , coin + ':shares:round' + r . height ] ;
} ) ;
startRedisTimer ( ) ;
redisClient . multi ( shareLookups ) . exec ( function ( error , allWorkerShares ) {
endRedisTimer ( ) ;
if ( error ) {
callback ( 'Check finished - redis error with multi get rounds share' ) ;
2014-03-27 15:29:43 -07:00
return ;
2014-03-11 20:47:14 -07:00
}
2017-04-06 15:23:37 -07:00
2017-05-03 22:20:08 -07:00
// error detection
var err = null ;
2014-03-27 15:29:43 -07:00
2017-05-03 22:20:08 -07:00
// total shares
rounds . forEach ( function ( round , i ) {
var workerShares = allWorkerShares [ i ] ;
if ( ! workerShares ) {
err = true ;
logger . error ( logSystem , logComponent , 'No worker shares for round: ' + round . height + ' blockHash: ' + round . blockHash ) ;
return ;
}
var workerTimes = allWorkerTimes [ i ] ;
switch ( round . category ) {
case 'kicked' :
case 'orphan' :
round . workerShares = workerShares ;
break ;
case 'generate' :
// TODO: make tx fees dynamic
var feeSatoshi = coinsToSatoshies ( fee ) ;
var reward = coinsToSatoshies ( round . reward ) - feeSatoshi ;
var totalShares = parseFloat ( 0 ) ;
var sharesLost = parseFloat ( 0 ) ;
// find most time spent in this round by single worker
maxTime = 0 ;
for ( var workerAddress in workerTimes ) {
if ( maxTime < parseFloat ( workerTimes [ workerAddress ] ) )
maxTime = parseFloat ( workerTimes [ workerAddress ] ) ;
}
// total up shares for round
for ( var workerAddress in workerShares ) {
var worker = workers [ workerAddress ] = ( workers [ workerAddress ] || { } ) ;
var shares = parseFloat ( ( workerShares [ workerAddress ] || 0 ) ) ;
// if pplnt mode
if ( pplntEnabled === true && maxTime > 0 ) {
var tshares = shares ;
var lost = parseFloat ( 0 ) ;
var address = workerAddress . split ( '.' ) [ 0 ] ;
if ( workerTimes [ address ] != null && parseFloat ( workerTimes [ address ] ) > 0 ) {
var timePeriod = roundTo ( parseFloat ( workerTimes [ address ] || 1 ) / maxTime , 2 ) ;
if ( timePeriod > 0 && timePeriod < pplntTimeQualify ) {
var lost = shares - ( shares * timePeriod ) ;
sharesLost += lost ;
shares = Math . max ( shares - lost , 0 ) ;
logger . warning ( logSystem , logComponent , 'PPLNT: Reduced shares for ' + workerAddress + ' round:' + round . height + ' maxTime:' + maxTime + 'sec timePeriod:' + roundTo ( timePeriod , 6 ) + ' shares:' + tshares + ' lost:' + lost + ' new:' + shares ) ;
}
if ( timePeriod > 1.0 ) {
err = true ;
logger . error ( logSystem , logComponent , 'Time share period is greater than 1.0 for ' + workerAddress + ' round:' + round . height + ' blockHash:' + round . blockHash ) ;
2017-05-25 13:40:53 -07:00
return ;
2017-05-03 22:20:08 -07:00
}
2017-05-25 13:40:53 -07:00
worker . timePeriod = timePeriod ;
2017-05-03 22:20:08 -07:00
} else {
logger . warning ( logSystem , logComponent , 'PPLNT: Missing time share period for ' + workerAddress + ', miner shares qualified in round ' + round . height ) ;
}
}
worker . roundShares = shares ;
worker . totalShares = parseFloat ( worker . totalShares || 0 ) + shares ;
totalShares += shares ;
}
2017-05-07 09:57:19 -07:00
//console.log('--REWARD DEBUG--------------');
2017-05-03 22:20:08 -07:00
// calculate rewards for round
var totalAmount = 0 ;
for ( var workerAddress in workerShares ) {
var worker = workers [ workerAddress ] = ( workers [ workerAddress ] || { } ) ;
var percent = parseFloat ( worker . roundShares ) / totalShares ;
if ( percent > 1.0 ) {
err = true ;
logger . error ( logSystem , logComponent , 'Share percent is greater than 1.0 for ' + workerAddress + ' round:' + round . height + ' blockHash:' + round . blockHash ) ;
return ;
}
// calculate workers reward for this round
var workerRewardTotal = Math . round ( reward * percent ) ;
// add to total reward for worker
worker . reward = ( worker . reward || 0 ) + workerRewardTotal ;
// add to total amount sent to all workers
totalAmount += worker . reward ;
2017-05-07 09:57:19 -07:00
//console.log('rewardAmount: '+workerAddress+' '+workerRewardTotal);
//console.log('totalAmount: '+workerAddress+' '+worker.reward);
2017-05-03 22:20:08 -07:00
}
2017-05-07 09:57:19 -07:00
//console.log('totalAmount: '+totalAmount);
//console.log('blockHeight: '+round.height);
//console.log('blockReward: '+reward);
//console.log('totalShares: '+totalShares);
//console.log('sharesLost: '+sharesLost);
//console.log('----------------------------');
2017-05-03 22:20:08 -07:00
break ;
}
} ) ;
// if there was no errors
if ( err === null ) {
// continue payments
callback ( null , workers , rounds , addressAccount ) ;
} else {
// stop waterfall flow, do not process payments
callback ( true ) ;
2014-03-22 19:08:33 -07:00
}
} ) ;
2017-05-03 22:20:08 -07:00
2014-03-11 20:47:14 -07:00
} ) ;
2017-05-03 22:20:08 -07:00
2014-03-11 20:47:14 -07:00
} ,
2017-05-03 22:20:08 -07:00
/ *
Step 4 - Generate RPC commands to send payments
When deciding the sent balance , it the difference should be - 1 * amount they had in db ,
If not sending the balance , the differnce should be + ( the amount they earned this round )
* /
2014-05-02 14:59:46 -07:00
function ( workers , rounds , addressAccount , callback ) {
2014-03-11 21:01:33 -07:00
2017-05-30 15:15:34 -07:00
var tries = 0 ;
2014-05-02 14:59:46 -07:00
var trySend = function ( withholdPercent ) {
var addressAmounts = { } ;
2017-04-25 22:15:41 -07:00
var balanceAmounts = { } ;
2017-05-07 09:57:19 -07:00
var shareAmounts = { } ;
2017-05-25 13:40:53 -07:00
var timePeriods = { } ;
2017-03-08 20:06:39 -08:00
var minerTotals = { } ;
2014-05-02 14:59:46 -07:00
var totalSent = 0 ;
2017-03-08 20:06:39 -08:00
var totalShares = 0 ;
2017-05-30 15:15:34 -07:00
// track attempts made, calls to trySend...
tries ++ ;
2017-03-08 20:06:39 -08:00
// total up miner's balances
2014-05-02 14:59:46 -07:00
for ( var w in workers ) {
var worker = workers [ w ] ;
2017-03-08 20:06:39 -08:00
totalShares += ( worker . totalShares || 0 )
2014-05-02 14:59:46 -07:00
worker . balance = worker . balance || 0 ;
worker . reward = worker . reward || 0 ;
2017-05-03 22:20:08 -07:00
// get miner payout totals
var toSendSatoshis = Math . round ( ( worker . balance + worker . reward ) * ( 1 - withholdPercent ) ) ;
2017-07-18 22:06:47 -07:00
var address = worker . address = ( worker . address || getProperAddress ( w . split ( '.' ) [ 0 ] ) ) . trim ( ) ;
2017-03-08 20:06:39 -08:00
if ( minerTotals [ address ] != null && minerTotals [ address ] > 0 ) {
2017-05-03 22:20:08 -07:00
minerTotals [ address ] += toSendSatoshis ;
2017-03-08 20:06:39 -08:00
} else {
2017-05-03 22:20:08 -07:00
minerTotals [ address ] = toSendSatoshis ;
2017-03-08 20:06:39 -08:00
}
}
// now process each workers balance, and pay the miner
for ( var w in workers ) {
var worker = workers [ w ] ;
worker . balance = worker . balance || 0 ;
worker . reward = worker . reward || 0 ;
2017-05-03 22:20:08 -07:00
var toSendSatoshis = Math . round ( ( worker . balance + worker . reward ) * ( 1 - withholdPercent ) ) ;
2017-07-18 22:06:47 -07:00
var address = worker . address = ( worker . address || getProperAddress ( w . split ( '.' ) [ 0 ] ) ) . trim ( ) ;
2017-03-08 20:06:39 -08:00
// if miners total is enough, go ahead and add this worker balance
2017-05-03 22:20:08 -07:00
if ( minerTotals [ address ] >= minPaymentSatoshis ) {
totalSent += toSendSatoshis ;
// send funds
worker . sent = satoshisToCoins ( toSendSatoshis ) ;
worker . balanceChange = Math . min ( worker . balance , toSendSatoshis ) * - 1 ;
2017-03-08 20:06:39 -08:00
// multiple workers may have same address, add them up
if ( addressAmounts [ address ] != null && addressAmounts [ address ] > 0 ) {
2017-05-03 22:20:08 -07:00
addressAmounts [ address ] = coinsRound ( addressAmounts [ address ] + worker . sent ) ;
2017-03-08 20:06:39 -08:00
} else {
addressAmounts [ address ] = worker . sent ;
}
2017-05-03 22:20:08 -07:00
} else {
// add to balance, not enough minerals
2014-05-02 14:59:46 -07:00
worker . sent = 0 ;
2017-05-03 22:20:08 -07:00
worker . balanceChange = Math . max ( toSendSatoshis - worker . balance , 0 ) ;
2017-04-25 22:15:41 -07:00
// track balance changes
2017-05-07 09:57:19 -07:00
if ( worker . balanceChange > 0 ) {
if ( balanceAmounts [ address ] != null && balanceAmounts [ address ] > 0 ) {
balanceAmounts [ address ] = coinsRound ( balanceAmounts [ address ] + satoshisToCoins ( worker . balanceChange ) ) ;
} else {
balanceAmounts [ address ] = satoshisToCoins ( worker . balanceChange ) ;
}
}
}
// track share work
if ( worker . totalShares > 0 ) {
if ( shareAmounts [ address ] != null && shareAmounts [ address ] > 0 ) {
shareAmounts [ address ] += worker . totalShares ;
2017-04-25 22:15:41 -07:00
} else {
2017-05-07 09:57:19 -07:00
shareAmounts [ address ] = worker . totalShares ;
2017-04-25 22:15:41 -07:00
}
2014-05-02 14:59:46 -07:00
}
2014-03-20 15:25:59 -07:00
}
2017-04-06 15:23:37 -07:00
2017-03-08 20:06:39 -08:00
// if no payouts...continue to next set of callbacks
2014-05-02 14:59:46 -07:00
if ( Object . keys ( addressAmounts ) . length === 0 ) {
callback ( null , workers , rounds ) ;
return ;
2014-03-20 15:25:59 -07:00
}
2017-05-03 22:20:08 -07:00
2017-05-30 15:15:34 -07:00
// do final rounding of payments per address
// this forces amounts to be valid (0.12345678)
for ( var a in addressAmounts ) {
addressAmounts [ a ] = coinsRound ( addressAmounts [ a ] ) ;
}
2017-05-03 23:41:00 -07:00
// POINT OF NO RETURN! GOOD LUCK!
// WE ARE SENDING PAYMENT CMD TO DAEMON
// perform the sendmany operation .. addressAccount
2017-05-30 15:15:34 -07:00
var rpccallTracking = 'sendmany "" ' + JSON . stringify ( addressAmounts ) ;
2017-03-08 20:06:39 -08:00
daemon . cmd ( 'sendmany' , [ "" , addressAmounts ] , function ( result ) {
// check for failed payments, there are many reasons
2014-05-02 14:59:46 -07:00
if ( result . error && result . error . code === - 6 ) {
2017-05-30 15:15:34 -07:00
// check if it is because we don't have enough funds
if ( result . error . message && result . error . message . includes ( "insufficient funds" ) ) {
// only try up to XX times (Max, 0.5%)
if ( tries < 5 ) {
// we thought we had enough funds to send payments, but apparently not...
// try decreasing payments by a small percent to cover unexpected tx fees?
var higherPercent = withholdPercent + 0.001 ; // 0.1%
logger . warning ( logSystem , logComponent , 'Insufficient funds (??) for payments (' + satoshisToCoins ( totalSent ) + '), decreasing rewards by ' + ( higherPercent * 100 ) . toFixed ( 1 ) + '% and retrying' ) ;
trySend ( higherPercent ) ;
} else {
logger . warning ( logSystem , logComponent , rpccallTracking ) ;
logger . error ( logSystem , logComponent , "Error sending payments, decreased rewards by too much!!!" ) ;
callback ( true ) ;
}
} else {
// there was some fatal payment error?
logger . warning ( logSystem , logComponent , rpccallTracking ) ;
logger . error ( logSystem , logComponent , 'Error sending payments ' + JSON . stringify ( result . error ) ) ;
// payment failed, prevent updates to redis
callback ( true ) ;
}
2017-05-03 23:41:00 -07:00
return ;
2014-05-02 14:59:46 -07:00
}
2017-03-08 20:06:39 -08:00
else if ( result . error && result . error . code === - 5 ) {
// invalid address specified in addressAmounts array
2017-05-30 15:15:34 -07:00
logger . warning ( logSystem , logComponent , rpccallTracking ) ;
logger . error ( logSystem , logComponent , 'Error sending payments ' + JSON . stringify ( result . error ) ) ;
2017-05-03 23:41:00 -07:00
// payment failed, prevent updates to redis
2017-03-08 20:06:39 -08:00
callback ( true ) ;
return ;
}
else if ( result . error && result . error . message != null ) {
2017-05-30 15:15:34 -07:00
// invalid amount, others?
logger . warning ( logSystem , logComponent , rpccallTracking ) ;
logger . error ( logSystem , logComponent , 'Error sending payments ' + JSON . stringify ( result . error ) ) ;
2017-05-03 23:41:00 -07:00
// payment failed, prevent updates to redis
2017-03-08 20:06:39 -08:00
callback ( true ) ;
return ;
}
2014-05-02 14:59:46 -07:00
else if ( result . error ) {
2017-05-03 23:41:00 -07:00
// unknown error
2017-03-08 20:06:39 -08:00
logger . error ( logSystem , logComponent , 'Error sending payments ' + JSON . stringify ( result . error ) ) ;
2017-05-03 23:41:00 -07:00
// payment failed, prevent updates to redis
2014-05-02 14:59:46 -07:00
callback ( true ) ;
2017-03-08 20:06:39 -08:00
return ;
2014-05-02 14:59:46 -07:00
}
else {
2017-04-06 15:23:37 -07:00
2017-03-08 20:06:39 -08:00
// make sure sendmany gives us back a txid
var txid = null ;
if ( result . response ) {
txid = result . response ;
}
if ( txid != null ) {
2017-04-06 15:23:37 -07:00
2017-03-08 20:06:39 -08:00
// it worked, congrats on your pools payout ;)
2017-05-03 22:20:08 -07:00
logger . special ( logSystem , logComponent , 'Sent ' + satoshisToCoins ( totalSent )
2017-03-08 20:06:39 -08:00
+ ' to ' + Object . keys ( addressAmounts ) . length + ' miners; txid: ' + txid ) ;
2017-04-06 15:23:37 -07:00
2017-03-08 20:06:39 -08:00
if ( withholdPercent > 0 ) {
logger . warning ( logSystem , logComponent , 'Had to withhold ' + ( withholdPercent * 100 )
+ '% of reward from miners to cover transaction fees. '
+ 'Fund pool wallet with coins to prevent this from happening' ) ;
}
2017-04-06 15:23:37 -07:00
2017-03-08 20:06:39 -08:00
// save payments data to redis
2017-05-03 22:20:08 -07:00
var paymentBlocks = rounds . filter ( function ( r ) { return r . category == 'generate' ; } ) . map ( function ( r ) {
2017-03-08 20:06:39 -08:00
return parseInt ( r . height ) ;
} ) ;
2017-05-03 22:20:08 -07:00
2017-03-08 20:06:39 -08:00
var paymentsUpdate = [ ] ;
2017-05-07 09:57:19 -07:00
var paymentsData = { time : Date . now ( ) , txid : txid , shares : totalShares , paid : satoshisToCoins ( totalSent ) , miners : Object . keys ( addressAmounts ) . length , blocks : paymentBlocks , amounts : addressAmounts , balances : balanceAmounts , work : shareAmounts } ;
2017-03-08 20:06:39 -08:00
paymentsUpdate . push ( [ 'zadd' , logComponent + ':payments' , Date . now ( ) , JSON . stringify ( paymentsData ) ] ) ;
startRedisTimer ( ) ;
redisClient . multi ( paymentsUpdate ) . exec ( function ( error , payments ) {
endRedisTimer ( ) ;
if ( error ) {
logger . error ( logSystem , logComponent , 'Error redis save payments data ' + JSON . stringify ( payments ) ) ;
}
2017-05-03 22:20:08 -07:00
// perform final redis updates
2017-03-08 20:06:39 -08:00
callback ( null , workers , rounds ) ;
} ) ;
2017-04-06 15:23:37 -07:00
2017-03-08 20:06:39 -08:00
} else {
2017-04-06 15:23:37 -07:00
2017-03-08 20:06:39 -08:00
clearInterval ( paymentInterval ) ;
2017-04-06 15:23:37 -07:00
2017-03-08 20:06:39 -08:00
logger . error ( logSystem , logComponent , 'Error RPC sendmany did not return txid '
+ JSON . stringify ( result ) + 'Disabling payment processing to prevent possible double-payouts.' ) ;
callback ( true ) ;
return ;
2014-03-22 19:08:33 -07:00
}
2014-03-20 15:25:59 -07:00
}
2014-05-02 14:59:46 -07:00
} , true , true ) ;
} ;
2017-03-08 20:06:39 -08:00
2014-05-02 14:59:46 -07:00
trySend ( 0 ) ;
2014-03-22 19:08:33 -07:00
2014-05-02 14:59:46 -07:00
} ,
2017-05-03 22:20:08 -07:00
/ *
Step 5 - Final redis commands
* /
2014-05-02 14:59:46 -07:00
function ( workers , rounds , callback ) {
2017-04-01 17:04:04 -07:00
var totalPaid = parseFloat ( 0 ) ;
2014-05-02 14:59:46 -07:00
var balanceUpdateCommands = [ ] ;
var workerPayoutsCommand = [ ] ;
2017-04-01 17:04:04 -07:00
// update worker paid/balance stats
2014-05-02 14:59:46 -07:00
for ( var w in workers ) {
var worker = workers [ w ] ;
if ( worker . balanceChange !== 0 ) {
balanceUpdateCommands . push ( [
2014-05-07 10:58:56 -07:00
'hincrbyfloat' ,
2014-05-09 16:43:11 -07:00
coin + ':balances' ,
2014-05-02 14:59:46 -07:00
w ,
2017-05-03 22:20:08 -07:00
satoshisToCoins ( worker . balanceChange )
2014-05-02 14:59:46 -07:00
] ) ;
2014-03-20 15:25:59 -07:00
}
2014-05-02 14:59:46 -07:00
if ( worker . sent !== 0 ) {
2017-05-03 22:20:08 -07:00
workerPayoutsCommand . push ( [ 'hincrbyfloat' , coin + ':payouts' , w , coinsRound ( worker . sent ) ] ) ;
totalPaid = coinsRound ( totalPaid + worker . sent ) ;
2014-03-20 15:25:59 -07:00
}
2014-05-02 14:59:46 -07:00
}
2014-03-20 15:25:59 -07:00
2014-05-02 14:59:46 -07:00
var movePendingCommands = [ ] ;
var roundsToDelete = [ ] ;
var orphanMergeCommands = [ ] ;
2017-05-03 22:20:08 -07:00
var confirmsToDelete = [ ] ;
2014-05-07 00:11:27 -07:00
var moveSharesToCurrent = function ( r ) {
var workerShares = r . workerShares ;
2017-03-08 20:06:39 -08:00
if ( workerShares != null ) {
Object . keys ( workerShares ) . forEach ( function ( worker ) {
orphanMergeCommands . push ( [ 'hincrby' , coin + ':shares:roundCurrent' , worker , workerShares [ worker ] ] ) ;
} ) ;
}
2014-05-07 00:11:27 -07:00
} ;
2017-04-01 17:04:04 -07:00
// handle the round
2014-05-02 14:59:46 -07:00
rounds . forEach ( function ( r ) {
switch ( r . category ) {
2014-05-07 00:11:27 -07:00
case 'kicked' :
2017-05-03 22:20:08 -07:00
confirmsToDelete . push ( [ 'hdel' , coin + ':blocksPendingConfirms' , r . blockHash ] ) ;
2014-05-09 16:43:11 -07:00
movePendingCommands . push ( [ 'smove' , coin + ':blocksPending' , coin + ':blocksKicked' , r . serialized ] ) ;
2014-05-02 14:59:46 -07:00
case 'orphan' :
2017-05-03 22:20:08 -07:00
confirmsToDelete . push ( [ 'hdel' , coin + ':blocksPendingConfirms' , r . blockHash ] ) ;
2014-05-09 16:43:11 -07:00
movePendingCommands . push ( [ 'smove' , coin + ':blocksPending' , coin + ':blocksOrphaned' , r . serialized ] ) ;
2014-05-07 00:11:27 -07:00
if ( r . canDeleteShares ) {
moveSharesToCurrent ( r ) ;
2014-05-09 16:43:11 -07:00
roundsToDelete . push ( coin + ':shares:round' + r . height ) ;
2017-05-03 22:20:08 -07:00
roundsToDelete . push ( coin + ':shares:times' + r . height ) ;
2014-05-07 00:11:27 -07:00
}
return ;
2014-05-02 14:59:46 -07:00
case 'generate' :
2017-05-03 22:20:08 -07:00
confirmsToDelete . push ( [ 'hdel' , coin + ':blocksPendingConfirms' , r . blockHash ] ) ;
2014-05-09 16:43:11 -07:00
movePendingCommands . push ( [ 'smove' , coin + ':blocksPending' , coin + ':blocksConfirmed' , r . serialized ] ) ;
2016-12-18 20:11:55 -08:00
roundsToDelete . push ( coin + ':shares:round' + r . height ) ;
2017-05-03 22:20:08 -07:00
roundsToDelete . push ( coin + ':shares:times' + r . height ) ;
2014-05-07 00:11:27 -07:00
return ;
2014-05-02 14:59:46 -07:00
}
} ) ;
2014-03-20 15:25:59 -07:00
2014-05-02 14:59:46 -07:00
var finalRedisCommands = [ ] ;
2014-03-22 19:08:33 -07:00
2014-05-02 14:59:46 -07:00
if ( movePendingCommands . length > 0 )
finalRedisCommands = finalRedisCommands . concat ( movePendingCommands ) ;
2014-03-22 19:08:33 -07:00
2014-05-02 14:59:46 -07:00
if ( orphanMergeCommands . length > 0 )
finalRedisCommands = finalRedisCommands . concat ( orphanMergeCommands ) ;
2014-03-20 15:25:59 -07:00
2014-05-02 14:59:46 -07:00
if ( balanceUpdateCommands . length > 0 )
finalRedisCommands = finalRedisCommands . concat ( balanceUpdateCommands ) ;
2014-03-22 19:08:33 -07:00
2014-05-02 14:59:46 -07:00
if ( workerPayoutsCommand . length > 0 )
finalRedisCommands = finalRedisCommands . concat ( workerPayoutsCommand ) ;
2014-04-02 13:43:58 -07:00
2014-05-02 14:59:46 -07:00
if ( roundsToDelete . length > 0 )
finalRedisCommands . push ( [ 'del' ] . concat ( roundsToDelete ) ) ;
2014-03-22 19:08:33 -07:00
2017-05-03 22:20:08 -07:00
if ( confirmsToDelete . length > 0 )
finalRedisCommands = finalRedisCommands . concat ( confirmsToDelete ) ;
2014-05-02 14:59:46 -07:00
if ( totalPaid !== 0 )
2017-05-03 22:20:08 -07:00
finalRedisCommands . push ( [ 'hincrbyfloat' , coin + ':stats' , 'totalPaid' , totalPaid ] ) ;
2014-03-22 19:08:33 -07:00
2014-05-02 14:59:46 -07:00
if ( finalRedisCommands . length === 0 ) {
callback ( ) ;
return ;
}
2014-03-09 19:31:58 -07:00
2014-05-02 14:59:46 -07:00
startRedisTimer ( ) ;
redisClient . multi ( finalRedisCommands ) . exec ( function ( error , results ) {
endRedisTimer ( ) ;
2017-05-03 22:20:08 -07:00
if ( error ) {
2014-05-02 14:59:46 -07:00
clearInterval ( paymentInterval ) ;
2017-05-03 22:20:08 -07:00
2014-05-02 14:59:46 -07:00
logger . error ( logSystem , logComponent ,
'Payments sent but could not update redis. ' + JSON . stringify ( error )
+ ' Disabling payment processing to prevent possible double-payouts. The redis commands in '
+ coin + '_finalRedisCommands.txt must be ran manually' ) ;
2017-05-03 22:20:08 -07:00
2014-05-02 14:59:46 -07:00
fs . writeFile ( coin + '_finalRedisCommands.txt' , JSON . stringify ( finalRedisCommands ) , function ( err ) {
logger . error ( 'Could not write finalRedisCommands.txt, you are fucked.' ) ;
} ) ;
2014-04-02 03:56:13 -07:00
}
2014-05-02 14:59:46 -07:00
callback ( ) ;
2014-04-02 03:56:13 -07:00
} ) ;
2014-03-11 18:56:19 -07:00
}
2014-03-27 15:29:43 -07:00
2014-05-02 14:59:46 -07:00
] , function ( ) {
2014-03-27 15:29:43 -07:00
var paymentProcessTime = Date . now ( ) - startPaymentProcess ;
2014-05-02 14:59:46 -07:00
logger . debug ( logSystem , logComponent , 'Finished interval - time spent: '
+ paymentProcessTime + 'ms total, ' + timeSpentRedis + 'ms redis, '
+ timeSpentRPC + 'ms daemon RPC' ) ;
2014-03-27 15:29:43 -07:00
2014-03-09 19:31:58 -07:00
} ) ;
2014-03-11 18:56:19 -07:00
} ;
// Placeholder hook for payout-address validation/translation.
// Currently a pass-through: the worker's address is used as-is.
var getProperAddress = function(addr) {
    return addr;
};
2014-05-03 09:29:31 -07:00
}