2014-05-29 11:52:31 -07:00
var fs = require ( 'fs' ) ;
2014-03-09 19:31:58 -07:00
var redis = require ( 'redis' ) ;
2014-03-11 18:56:19 -07:00
var async = require ( 'async' ) ;
2014-03-09 19:31:58 -07:00
var Stratum = require ( 'stratum-pool' ) ;
2014-04-26 15:24:06 -07:00
var util = require ( 'stratum-pool/lib/util.js' ) ;
2014-03-09 19:31:58 -07:00
module . exports = function ( logger ) {
var poolConfigs = JSON . parse ( process . env . pools ) ;
2014-03-30 16:04:54 -07:00
var enabledPools = [ ] ;
2014-03-09 19:31:58 -07:00
Object . keys ( poolConfigs ) . forEach ( function ( coin ) {
2014-03-30 16:04:54 -07:00
var poolOptions = poolConfigs [ coin ] ;
2014-05-02 14:59:46 -07:00
if ( poolOptions . paymentProcessing &&
poolOptions . paymentProcessing . enabled )
2014-03-30 16:04:54 -07:00
enabledPools . push ( coin ) ;
2014-03-09 19:31:58 -07:00
} ) ;
2014-03-30 16:04:54 -07:00
async . filter ( enabledPools , function ( coin , callback ) {
SetupForPool ( logger , poolConfigs [ coin ] , function ( setupResults ) {
2016-11-27 11:27:08 -08:00
callback ( null , setupResults ) ;
2014-03-30 16:04:54 -07:00
} ) ;
2016-11-27 11:27:08 -08:00
} , function ( err , results ) {
results . forEach ( function ( coin ) {
2014-03-09 19:31:58 -07:00
2014-03-30 16:04:54 -07:00
var poolOptions = poolConfigs [ coin ] ;
2014-05-02 14:59:46 -07:00
var processingConfig = poolOptions . paymentProcessing ;
2014-03-30 16:04:54 -07:00
var logSystem = 'Payments' ;
var logComponent = coin ;
2014-03-09 19:31:58 -07:00
2014-03-30 16:04:54 -07:00
logger . debug ( logSystem , logComponent , 'Payment processing setup to run every '
+ processingConfig . paymentInterval + ' second(s) with daemon ('
+ processingConfig . daemon . user + '@' + processingConfig . daemon . host + ':' + processingConfig . daemon . port
2014-05-02 14:59:46 -07:00
+ ') and redis (' + poolOptions . redis . host + ':' + poolOptions . redis . port + ')' ) ;
2014-03-09 19:31:58 -07:00
2014-03-30 16:04:54 -07:00
} ) ;
} ) ;
} ;
/**
 * Set up payment processing for a single pool/coin. Validates the configured
 * addresses and detects coin precision, then (via asyncComplete) starts the
 * recurring payment timer.
 * setupFinished(success) is called once with true/false when setup completes.
 */
function SetupForPool(logger, poolOptions, setupFinished) {

    var coin = poolOptions.coin.name;
    var processingConfig = poolOptions.paymentProcessing;

    var logSystem = 'Payments';
    var logComponent = coin;

    // number of z_sendmany (shield/unshield) operations currently in flight
    var opidCount = 0;

    // minimum confirmations before funds are shielded / paid out
    var minConfShield = 3;
    var minConfPayout = 3;

    // cap on how many mined blocks are settled in a single payment run
    var maxBlocksPerPayment = processingConfig.maxBlocksPerPayment || 3;

    // some equihash coins do not require coinbase shielding
    var requireShielding = poolOptions.coin.requireShielding === true;

    // tx fee reserve per block reward; defaults to 0.0004 when not configured
    var fee = parseFloat(poolOptions.coin.txfee) || parseFloat(0.0004);

    logger.debug(logSystem, logComponent, logComponent + ' requireShielding: ' + requireShielding);
    logger.debug(logSystem, logComponent, logComponent + ' payments txfee reserve: ' + fee);
    logger.debug(logSystem, logComponent, logComponent + ' maxBlocksPerPayment: ' + maxBlocksPerPayment);

    // RPC interface to the coin daemon used for all wallet/chain calls below
    var daemon = new Stratum.daemon.interface([processingConfig.daemon], function (severity, message) {
        logger[severity](logSystem, logComponent, message);
    });

    var redisClient = redis.createClient(poolOptions.redis.port, poolOptions.redis.host);

    // filled in by getBalance() once the coin's satoshi magnitude is detected
    var magnitude;
    var minPaymentSatoshis;
    var coinPrecision;

    // handle of the recurring processPayments timer (see asyncComplete)
    var paymentInterval;
2014-03-30 16:04:54 -07:00
2016-11-27 11:27:08 -08:00
// Verify the daemon owns the pool's main address; errors abort setup.
function validateAddress(callback) {
    daemon.cmd('validateaddress', [poolOptions.address], function (result) {
        if (result.error) {
            logger.error(logSystem, logComponent, 'Error with payment processing daemon ' + JSON.stringify(result.error));
            callback(true);
            return;
        }
        var response = result.response;
        if (!response || !response.ismine) {
            logger.error(logSystem, logComponent,
                'Daemon does not own pool address - payment processing can not be done with this daemon, '
                + JSON.stringify(response));
            callback(true);
            return;
        }
        callback();
    }, true);
}
// Verify the daemon owns the pool's payout t-address; errors abort setup.
function validateTAddress(callback) {
    daemon.cmd('validateaddress', [poolOptions.tAddress], function (result) {
        if (result.error) {
            logger.error(logSystem, logComponent, 'Error with payment processing daemon ' + JSON.stringify(result.error));
            callback(true);
            return;
        }
        var response = result.response;
        if (!response || !response.ismine) {
            logger.error(logSystem, logComponent,
                'Daemon does not own pool address - payment processing can not be done with this daemon, '
                + JSON.stringify(response));
            callback(true);
            return;
        }
        callback();
    }, true);
}
// Verify the daemon owns the pool's shielded z-address; errors abort setup.
function validateZAddress(callback) {
    daemon.cmd('z_validateaddress', [poolOptions.zAddress], function (result) {
        if (result.error) {
            logger.error(logSystem, logComponent, 'Error with payment processing daemon ' + JSON.stringify(result.error));
            callback(true);
            return;
        }
        var response = result.response;
        if (!response || !response.ismine) {
            logger.error(logSystem, logComponent,
                'Daemon does not own pool address - payment processing can not be done with this daemon, '
                + JSON.stringify(response));
            callback(true);
            return;
        }
        callback();
    }, true);
}
function getBalance ( callback ) {
daemon . cmd ( 'getbalance' , [ ] , function ( result ) {
if ( result . error ) {
return callback ( true ) ;
}
try {
var d = result . data . split ( 'result":' ) [ 1 ] . split ( ',' ) [ 0 ] . split ( '.' ) [ 1 ] ;
magnitude = parseInt ( '10' + new Array ( d . length ) . join ( '0' ) ) ;
minPaymentSatoshis = parseInt ( processingConfig . minimumPayment * magnitude ) ;
coinPrecision = magnitude . toString ( ) . length - 1 ;
}
catch ( e ) {
logger . error ( logSystem , logComponent , 'Error detecting number of satoshis in a coin, cannot do payment processing. Tried parsing: ' + result . data ) ;
return callback ( true ) ;
}
callback ( ) ;
} , true , true ) ;
}
/**
 * Final callback for the startup validation steps (address validation and
 * balance/precision detection). On any validation error the pool is reported
 * as not set up; otherwise the recurring payment timer is (re)started and an
 * immediate first payment run is scheduled.
 * @param {*} err - truthy when any parallel validation step failed
 */
function asyncComplete(err) {

    if (err) {
        setupFinished(false);
        return;
    }

    // clear any previous timer so re-running setup never double-schedules
    if (paymentInterval) {
        clearInterval(paymentInterval);
    }

    // original wrapped this call in `try { ... } catch (e) { throw e; }`,
    // which is a no-op rethrow; removed for clarity (behavior unchanged)
    paymentInterval = setInterval(function () {
        processPayments();
    }, processingConfig.paymentInterval * 1000);

    // kick off the first payment run almost immediately
    setTimeout(processPayments, 100);
    setupFinished(true);
}
2014-03-30 16:04:54 -07:00
2017-04-12 11:47:51 -07:00
if ( requireShielding === true ) {
async . parallel ( [ validateAddress , validateTAddress , validateZAddress , getBalance ] , asyncComplete ) ;
} else {
async . parallel ( [ validateAddress , validateTAddress , getBalance ] , asyncComplete ) ;
}
2016-11-27 14:54:08 -08:00
//get t_address coinbalance
2016-12-27 22:08:31 -08:00
/**
 * Get the confirmed t-address balance (in satoshis) via RPC listunspent.
 * @param addr        t-address to query, or null to scan the whole payout wallet
 * @param notAddr     address whose utxos are excluded from the sum (may be null)
 * @param minConf     minimum confirmations for a utxo to be counted
 * @param displayBool when true, log the resulting balance
 * @param callback    callback(err, balance) - balance is satoshis formatted as
 *                    a string with 8 decimal places (as the original returned)
 */
function listUnspent(addr, notAddr, minConf, displayBool, callback) {
    var args;
    if (addr !== null) {
        args = [minConf, 99999999, [addr]];
    } else {
        addr = 'Payout wallet';
        args = [minConf, 99999999];
    }
    daemon.cmd('listunspent', args, function (result) {
        //Check if payments failed because wallet doesn't have enough coins to pay for tx fees
        if (result.error) {
            logger.error(logSystem, logComponent, 'Error trying to get t-addr [' + addr + '] balance with RPC listunspent.'
                + JSON.stringify(result.error));
            // FIX: the original replaced `callback` with a no-op BEFORE calling
            // it, so errors were silently dropped and never reached the caller.
            // Callers (e.g. sendTToZ) explicitly handle a `true` error flag.
            callback(true);
        }
        else {
            var tBalance = 0;
            if (result[0].response != null && result[0].response.length > 0) {
                for (var i = 0, len = result[0].response.length; i < len; i++) {
                    if (result[0].response[i].address !== notAddr) {
                        tBalance = tBalance + (result[0].response[i].amount * magnitude);
                    }
                }
            }
            if (displayBool === true) {
                logger.special(logSystem, logComponent, addr + ' balance of ' + (tBalance / magnitude).toFixed(8));
            }
            callback(null, tBalance.toFixed(8));
        }
    });
}
/**
 * Get the z-address balance (in satoshis) via RPC z_getbalance.
 * @param addr        z-address to query
 * @param minConf     minimum confirmations for funds to be counted
 * @param displayBool when true, log the balance (address is abbreviated)
 * @param callback    callback(err, balance) - balance is satoshis formatted as
 *                    a string with 8 decimal places (as the original returned)
 */
function listUnspentZ(addr, minConf, displayBool, callback) {
    daemon.cmd('z_getbalance', [addr, minConf], function (result) {
        //Check if payments failed because wallet doesn't have enough coins to pay for tx fees
        if (result[0].error) {
            logger.error(logSystem, logComponent, 'Error trying to get coin balance with RPC z_getbalance.' + JSON.stringify(result[0].error));
            // FIX: the original replaced `callback` with a no-op BEFORE calling
            // it, so errors were silently dropped and never reached the caller.
            // Callers (e.g. sendZToT) explicitly handle a `true` error flag.
            callback(true);
        }
        else {
            // z_getbalance returns whole coins; convert to satoshis below
            var zBalance = 0;
            if (result[0].response != null) {
                zBalance = result[0].response;
            }
            if (displayBool === true) {
                logger.special(logSystem, logComponent, addr.substring(0, 14) + '...' + addr.substring(addr.length - 14) + ' balance: ' + (zBalance).toFixed(8));
            }
            callback(null, (zBalance * magnitude).toFixed(8));
        }
    });
}
// send t_address balance to z_address (shield mined coinbase funds)
// NOTE(review): `callback` doubles as the value forwarded by listUnspent's
// callback - it may be `true` (upstream error), `null` (success) or possibly
// a function; it is NOT a conventional node callback here.
function sendTToZ(callback, tBalance) {
    // upstream listUnspent reported an error - nothing to do
    if (callback === true)
        return;
    // tBalance is in satoshis; keep 10000 satoshis behind to cover the tx fee
    if ((tBalance - 10000) < 0)
        return;

    // do not allow more than a single z_sendmany operation at a time
    if (opidCount > 0) {
        logger.warning(logSystem, logComponent, 'sendTToZ is waiting, too many z_sendmany operations already in progress.');
        return;
    }

    var amount = balanceRound((tBalance - 10000) / magnitude);
    var params = [poolOptions.address, [{'address': poolOptions.zAddress, 'amount': amount}]];
    daemon.cmd('z_sendmany', params,
        function (result) {
            //Check if payments failed because wallet doesn't have enough coins to pay for tx fees
            if (result.error) {
                logger.error(logSystem, logComponent, 'Error trying to shield mined balance ' + JSON.stringify(result.error));
                // `callback` is usually not a function here (see NOTE above), so
                // it is swapped for a no-op before being invoked to avoid a
                // TypeError -- TODO(review): confirm this silencing is intended
                callback = function () {};
                callback(true);
            }
            else {
                // an operation id is now pending; tracked by the opid interval
                opidCount++;
                logger.special(logSystem, logComponent, 'Shield mined balance ' + amount);
                callback = function () {};
                callback(null);
            }
        }
    );
}
2017-04-06 15:23:37 -07:00
2016-11-27 14:54:08 -08:00
// send z_address balance to t_address (unshield funds for payout)
// NOTE(review): `callback` doubles as the value forwarded by listUnspentZ's
// callback - it may be `true` (upstream error), `null` (success) or possibly
// a function; it is NOT a conventional node callback here.
function sendZToT(callback, zBalance) {
    // upstream listUnspentZ reported an error - nothing to do
    if (callback === true)
        return;
    // zBalance is in satoshis; keep 10000 satoshis behind to cover the tx fee
    if ((zBalance - 10000) < 0)
        return;

    // do not allow more than a single z_sendmany operation at a time
    if (opidCount > 0) {
        logger.warning(logSystem, logComponent, 'sendZToT is waiting, too many z_sendmany operations already in progress.');
        return;
    }

    var amount = balanceRound((zBalance - 10000) / magnitude);
    // no more than 100 ZEC at a time
    if (amount > 100.0)
        amount = 100.0;

    var params = [poolOptions.zAddress, [{'address': poolOptions.tAddress, 'amount': amount}]];
    daemon.cmd('z_sendmany', params,
        function (result) {
            //Check if payments failed because wallet doesn't have enough coins to pay for tx fees
            if (result.error) {
                logger.error(logSystem, logComponent, 'Error trying to send z_address coin balance to payout t_address.'
                    + JSON.stringify(result.error));
                // `callback` is usually not a function here (see NOTE above), so
                // it is swapped for a no-op before being invoked to avoid a
                // TypeError -- TODO(review): confirm this silencing is intended
                callback = function () {};
                callback(true);
            }
            else {
                // an operation id is now pending; tracked by the opid interval
                opidCount++;
                logger.special(logSystem, logComponent, 'Unshield funds for payout ' + amount);
                callback = function () {};
                callback(null);
            }
        }
    );
}
2017-04-18 18:27:54 -07:00
// TODO, this needs to be moved out of payments processor
2017-04-01 17:04:04 -07:00
/**
 * Cache current network stats (block height, difficulty, network hashrate,
 * peer count, daemon version info) into redis under '<coin>:stats' for the
 * front-end to read.
 * FIX: daemon.cmd (without the single-result flag) yields an ARRAY of
 * per-daemon results - the original tested `result.error`, which is always
 * undefined on an array, making the error branches unreachable. Errors are
 * now read from result[0].error, matching how the responses are read.
 */
function cacheNetworkStats() {
    var params = null;
    daemon.cmd('getmininginfo', params,
        function (result) {

            var finalRedisCommands = [];
            var coin = logComponent;

            if (!result || !result[0] || result[0].error) {
                logger.error(logSystem, logComponent, 'Error with RPC call `getmininginfo`'
                    + JSON.stringify(result && result[0] ? result[0].error : result));
                return;
            } else {
                if (result[0].response && result[0].response.blocks !== null) {
                    finalRedisCommands.push(['hset', coin + ':stats', 'networkBlocks', result[0].response.blocks]);
                    finalRedisCommands.push(['hset', coin + ':stats', 'networkDiff', result[0].response.difficulty]);
                    finalRedisCommands.push(['hset', coin + ':stats', 'networkSols', result[0].response.networkhashps]);
                } else {
                    logger.error(logSystem, logComponent, "Error parsing response.blocks from `getmininginfo`." + JSON.stringify(result[0].response));
                }
            }

            daemon.cmd('getnetworkinfo', params,
                function (result) {
                    if (!result || !result[0] || result[0].error) {
                        logger.error(logSystem, logComponent, 'Error with RPC call `getnetworkinfo`'
                            + JSON.stringify(result && result[0] ? result[0].error : result));
                        return;
                    } else {
                        if (result[0].response !== null) {
                            finalRedisCommands.push(['hset', coin + ':stats', 'networkConnections', result[0].response.connections]);
                            finalRedisCommands.push(['hset', coin + ':stats', 'networkVersion', result[0].response.version]);
                            finalRedisCommands.push(['hset', coin + ':stats', 'networkSubVersion', result[0].response.subversion]);
                            finalRedisCommands.push(['hset', coin + ':stats', 'networkProtocolVersion', result[0].response.protocolversion]);
                        } else {
                            logger.error(logSystem, logComponent, "Error parse RPC call response to `getnetworkinfo`." + JSON.stringify(result[0].response));
                        }
                    }

                    // write all collected stats in a single redis transaction
                    redisClient.multi(finalRedisCommands).exec(function (error, results) {
                        if (error) {
                            logger.error(logSystem, logComponent, 'Error update coin stats to redis ' + JSON.stringify(error));
                            return;
                        }
                    });
                }
            );
        }
    );
}
2017-04-06 15:23:37 -07:00
2016-11-27 14:54:08 -08:00
// run coinbase coin transfers every x minutes
// alternate between TtoZ and ZtoT on successive ticks - running both
// at the same time makes the z_sendmany operations fail
var intervalState = 0;
var interval = poolOptions.walletInterval * 60 * 1000; // run every x minutes
setInterval(function () {

    // shielding not required for some equihash coins
    if (requireShielding === true) {
        intervalState++;
        switch (intervalState) {
            case 1:
                // shield: move mined coinbase funds from the pool t-address
                listUnspent(poolOptions.address, null, minConfShield, false, sendTToZ);
                break;
            default:
                // unshield: move z-address funds back to the payout t-address
                listUnspentZ(poolOptions.zAddress, minConfShield, false, sendZToT);
                intervalState = 0;
                break;
        }
    }

    // update network stats using coin daemon
    cacheNetworkStats();

}, interval);
2017-04-06 15:23:37 -07:00
2017-03-08 20:06:39 -08:00
// check z_sendmany operation statuses every x seconds and clear finished
// operations so new shield/unshield operations may be started
var opid_interval = poolOptions.walletInterval * 1000;
// shielding not required for some equihash coins
if (requireShielding === true) {
    setInterval(function () {
        // for each finished op, fetch (and thereby clear) its result from the
        // daemon's operation queue and reset the in-flight counter
        var checkOpIdSuccessAndGetResult = function (ops) {
            ops.forEach(function (op, i) {
                if (op.status == "success" || op.status == "failed") {
                    daemon.cmd('z_getoperationresult', [[op.id]], function (result) {
                        if (result.error) {
                            logger.warning(logSystem, logComponent, 'Unable to get payment operation id result ' + JSON.stringify(result));
                        }
                        if (result.response) {
                            // operation left the daemon queue; allow new z_sendmany ops
                            if (opidCount > 0) {
                                opidCount = 0;
                            }
                            if (op.status == "failed") {
                                if (op.error) {
                                    logger.error(logSystem, logComponent, "Shielding operation failed " + op.id + " " + op.error.code + ", " + op.error.message);
                                } else {
                                    logger.error(logSystem, logComponent, "Shielding operation failed " + op.id);
                                }
                            } else {
                                logger.special(logSystem, logComponent, 'Shielding operation success ' + op.id + ' txid: ' + op.result.txid);
                            }
                        }
                    }, true, true);
                } else if (op.status == "executing") {
                    // log an in-progress operation only once per batch
                    if (opidCount == 0) {
                        opidCount++;
                        logger.special(logSystem, logComponent, 'Shielding operation in progress ' + op.id);
                    }
                }
            });
        };
        daemon.cmd('z_getoperationstatus', null, function (result) {
            if (result.error) {
                logger.warning(logSystem, logComponent, 'Unable to get operation ids for clearing.');
            }
            if (result.response) {
                checkOpIdSuccessAndGetResult(result.response);
            }
        }, true, true);
    }, opid_interval);
}
2014-03-09 19:31:58 -07:00
2014-05-02 14:59:46 -07:00
// Convert an integer satoshi amount into whole coins, rounded to the
// coin's detected decimal precision.
var satoshisToCoins = function (satoshis) {
    var coins = satoshis / magnitude;
    return parseFloat(coins.toFixed(coinPrecision));
};
2014-05-07 10:58:56 -07:00
// Convert a whole-coin amount into an integer number of satoshis.
var coinsToSatoshies = function (coins) {
    var satoshis = coins * magnitude;
    return Math.round(satoshis);
};
2017-03-08 20:06:39 -08:00
function balanceRound ( number ) {
2017-04-18 18:27:54 -07:00
return parseFloat ( ( Math . round ( number * 100000000 ) / 100000000 ) . toFixed ( 8 ) ) ;
2017-03-08 20:06:39 -08:00
}
2017-04-18 18:27:54 -07:00
// Return true when more than one round in the list was submitted at the
// given block height (loose equality kept: heights may be strings).
function checkForDuplicateBlockHeight(rounds, height) {
    var matches = rounds.filter(function (r) {
        return r.height == height;
    });
    return matches.length > 1;
}
2014-03-26 23:56:36 -07:00
/* Deal with numbers in smallest possible units (satoshis) as much as possible. This greatly helps with accuracy
when rounding and whatnot. When we are storing numbers for only humans to see, store in whole coin units. */
2014-03-12 15:33:29 -07:00
2014-03-11 18:56:19 -07:00
var processPayments = function ( ) {
2014-03-27 15:29:43 -07:00
var startPaymentProcess = Date . now ( ) ;
2014-05-02 14:59:46 -07:00
var timeSpentRPC = 0 ;
var timeSpentRedis = 0 ;
2014-03-11 18:56:19 -07:00
2014-05-02 14:59:46 -07:00
var startTimeRedis ;
var startTimeRPC ;
var startRedisTimer = function ( ) { startTimeRedis = Date . now ( ) } ;
var endRedisTimer = function ( ) { timeSpentRedis += Date . now ( ) - startTimeRedis } ;
var startRPCTimer = function ( ) { startTimeRPC = Date . now ( ) ; } ;
var endRPCTimer = function ( ) { timeSpentRPC += Date . now ( ) - startTimeRedis } ;
async . waterfall ( [
2014-04-04 18:02:52 -07:00
2017-04-18 18:27:54 -07:00
/* Call redis to get an array of rounds and balances - which are coinbase transactions and block heights from submitted blocks. */
2017-04-06 15:23:37 -07:00
function ( callback ) {
2014-05-02 14:59:46 -07:00
startRedisTimer ( ) ;
redisClient . multi ( [
2014-05-09 16:43:11 -07:00
[ 'hgetall' , coin + ':balances' ] ,
[ 'smembers' , coin + ':blocksPending' ]
2014-05-02 14:59:46 -07:00
] ) . exec ( function ( error , results ) {
endRedisTimer ( ) ;
2014-03-11 18:56:19 -07:00
if ( error ) {
2014-04-04 18:02:52 -07:00
logger . error ( logSystem , logComponent , 'Could not get blocks from redis ' + JSON . stringify ( error ) ) ;
2014-05-02 14:59:46 -07:00
callback ( true ) ;
2014-03-11 18:56:19 -07:00
return ;
}
2017-04-18 18:27:54 -07:00
// build worker balances
2014-05-02 14:59:46 -07:00
var workers = { } ;
for ( var w in results [ 0 ] ) {
2014-05-29 15:56:36 -07:00
workers [ w ] = { balance : coinsToSatoshies ( parseFloat ( results [ 0 ] [ w ] ) ) } ;
2014-03-11 18:56:19 -07:00
}
2017-04-18 18:27:54 -07:00
// build initial rounds data from blocksPending
2014-05-02 14:59:46 -07:00
var rounds = results [ 1 ] . map ( function ( r ) {
2014-03-12 17:09:12 -07:00
var details = r . split ( ':' ) ;
2014-03-22 19:08:33 -07:00
return {
2014-04-02 12:01:05 -07:00
blockHash : details [ 0 ] ,
2014-03-27 15:29:43 -07:00
txHash : details [ 1 ] ,
height : details [ 2 ] ,
2017-04-18 18:27:54 -07:00
minedby : details [ 3 ] ,
duplicate : false ,
2014-03-22 19:08:33 -07:00
serialized : r
} ;
2014-03-11 18:56:19 -07:00
} ) ;
2017-04-18 18:27:54 -07:00
// find duplicate blocks by height
// this can happen when two or more solutions are submitted at the same block height
var duplicateFound = false ;
for ( var i = 0 ; i < rounds . length ; i ++ ) {
if ( checkForDuplicateBlockHeight ( rounds , rounds [ i ] . height ) === true ) {
rounds [ i ] . duplicate = true ;
duplicateFound = true ;
}
}
// handle duplicates if needed
if ( duplicateFound ) {
var dups = rounds . filter ( function ( round ) { return round . duplicate ; } ) ;
2017-04-19 19:05:11 -07:00
logger . warning ( logSystem , logComponent , 'Duplicate pending blocks found: ' + JSON . stringify ( dups ) ) ;
2017-04-18 18:27:54 -07:00
// attempt to find the invalid duplicates
var rpcDupCheck = dups . map ( function ( r ) {
return [ 'getblock' , [ r . blockHash ] ] ;
} ) ;
startRPCTimer ( ) ;
daemon . batchCmd ( rpcDupCheck , function ( error , blocks ) {
endRPCTimer ( ) ;
if ( error || ! blocks ) {
logger . error ( logSystem , logComponent , 'Error with duplicate block check rpc call getblock ' + JSON . stringify ( error ) ) ;
return ;
2017-04-19 19:05:11 -07:00
}
2017-04-18 18:27:54 -07:00
// look for the invalid duplicate block
2017-04-19 14:19:44 -07:00
var validBlocks = { } ; // hashtable for unique look up
var invalidBlocks = [ ] ; // array for redis work
2017-04-18 18:27:54 -07:00
blocks . forEach ( function ( block , i ) {
if ( block && block . result ) {
// invalid duplicate submit blocks have negative confirmations
if ( block . result . confirmations < 0 ) {
2017-04-19 14:19:44 -07:00
logger . warning ( logSystem , logComponent , 'Remove invalid duplicate block ' + block . result . height + ' > ' + block . result . hash ) ;
// move from blocksPending to blocksDuplicate...
2017-04-18 18:27:54 -07:00
invalidBlocks . push ( [ 'smove' , coin + ':blocksPending' , coin + ':blocksDuplicate' , dups [ i ] . serialized ] ) ;
} else {
2017-04-19 14:19:44 -07:00
// block must be valid, make sure it is unique
if ( validBlocks . hasOwnProperty ( dups [ i ] . blockHash ) ) {
// not unique duplicate block
logger . warning ( logSystem , logComponent , 'Remove non-unique duplicate block ' + block . result . height + ' > ' + block . result . hash ) ;
// move from blocksPending to blocksDuplicate...
invalidBlocks . push ( [ 'smove' , coin + ':blocksPending' , coin + ':blocksDuplicate' , dups [ i ] . serialized ] ) ;
} else {
// keep unique valid block
validBlocks [ dups [ i ] . blockHash ] = dups [ i ] . serialized ;
logger . debug ( logSystem , logComponent , 'Keep valid duplicate block ' + block . result . height + ' > ' + block . result . hash ) ;
}
2017-04-18 18:27:54 -07:00
}
}
} ) ;
// filter out all duplicates to prevent double payments
rounds = rounds . filter ( function ( round ) { return ! round . duplicate ; } ) ;
// if we detected the invalid duplicates, move them
if ( invalidBlocks . length > 0 ) {
// move invalid duplicate blocks in redis
startRedisTimer ( ) ;
redisClient . multi ( invalidBlocks ) . exec ( function ( error , kicked ) {
endRedisTimer ( ) ;
if ( error ) {
logger . error ( logSystem , logComponent , 'Error could not move invalid duplicate blocks in redis ' + JSON . stringify ( error ) ) ;
}
// continue payments normally
callback ( null , workers , rounds ) ;
} ) ;
} else {
// notify pool owner that we are unable to find the invalid duplicate blocks, manual intervention required...
2017-04-18 19:03:36 -07:00
logger . error ( logSystem , logComponent , 'Unable to detect invalid duplicate blocks, duplicate block payments on hold.' ) ;
2017-04-18 18:27:54 -07:00
// continue payments normally
callback ( null , workers , rounds ) ;
}
} ) ;
} else {
// no duplicates, continue payments normally
callback ( null , workers , rounds ) ;
}
2014-03-11 18:56:19 -07:00
} ) ;
} ,
2017-04-06 15:23:37 -07:00
2016-12-27 08:42:48 -08:00
2014-03-12 15:33:29 -07:00
/* Does a batch rpc call to the daemon with all the transaction hashes to see if they are confirmed yet.
It also adds the block reward amount to the round object - which the daemon also gives us. */
2014-05-02 14:59:46 -07:00
function ( workers , rounds , callback ) {
2014-03-11 18:56:19 -07:00
2017-03-08 20:06:39 -08:00
// first verify block confirmations by block hash
var batchRPCcommand2 = rounds . map ( function ( r ) {
return [ 'getblock' , [ r . blockHash ] ] ;
2014-03-12 17:09:12 -07:00
} ) ;
2017-03-08 20:06:39 -08:00
// guarantee a response for batchRPCcommand2
batchRPCcommand2 . push ( [ 'getblockcount' ] ) ;
2017-04-06 15:23:37 -07:00
2014-05-02 14:59:46 -07:00
startRPCTimer ( ) ;
2017-03-08 20:06:39 -08:00
daemon . batchCmd ( batchRPCcommand2 , function ( error , blockDetails ) {
2014-05-02 14:59:46 -07:00
endRPCTimer ( ) ;
2017-04-06 15:23:37 -07:00
2017-03-08 20:06:39 -08:00
// error getting block info by hash?
if ( error || ! blockDetails ) {
logger . error ( logSystem , logComponent , 'Check finished - daemon rpc error with batch getblock '
2014-05-02 14:59:46 -07:00
+ JSON . stringify ( error ) ) ;
callback ( true ) ;
2014-03-11 20:47:14 -07:00
return ;
}
2017-04-06 15:23:37 -07:00
2017-03-08 20:06:39 -08:00
// update confirmations in redis for pending blocks
2017-04-01 17:04:04 -07:00
var confirmsUpdate = blockDetails . map ( function ( b ) {
2017-03-08 20:06:39 -08:00
if ( b . result != null && b . result . confirmations > 0 ) {
2017-03-08 22:15:10 -08:00
if ( b . result . confirmations > 100 ) {
2017-04-06 15:23:37 -07:00
return [ 'hdel' , logComponent + ':blocksPendingConfirms' , b . result . hash ] ;
2017-03-08 22:15:10 -08:00
}
2017-03-08 20:06:39 -08:00
return [ 'hset' , logComponent + ':blocksPendingConfirms' , b . result . hash , b . result . confirmations ] ;
2014-03-27 11:57:56 -07:00
}
2017-03-08 20:06:39 -08:00
return null ;
} ) ;
2017-04-06 15:23:37 -07:00
2017-03-08 20:06:39 -08:00
// filter nulls, last item is always null...
confirmsUpdate = confirmsUpdate . filter ( function ( val ) { return val !== null ; } ) ;
// guarantee at least one redis update
if ( confirmsUpdate . length < 1 )
confirmsUpdate . push ( [ 'hset' , logComponent + ':blocksPendingConfirms' , 0 , 0 ] ) ;
2017-04-06 15:23:37 -07:00
2017-03-08 20:06:39 -08:00
startRedisTimer ( ) ;
redisClient . multi ( confirmsUpdate ) . exec ( function ( error , updated ) {
endRedisTimer ( ) ;
2017-04-06 15:23:37 -07:00
2017-03-08 20:06:39 -08:00
if ( error ) {
logger . error ( logSystem , logComponent , 'failed to update pending block confirmations'
+ JSON . stringify ( error ) ) ;
callback ( true ) ;
2014-05-02 14:59:46 -07:00
return ;
2014-03-15 17:58:28 -07:00
}
2017-04-18 18:27:54 -07:00
// get pending block transaction details from coin daemon
2017-03-08 20:06:39 -08:00
var batchRPCcommand = rounds . map ( function ( r ) {
return [ 'gettransaction' , [ r . txHash ] ] ;
2017-04-06 15:23:37 -07:00
} ) ;
2017-04-18 18:27:54 -07:00
// get account address (not implemented in zcash at this time..)
2017-03-08 20:06:39 -08:00
batchRPCcommand . push ( [ 'getaccount' , [ poolOptions . address ] ] ) ;
2017-04-06 15:23:37 -07:00
2017-03-08 20:06:39 -08:00
startRPCTimer ( ) ;
daemon . batchCmd ( batchRPCcommand , function ( error , txDetails ) {
endRPCTimer ( ) ;
2017-04-06 15:23:37 -07:00
2017-03-08 20:06:39 -08:00
if ( error || ! txDetails ) {
logger . error ( logSystem , logComponent , 'Check finished - daemon rpc error with batch gettransactions '
+ JSON . stringify ( error ) ) ;
callback ( true ) ;
return ;
}
2017-04-06 15:23:37 -07:00
2017-03-08 20:06:39 -08:00
var addressAccount = "" ;
2017-04-06 15:23:37 -07:00
2017-03-08 20:06:39 -08:00
// check for transaction errors and generated coins
txDetails . forEach ( function ( tx , i ) {
2017-04-06 15:23:37 -07:00
2017-03-08 20:06:39 -08:00
if ( i === txDetails . length - 1 ) {
addressAccount = tx . result ;
return ;
}
2017-04-06 15:23:37 -07:00
2017-03-08 20:06:39 -08:00
var round = rounds [ i ] ;
if ( tx . error && tx . error . code === - 5 ) {
logger . warning ( logSystem , logComponent , 'Daemon reports invalid transaction: ' + round . txHash ) ;
round . category = 'kicked' ;
return ;
}
else if ( ! tx . result . details || ( tx . result . details && tx . result . details . length === 0 ) ) {
logger . warning ( logSystem , logComponent , 'Daemon reports no details for transaction: ' + round . txHash ) ;
round . category = 'kicked' ;
return ;
}
else if ( tx . error || ! tx . result ) {
logger . error ( logSystem , logComponent , 'Odd error with gettransaction ' + round . txHash + ' '
+ JSON . stringify ( tx ) ) ;
return ;
}
2014-05-02 14:59:46 -07:00
2017-03-08 20:06:39 -08:00
var generationTx = tx . result . details . filter ( function ( tx ) {
return tx . address === poolOptions . address ;
} ) [ 0 ] ;
2014-05-02 14:59:46 -07:00
2017-03-08 20:06:39 -08:00
if ( ! generationTx && tx . result . details . length === 1 ) {
generationTx = tx . result . details [ 0 ] ;
}
2014-05-02 14:59:46 -07:00
2017-03-08 20:06:39 -08:00
if ( ! generationTx ) {
2017-04-12 18:25:48 -07:00
logger . error ( logSystem , logComponent , 'Missing output details to pool address for transaction ' + round . txHash ) ;
2017-03-08 20:06:39 -08:00
return ;
}
2017-04-06 15:23:37 -07:00
2017-03-08 20:06:39 -08:00
round . category = generationTx . category ;
if ( round . category === 'generate' ) {
2017-04-01 17:04:04 -07:00
round . reward = balanceRound ( generationTx . amount - fee ) || balanceRound ( generationTx . value - fee ) ; // TODO: Adjust fees to be dynamic
2017-03-08 20:06:39 -08:00
}
2017-04-06 15:23:37 -07:00
2017-03-08 20:06:39 -08:00
} ) ;
// Shares recorded for round r may only be purged when no other tracked round
// at the same block height is still potentially payable — i.e. every other
// record at that height has itself been kicked or orphaned.
var canDeleteShares = function (r) {
    return !rounds.some(function (other) {
        return other.height === r.height
            && other.category !== 'kicked'
            && other.category !== 'orphan'
            && other.serialized !== r.serialized;
    });
};
2017-04-06 15:23:37 -07:00
2017-04-18 18:27:54 -07:00
// limit blocks paid per payment round
var payingBlocks = 0;

//filter out all rounds that are immature (not confirmed or orphaned yet)
rounds = rounds.filter(function (r) {
    // only pay max blocks at a time; anything past the cap — including
    // orphan/kicked cleanup — waits for the next payment interval
    if (payingBlocks >= maxBlocksPerPayment)
        return false;
    switch (r.category) {
        case 'orphan':
        case 'kicked':
            // keep dead rounds in the pipeline so a later step can merge
            // their shares back into the current round and clean them up
            r.canDeleteShares = canDeleteShares(r);
            return true;
        case 'generate':
            payingBlocks++;
            return true;
        // 'immature' and anything unrecognized is deferred to a later run
        default:
            return false;
    }
});
2014-05-02 14:59:46 -07:00
2017-04-18 18:27:54 -07:00
// TODO: make tx fees dynamic
var feeSatoshi = fee * magnitude;

// calculate what the pool owes its miners, in satoshis
// (was `parseInt(0)` — a pointless number→string→number round-trip)
var totalOwed = 0;
for (var i = 0; i < rounds.length; i++) {
    // only pay generated blocks, not orphaned or kicked
    if (rounds[i].category === 'generate') {
        // one tx fee is deducted per paid block
        totalOwed = totalOwed + Math.round(rounds[i].reward * magnitude) - feeSatoshi;
    }
}
2017-04-06 15:23:37 -07:00
2017-04-01 17:04:04 -07:00
// when shielding is required, pass the pool's own address as the address to
// exclude — presumably so coins still sitting unshielded on the pool address
// are not counted as spendable (TODO confirm listUnspent's notAddr semantics)
var notAddr = null;
if (requireShielding === true) {
    notAddr = poolOptions.address;
}

// check if we have enough tAddress funds to begin payment processing
listUnspent(null, notAddr, minConfPayout, false, function (error, tBalance) {
    if (error) {
        logger.error(logSystem, logComponent, 'Error checking pool balance before processing payments.');
        // abort this waterfall run; the next interval will retry
        return callback(true);
    } else if (tBalance < totalOwed) {
        logger.error(logSystem, logComponent, 'Insufficient funds to process payments for ' + payingBlocks + ' blocks (' + (tBalance / magnitude).toFixed(8) + ' < ' + (totalOwed / magnitude).toFixed(8) + '). Possibly waiting for shielding process.');
        return callback(true);
    } else {
        // zcash daemon does not support account feature
        addressAccount = "";
        callback(null, workers, rounds, addressAccount);
    }
})
2014-03-22 19:08:33 -07:00
2017-03-08 20:06:39 -08:00
} ) ;
2014-03-11 18:56:19 -07:00
} ) ;
} ) ;
} ,
2014-03-11 20:47:14 -07:00
2014-03-12 15:33:29 -07:00
/* Does a batch redis call to get the shares contributed to each round, then calculates the
reward amount owed to each miner for each round. */
2014-05-02 14:59:46 -07:00
function ( workers , rounds , addressAccount , callback ) {
2014-03-11 18:56:19 -07:00
2014-03-12 17:09:12 -07:00
var shareLookups = rounds . map ( function ( r ) {
2014-05-09 16:43:11 -07:00
return [ 'hgetall' , coin + ':shares:round' + r . height ]
2014-03-12 17:09:12 -07:00
} ) ;
2014-03-12 15:33:29 -07:00
2014-05-02 14:59:46 -07:00
startRedisTimer ( ) ;
2014-03-12 17:09:12 -07:00
redisClient . multi ( shareLookups ) . exec ( function ( error , allWorkerShares ) {
2014-05-02 14:59:46 -07:00
endRedisTimer ( ) ;
2014-03-11 18:56:19 -07:00
if ( error ) {
2014-05-02 14:59:46 -07:00
callback ( 'Check finished - redis error with multi get rounds share' ) ;
2014-03-11 18:56:19 -07:00
return ;
}
2014-03-11 20:47:14 -07:00
2014-03-22 19:08:33 -07:00
rounds . forEach ( function ( round , i ) {
var workerShares = allWorkerShares [ i ] ;
2014-03-12 15:33:29 -07:00
2014-03-27 15:29:43 -07:00
if ( ! workerShares ) {
logger . error ( logSystem , logComponent , 'No worker shares for round: '
2014-04-02 12:01:05 -07:00
+ round . height + ' blockHash: ' + round . blockHash ) ;
2014-03-27 15:29:43 -07:00
return ;
2014-03-11 20:47:14 -07:00
}
2017-04-06 15:23:37 -07:00
2014-03-27 15:29:43 -07:00
switch ( round . category ) {
2014-05-07 00:11:27 -07:00
case 'kicked' :
2014-03-27 15:29:43 -07:00
case 'orphan' :
2014-05-02 14:59:46 -07:00
round . workerShares = workerShares ;
2014-03-27 15:29:43 -07:00
break ;
case 'generate' :
/ * W e f o u n d a c o n f i r m e d b l o c k ! N o w g e t t h e r e w a r d f o r i t a n d c a l c u l a t e h o w m u c h
we owe each miner based on the shares they submitted during that block round . * /
2014-05-02 14:59:46 -07:00
var reward = parseInt ( round . reward * magnitude ) ;
2014-03-27 15:29:43 -07:00
var totalShares = Object . keys ( workerShares ) . reduce ( function ( p , c ) {
2014-04-24 13:53:06 -07:00
return p + parseFloat ( workerShares [ c ] )
2014-03-27 15:29:43 -07:00
} , 0 ) ;
2014-05-02 14:59:46 -07:00
for ( var workerAddress in workerShares ) {
var percent = parseFloat ( workerShares [ workerAddress ] ) / totalShares ;
2014-03-27 15:29:43 -07:00
var workerRewardTotal = Math . floor ( reward * percent ) ;
2014-05-02 14:59:46 -07:00
var worker = workers [ workerAddress ] = ( workers [ workerAddress ] || { } ) ;
2017-03-08 20:06:39 -08:00
worker . totalShares = ( worker . totalShares || 0 ) + parseFloat ( workerShares [ workerAddress ] ) ;
2014-05-02 14:59:46 -07:00
worker . reward = ( worker . reward || 0 ) + workerRewardTotal ;
2014-03-27 15:29:43 -07:00
}
break ;
2014-03-22 19:08:33 -07:00
}
} ) ;
2014-03-12 17:09:12 -07:00
2014-05-02 14:59:46 -07:00
callback ( null , workers , rounds , addressAccount ) ;
2014-03-11 20:47:14 -07:00
} ) ;
} ,
/* Calculate whether any payments are ready to be sent and trigger sending them.
Gets the balance difference for each address and passes it along as an object of
latest balances such as {worker1: balance1, worker2: balance2}.
When sending a worker's balance, the difference should be -1 * (the amount they had
in the db); when not sending it, the difference should be +(the amount they earned
this round). */
2014-05-02 14:59:46 -07:00
function (workers, rounds, addressAccount, callback) {

    // Attempt the payout. withholdPercent is the fraction of each reward being
    // withheld to cover tx fees; on a -6 "insufficient funds" reply from the
    // daemon this function calls itself again with 1% more withheld.
    var trySend = function (withholdPercent) {
        var addressAmounts = {};  // address -> coins actually being sent now
        var balanceAmounts = {};  // address -> coins carried forward as balance
        var minerTotals = {};     // address -> combined owed total across its workers
        var totalSent = 0;        // satoshis sent in this attempt
        var totalShares = 0;
        // total up miner's balances
        for (var w in workers) {
            var worker = workers[w];
            totalShares += (worker.totalShares || 0)
            worker.balance = worker.balance || 0;
            worker.reward = worker.reward || 0;
            var toSend = balanceRound(satoshisToCoins(Math.floor((worker.balance + worker.reward) * (1 - withholdPercent))));
            // worker keys may be "address.rigName"; payouts use the address part
            var address = worker.address = (worker.address || getProperAddress(w.split('.')[0]));
            if (minerTotals[address] != null && minerTotals[address] > 0) {
                minerTotals[address] = balanceRound(minerTotals[address] + toSend);
            } else {
                minerTotals[address] = toSend;
            }
        }
        // now process each workers balance, and pay the miner
        for (var w in workers) {
            var worker = workers[w];
            worker.balance = worker.balance || 0;
            worker.reward = worker.reward || 0;
            var toSend = Math.floor((worker.balance + worker.reward) * (1 - withholdPercent));
            var address = worker.address = (worker.address || getProperAddress(w.split('.')[0]));
            // if miners total is enough, go ahead and add this worker balance
            // (the minimum-payment threshold applies per address, not per worker)
            if (minerTotals[address] >= satoshisToCoins(minPaymentSatoshis)) {
                totalSent += toSend;
                worker.sent = balanceRound(satoshisToCoins(toSend));
                // paying out consumes any stored balance (negative change)
                worker.balanceChange = Math.min(worker.balance, toSend) * -1;
                // multiple workers may have same address, add them up
                if (addressAmounts[address] != null && addressAmounts[address] > 0) {
                    addressAmounts[address] = balanceRound(addressAmounts[address] + worker.sent);
                } else {
                    addressAmounts[address] = worker.sent;
                }
            }
            else {
                // under threshold: bank what was newly earned as balance
                worker.balanceChange = Math.max(toSend - worker.balance, 0);
                worker.sent = 0;
                // track balance changes
                if (balanceAmounts[address] != null && balanceAmounts[address] > 0) {
                    balanceAmounts[address] = balanceRound(balanceAmounts[address] + worker.balanceChange);
                } else {
                    balanceAmounts[address] = worker.balanceChange;
                }
            }
        }

        // if no payouts...continue to next set of callbacks
        if (Object.keys(addressAmounts).length === 0) {
            callback(null, workers, rounds);
            return;
        }
        /*
        var undoPaymentsOnError = function (workers) {
            totalSent = 0;
            // TODO, set round.category to immature, to attempt to pay again
            // we did not send anything to any workers
            for (var w in workers) {
                var worker = workers[w];
                if (worker.sent > 0) {
                    worker.balanceChange = 0;
                    worker.sent = 0;
                }
            }
        };
        */
        // perform the sendmany operation
        daemon.cmd('sendmany', ["", addressAmounts], function (result) {
            // check for failed payments, there are many reasons
            if (result.error && result.error.code === -6) {
                // not enough minerals... (-6: wallet has insufficient funds)
                var higherPercent = withholdPercent + 0.01;
                logger.warning(logSystem, logComponent, 'Not enough funds to cover the tx fees for sending out payments, decreasing rewards by '
                    + (higherPercent * 100) + '% and retrying');
                trySend(higherPercent);
            }
            else if (result.error && result.error.code === -5) {
                // invalid address specified in addressAmounts array
                logger.error(logSystem, logComponent, 'Error sending payments ' + result.error.message);
                //undoPaymentsOnError(workers);
                callback(true);
                return;
            }
            else if (result.error && result.error.message != null) {
                // unknown error from daemon
                logger.error(logSystem, logComponent, 'Error sending payments ' + result.error.message);
                //undoPaymentsOnError(workers);
                callback(true);
                return;
            }
            else if (result.error) {
                // some other unknown error
                logger.error(logSystem, logComponent, 'Error sending payments ' + JSON.stringify(result.error));
                //undoPaymentsOnError(workers);
                callback(true);
                return;
            }
            else {

                // make sure sendmany gives us back a txid
                var txid = null;
                if (result.response) {
                    txid = result.response;
                }
                if (txid != null) {

                    // it worked, congrats on your pools payout ;)
                    logger.special(logSystem, logComponent, 'Sent ' + (totalSent / magnitude).toFixed(8)
                        + ' to ' + Object.keys(addressAmounts).length + ' miners; txid: ' + txid);

                    if (withholdPercent > 0) {
                        logger.warning(logSystem, logComponent, 'Had to withhold ' + (withholdPercent * 100)
                            + '% of reward from miners to cover transaction fees. '
                            + 'Fund pool wallet with coins to prevent this from happening');
                    }

                    // save payments data to redis
                    var paymentBlocks = rounds.map(function (r) {
                        return parseInt(r.height);
                    });
                    var paymentsUpdate = [];
                    var paymentsData = { time: Date.now(), txid: txid, shares: totalShares, paid: balanceRound(totalSent / magnitude), miners: Object.keys(addressAmounts).length, blocks: paymentBlocks, amounts: addressAmounts, balances: balanceAmounts };
                    paymentsUpdate.push(['zadd', logComponent + ':payments', Date.now(), JSON.stringify(paymentsData)]);
                    startRedisTimer();
                    redisClient.multi(paymentsUpdate).exec(function (error, payments) {
                        endRedisTimer();
                        if (error) {
                            logger.error(logSystem, logComponent, 'Error redis save payments data ' + JSON.stringify(payments));
                        }
                        // NOTE(review): if the payment record failed to persist we
                        // still continue — the coins were already sent
                        callback(null, workers, rounds);
                    });

                } else {

                    // no txid: we cannot tell whether coins actually moved, so
                    // stop the payment interval rather than risk paying twice
                    clearInterval(paymentInterval);

                    logger.error(logSystem, logComponent, 'Error RPC sendmany did not return txid '
                        + JSON.stringify(result) + 'Disabling payment processing to prevent possible double-payouts.');
                    callback(true);
                    return;
                }
            }
        }, true, true);

    };

    // first attempt withholds nothing
    trySend(0);

},
function ( workers , rounds , callback ) {
2017-04-01 17:04:04 -07:00
var totalPaid = parseFloat ( 0 ) ;
2014-05-02 14:59:46 -07:00
var balanceUpdateCommands = [ ] ;
var workerPayoutsCommand = [ ] ;
2017-04-01 17:04:04 -07:00
// update worker paid/balance stats
2014-05-02 14:59:46 -07:00
for ( var w in workers ) {
var worker = workers [ w ] ;
if ( worker . balanceChange !== 0 ) {
balanceUpdateCommands . push ( [
2014-05-07 10:58:56 -07:00
'hincrbyfloat' ,
2014-05-09 16:43:11 -07:00
coin + ':balances' ,
2014-05-02 14:59:46 -07:00
w ,
2017-03-08 20:06:39 -08:00
balanceRound ( satoshisToCoins ( worker . balanceChange ) )
2014-05-02 14:59:46 -07:00
] ) ;
2014-03-20 15:25:59 -07:00
}
2014-05-02 14:59:46 -07:00
if ( worker . sent !== 0 ) {
2017-03-08 20:06:39 -08:00
workerPayoutsCommand . push ( [ 'hincrbyfloat' , coin + ':payouts' , w , balanceRound ( worker . sent ) ] ) ;
totalPaid = balanceRound ( totalPaid + worker . sent ) ;
2014-03-20 15:25:59 -07:00
}
2014-05-02 14:59:46 -07:00
}
2014-03-20 15:25:59 -07:00
2014-05-02 14:59:46 -07:00
var movePendingCommands = [ ] ;
var roundsToDelete = [ ] ;
var orphanMergeCommands = [ ] ;
2014-03-20 15:25:59 -07:00
2014-05-07 00:11:27 -07:00
var moveSharesToCurrent = function ( r ) {
var workerShares = r . workerShares ;
2017-03-08 20:06:39 -08:00
if ( workerShares != null ) {
Object . keys ( workerShares ) . forEach ( function ( worker ) {
orphanMergeCommands . push ( [ 'hincrby' , coin + ':shares:roundCurrent' , worker , workerShares [ worker ] ] ) ;
} ) ;
}
2014-05-07 00:11:27 -07:00
} ;
2017-04-01 17:04:04 -07:00
// handle the round
2014-05-02 14:59:46 -07:00
rounds . forEach ( function ( r ) {
switch ( r . category ) {
2014-05-07 00:11:27 -07:00
case 'kicked' :
2014-05-09 16:43:11 -07:00
movePendingCommands . push ( [ 'smove' , coin + ':blocksPending' , coin + ':blocksKicked' , r . serialized ] ) ;
2014-05-02 14:59:46 -07:00
case 'orphan' :
2014-05-09 16:43:11 -07:00
movePendingCommands . push ( [ 'smove' , coin + ':blocksPending' , coin + ':blocksOrphaned' , r . serialized ] ) ;
2014-05-07 00:11:27 -07:00
if ( r . canDeleteShares ) {
moveSharesToCurrent ( r ) ;
2014-05-09 16:43:11 -07:00
roundsToDelete . push ( coin + ':shares:round' + r . height ) ;
2014-05-07 00:11:27 -07:00
}
return ;
2014-05-02 14:59:46 -07:00
case 'generate' :
2014-05-09 16:43:11 -07:00
movePendingCommands . push ( [ 'smove' , coin + ':blocksPending' , coin + ':blocksConfirmed' , r . serialized ] ) ;
2016-12-18 20:11:55 -08:00
roundsToDelete . push ( coin + ':shares:round' + r . height ) ;
2014-05-07 00:11:27 -07:00
return ;
2014-05-02 14:59:46 -07:00
}
} ) ;
2014-03-20 15:25:59 -07:00
2014-05-02 14:59:46 -07:00
var finalRedisCommands = [ ] ;
2014-03-22 19:08:33 -07:00
2014-05-02 14:59:46 -07:00
if ( movePendingCommands . length > 0 )
finalRedisCommands = finalRedisCommands . concat ( movePendingCommands ) ;
2014-03-22 19:08:33 -07:00
2014-05-02 14:59:46 -07:00
if ( orphanMergeCommands . length > 0 )
finalRedisCommands = finalRedisCommands . concat ( orphanMergeCommands ) ;
2014-03-20 15:25:59 -07:00
2014-05-02 14:59:46 -07:00
if ( balanceUpdateCommands . length > 0 )
finalRedisCommands = finalRedisCommands . concat ( balanceUpdateCommands ) ;
2014-03-22 19:08:33 -07:00
2014-05-02 14:59:46 -07:00
if ( workerPayoutsCommand . length > 0 )
finalRedisCommands = finalRedisCommands . concat ( workerPayoutsCommand ) ;
2014-04-02 13:43:58 -07:00
2014-05-02 14:59:46 -07:00
if ( roundsToDelete . length > 0 )
finalRedisCommands . push ( [ 'del' ] . concat ( roundsToDelete ) ) ;
2014-03-22 19:08:33 -07:00
2014-05-02 14:59:46 -07:00
if ( totalPaid !== 0 )
2017-03-08 20:06:39 -08:00
finalRedisCommands . push ( [ 'hincrbyfloat' , coin + ':stats' , 'totalPaid' , balanceRound ( totalPaid ) ] ) ;
2014-03-22 19:08:33 -07:00
2014-05-02 14:59:46 -07:00
if ( finalRedisCommands . length === 0 ) {
callback ( ) ;
return ;
}
2014-03-09 19:31:58 -07:00
2014-05-02 14:59:46 -07:00
startRedisTimer ( ) ;
redisClient . multi ( finalRedisCommands ) . exec ( function ( error , results ) {
endRedisTimer ( ) ;
2014-04-02 03:56:13 -07:00
if ( error ) {
2014-05-02 14:59:46 -07:00
clearInterval ( paymentInterval ) ;
logger . error ( logSystem , logComponent ,
'Payments sent but could not update redis. ' + JSON . stringify ( error )
+ ' Disabling payment processing to prevent possible double-payouts. The redis commands in '
+ coin + '_finalRedisCommands.txt must be ran manually' ) ;
fs . writeFile ( coin + '_finalRedisCommands.txt' , JSON . stringify ( finalRedisCommands ) , function ( err ) {
logger . error ( 'Could not write finalRedisCommands.txt, you are fucked.' ) ;
} ) ;
2014-04-02 03:56:13 -07:00
}
2014-05-02 14:59:46 -07:00
callback ( ) ;
2014-04-02 03:56:13 -07:00
} ) ;
2014-03-11 18:56:19 -07:00
}
2014-03-27 15:29:43 -07:00
2014-05-02 14:59:46 -07:00
], function () {

    // waterfall complete — log how long this payment interval took and how
    // much of it was spent waiting on redis vs. the coin daemon RPC
    // NOTE(review): any error passed up the waterfall is silently ignored
    // here; failed runs simply end and the next interval retries — confirm
    // this is intended
    var paymentProcessTime = Date.now() - startPaymentProcess;
    logger.debug(logSystem, logComponent, 'Finished interval - time spent: '
        + paymentProcessTime + 'ms total, ' + timeSpentRedis + 'ms redis, '
        + timeSpentRPC + 'ms daemon RPC');

});
2014-03-11 18:56:19 -07:00
} ;
2014-04-26 15:24:06 -07:00
// Resolve a worker identifier to a payable address. A 40-character value is
// treated as a raw hex public key / extended fragment and converted into an
// address derived from the pool's own address; anything else is assumed to
// already be a usable address and is returned unchanged.
var getProperAddress = function (address) {
    return address.length === 40
        ? util.addressFromEx(poolOptions.address, address)
        : address;
};
2014-05-03 09:29:31 -07:00
}