diff --git a/init.js b/init.js
index 984be2f..243c20a 100644
--- a/init.js
+++ b/init.js
@@ -265,6 +265,7 @@ var spawnPoolWorkers = function(){
                 if (!_lastShareTimes[workerAddress] || !_lastStartTimes[workerAddress]) {
                     _lastShareTimes[workerAddress] = now;
                     _lastStartTimes[workerAddress] = now;
+                    logger.debug('PPLNT', msg.coin, 'Thread '+msg.thread, workerAddress+' joined current round.');
                 }
                 if (_lastShareTimes[workerAddress] != null && _lastShareTimes[workerAddress] > 0) {
                     lastShareTime = _lastShareTimes[workerAddress];
@@ -273,13 +274,13 @@ var spawnPoolWorkers = function(){
 
                 var redisCommands = [];
-                // if its been less than 10 minutes since last share was submitted
+                // if it's been less than 15 minutes since the last share was submitted
                 var timeChangeSec = roundTo(Math.max(now - lastShareTime, 0) / 1000, 4);
                 var timeChangeTotal = roundTo(Math.max(now - lastStartTime, 0) / 1000, 4);
-                if (timeChangeSec < 600) {
+                if (timeChangeSec < 900) {
                     // loyal miner keeps mining :)
                     redisCommands.push(['hincrbyfloat', msg.coin + ':shares:timesCurrent', workerAddress, timeChangeSec]);
-                    logger.debug('PPLNT', msg.coin, 'Thread '+msg.thread, workerAddress+':{totalTimeSec:'+timeChangeTotal+', timeChangeSec:'+timeChangeSec+'}');
+                    //logger.debug('PPLNT', msg.coin, 'Thread '+msg.thread, workerAddress+':{totalTimeSec:'+timeChangeTotal+', timeChangeSec:'+timeChangeSec+'}');
                     connection.multi(redisCommands).exec(function(err, replies){
                         if (err)
                             logger.error('PPLNT', msg.coin, 'Thread '+msg.thread, 'Error with time share processor call to redis ' + JSON.stringify(err));
@@ -287,6 +288,7 @@ var spawnPoolWorkers = function(){
                 } else {
                     // they just re-joined the pool
                     _lastStartTimes[workerAddress] = now;
+                    logger.debug('PPLNT', msg.coin, 'Thread '+msg.thread, workerAddress+' re-joined current round.');
                 }
 
                 // track last time share
diff --git a/libs/paymentProcessor.js b/libs/paymentProcessor.js
index fe1928f..0281251 100644
--- a/libs/paymentProcessor.js
+++ b/libs/paymentProcessor.js
@@ -1,4 +1,5 @@
 var fs = require('fs');
+var request = require('request');
 var redis = require('redis');
 var async = require('async');
@@ -49,18 +50,25 @@ function SetupForPool(logger, poolOptions, setupFinished){
     var logSystem = 'Payments';
     var logComponent = coin;
     var opidCount = 0;
-
-    var minConfShield = 3;
-    var minConfPayout = 3;
+
+    // zcash team recommends 10 confirmations for safety from orphaned blocks
+    var minConfShield = Math.max((processingConfig.minConf || 10), 3);
+    var minConfPayout = Math.max((processingConfig.minConf || 10), 3);
     var maxBlocksPerPayment = processingConfig.maxBlocksPerPayment || 3;
-
+
+    // pplnt - pay per last N time shares
+    var pplntEnabled = processingConfig.paymentMode === "pplnt" || false;
+    var pplntTimeQualify = processingConfig.pplnt || 0.51; // 51%
+
+    var getMarketStats = poolOptions.coin.getMarketStats === true;
     var requireShielding = poolOptions.coin.requireShielding === true;
     var fee = parseFloat(poolOptions.coin.txfee) || parseFloat(0.0004);
 
     logger.debug(logSystem, logComponent, logComponent + ' requireShielding: ' + requireShielding);
     logger.debug(logSystem, logComponent, logComponent + ' payments txfee reserve: ' + fee);
     logger.debug(logSystem, logComponent, logComponent + ' maxBlocksPerPayment: ' + maxBlocksPerPayment);
+    logger.debug(logSystem, logComponent, logComponent + ' PPLNT: ' + pplntEnabled + ', time period: '+pplntTimeQualify);
 
     var daemon = new Stratum.daemon.interface([processingConfig.daemon], function(severity, message){
         logger[severity](logSystem, logComponent, message);
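Reviewer note: the per-share time bookkeeping above feeds the PPLNT qualification check later in this patch (Step 3 of the payment processor). A minimal sketch of that rule, assuming `timesCurrent` holds per-miner seconds and `pplntTimeQualify` defaults to 0.51 as configured above (the helper name is illustrative, not part of the patch):

```js
// Sketch: PPLNT round-time qualification, mirroring the Step 3 logic below.
// maxTime   - most seconds any single miner spent in the round
// minerTime - seconds this miner spent in the round
// shares    - shares the miner submitted during the round
function qualifyShares(shares, minerTime, maxTime, pplntTimeQualify) {
    if (maxTime <= 0) return shares;                    // no time data: pay in full
    var timePeriod = minerTime / maxTime;               // fraction of the round mined
    if (timePeriod >= pplntTimeQualify) return shares;  // loyal miner, full credit
    var lost = shares - (shares * timePeriod);          // prorate latecomers
    return Math.max(shares - lost, 0);
}

// e.g. a miner present 30% of a round with 100 shares and a 51% cutoff
// keeps 100 * 0.30 = 30 shares; a miner present 60% keeps all 100.
qualifyShares(100, 180, 600, 0.51); // -> 30
```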
@@ -236,7 +244,7 @@ function SetupForPool(logger, poolOptions, setupFinished){
                 return;
             }
-            var amount = balanceRound((tBalance - 10000) / magnitude);
+            var amount = satoshisToCoins(tBalance - 10000);
             var params = [poolOptions.address, [{'address': poolOptions.zAddress, 'amount': amount}]];
             daemon.cmd('z_sendmany', params,
                 function (result) {
@@ -269,8 +277,8 @@ function SetupForPool(logger, poolOptions, setupFinished){
                 return;
             }
-            var amount = balanceRound((zBalance - 10000) / magnitude);
-            // no more than 100 ZEC at a time
+            var amount = satoshisToCoins(zBalance - 10000);
+            // unshield no more than 100 ZEC at a time
             if (amount > 100.0)
                 amount = 100.0;
@@ -294,6 +302,36 @@ function SetupForPool(logger, poolOptions, setupFinished){
         );
     }
 
+    // TODO, this needs to be moved out of payments processor
+    function cacheMarketStats() {
+        var marketStatsUpdate = [];
+        var coin = logComponent.replace('_testnet', '');
+        request('https://api.coinmarketcap.com/v1/ticker/'+coin+'/', function (error, response, body) {
+            if (error) {
+                logger.error(logSystem, logComponent, 'Error getting coin market stats from CoinMarketCap ' + JSON.stringify(error));
+                return;
+            }
+            if (response && response.statusCode) {
+                if (response.statusCode == 200) {
+                    if (body) {
+                        var data = JSON.parse(body);
+                        if (data.length > 0) {
+                            marketStatsUpdate.push(['hset', coin + ':stats', 'coinmarketcap', JSON.stringify(data)]);
+                            redisClient.multi(marketStatsUpdate).exec(function(err, results){
+                                if (err){
+                                    logger.error(logSystem, logComponent, 'Error updating coin market stats in redis ' + JSON.stringify(err));
+                                    return;
+                                }
+                            });
+                        }
+                    }
+                } else {
+                    logger.error(logSystem, logComponent, 'Error returned from coinmarketcap ' + JSON.stringify(response));
+                }
+            }
+        });
+    }
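Reviewer note: `cacheMarketStats` assumes the CoinMarketCap v1 ticker returns a JSON array with one object per coin; stats.js later reads element `[0]` back out of redis. A hedged sketch of the shape this code expects (field values are illustrative, and the v1 endpoint is deprecated, so any replacement must keep the same array-of-objects contract):

```js
// Illustrative response body the code above expects from
// https://api.coinmarketcap.com/v1/ticker/<coin>/ (deprecated v1 API):
var exampleBody = JSON.stringify([{
    "id": "zclassic",          // used as the ticker path component
    "symbol": "ZCL",
    "price_usd": "2.50",       // stats.js parses element [0] of this array
    "price_btc": "0.00025"
}]);
// JSON.parse(exampleBody).length > 0 is what gates the redis hset above.
```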
+    // TODO, this needs to be moved out of payments processor
     function cacheNetworkStats () {
         var params = null;
@@ -345,82 +383,116 @@ function SetupForPool(logger, poolOptions, setupFinished){
     }
 
     // run coinbase coin transfers every x minutes
-    var intervalState = 0; // do not send ZtoT and TtoZ and same time, this results in operation failed!
-    var interval = poolOptions.walletInterval * 60 * 1000; // run every x minutes
-    setInterval(function() {
-        // shielding not required for some equihash coins
-        if (requireShielding === true) {
-            intervalState++;
-            switch (intervalState) {
+    var shieldIntervalState = 0; // do not send ZtoT and TtoZ at the same time, this results in operation failed!
+    var shielding_interval = poolOptions.walletInterval * 60 * 1000; // run every x minutes
+    // shielding not required for some equihash coins
+    if (requireShielding === true) {
+        var shieldInterval = setInterval(function() {
+            shieldIntervalState++;
+            switch (shieldIntervalState) {
                 case 1:
                     listUnspent(poolOptions.address, null, minConfShield, false, sendTToZ);
                     break;
                 default:
                     listUnspentZ(poolOptions.zAddress, minConfShield, false, sendZToT);
-                    intervalState = 0;
+                    shieldIntervalState = 0;
                     break;
             }
-        }
+        }, shielding_interval);
+    }
+
+    // stats caching every 58 seconds
+    var stats_interval = 58 * 1000;
+    var statsInterval = setInterval(function() {
         // update network stats using coin daemon
         cacheNetworkStats();
-    }, interval);
+        // update market stats using coinmarketcap
+        if (getMarketStats === true) {
+            cacheMarketStats();
+        }
+    }, stats_interval);
 
-    // check operation statuses every x seconds
-    var opid_interval = poolOptions.walletInterval * 1000;
+    // check operation statuses every 57 seconds
+    var opid_interval = 57 * 1000;
     // shielding not required for some equihash coins
     if (requireShielding === true) {
-        setInterval(function(){
-            var checkOpIdSuccessAndGetResult = function(ops) {
-                ops.forEach(function(op, i){
-                    if (op.status == "success" || op.status == "failed") {
-                        daemon.cmd('z_getoperationresult', [[op.id]], function (result) {
-                            if (result.error) {
-                                logger.warning(logSystem, logComponent, 'Unable to get payment operation id result ' + JSON.stringify(result));
-                            }
-                            if (result.response) {
-                                if (opidCount > 0) {
-                                    opidCount = 0;
-                                }
-                                if (op.status == "failed") {
-                                    if (op.error) {
-                                        logger.error(logSystem, logComponent, "Shielding operation failed " + op.id + " " + op.error.code +", " + op.error.message);
-                                    } else {
-                                        logger.error(logSystem, logComponent, "Shielding operation failed " + op.id);
-                                    }
-                                } else {
-                                    logger.special(logSystem, logComponent, 'Shielding operation success ' + op.id + ' txid: ' + op.result.txid);
-                                }
-                            }
-                        }, true, true);
-                    } else if (op.status == "executing") {
-                        if (opidCount == 0) {
-                            opidCount++;
-                            logger.special(logSystem, logComponent, 'Shielding operation in progress ' + op.id );
-                        }
-                    }
-                });
-            };
-            daemon.cmd('z_getoperationstatus', null, function (result) {
+        var checkOpids = function() {
+            var checkOpIdSuccessAndGetResult = function(ops) {
+                var batchRPC = [];
+                ops.forEach(function(op, i){
+                    if (op.status == "success" || op.status == "failed") {
+                        batchRPC.push(['z_getoperationresult', [[op.id]]]);
+                        if (opidCount > 0) {
+                            opidCount = 0;
+                        }
+                        if (op.status == "failed") {
+                            if (op.error) {
+                                logger.error(logSystem, logComponent, "Shielding operation failed " + op.id + " " + op.error.code +", " + op.error.message);
+                            } else {
+                                logger.error(logSystem, logComponent, "Shielding operation failed " + op.id);
+                            }
+                        } else {
+                            logger.special(logSystem, logComponent, 'Shielding operation success ' + op.id + ' txid: ' + op.result.txid);
+                        }
+                    } else if (op.status == "executing") {
+                        if (opidCount == 0) {
+                            opidCount++;
+                            logger.special(logSystem, logComponent, 'Shielding operation in progress ' + op.id );
+                        }
+                    }
+                });
+                if (batchRPC.length <= 0) {
+                    opidInterval = setInterval(checkOpids, opid_interval);
+                    return;
+                }
+                daemon.batchCmd(batchRPC, function(error, results){
+                    if (error || !results) {
+                        logger.error(logSystem, logComponent, 'Error with z_getoperationresult ' + JSON.stringify(error));
+                        return;
+                    }
+                    results.forEach(function(result, i) {
+                        if (parseFloat(result.result[i].execution_secs || 0) > parseFloat(poolOptions.walletInterval))
+                            logger.warning(logSystem, logComponent, 'Increase walletInterval in pool_config. opid execution took '+result.result[i].execution_secs+' secs.');
+                    });
+                    opidInterval = setInterval(checkOpids, opid_interval);
+                });
+            };
+            clearInterval(opidInterval);
+            daemon.cmd('z_getoperationstatus', null, function (result) {
                 if (result.error) {
                     logger.warning(logSystem, logComponent, 'Unable to get operation ids for clearing.');
-                }
-                if (result.response) {
+                    opidInterval = setInterval(checkOpids, opid_interval);
+                } else if (result.response) {
                     checkOpIdSuccessAndGetResult(result.response);
+                } else {
+                    opidInterval = setInterval(checkOpids, opid_interval);
                 }
-            }, true, true);
-        }, opid_interval);
+            }, true, true);
+        }
+
+        var opidInterval = setInterval(checkOpids, opid_interval);
     }
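Reviewer note: the opid checker above moves from a fixed `setInterval` to a self-rescheduling pattern, so a slow `z_getoperationresult` batch can never overlap the next poll. The pattern in isolation, as a minimal sketch (`doAsyncWork` is a stand-in for the daemon RPC round trip, not a real API):

```js
// Sketch: self-rescheduling poller, as used by checkOpids above.
// Each tick cancels the pending timer, does the async work, and only
// re-arms the timer once the async path has completed.
function doAsyncWork(done) { setTimeout(done, 100); } // stand-in for the RPC

var pollInterval;
function poll() {
    clearInterval(pollInterval);                  // stop the clock while we work
    doAsyncWork(function onDone() {
        pollInterval = setInterval(poll, 57000);  // re-arm only after completion
    });
}
pollInterval = setInterval(poll, 57000);
```

One caveat worth keeping in mind: every completion path must re-arm the timer (the patch does this in each branch), otherwise polling silently stops.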
 
+    function roundTo(n, digits) {
+        if (digits === undefined) {
+            digits = 0;
+        }
+        var multiplicator = Math.pow(10, digits);
+        n = parseFloat((n * multiplicator).toFixed(11));
+        var test = (Math.round(n) / multiplicator);
+        return +(test.toFixed(digits));
+    }
 
     var satoshisToCoins = function(satoshis){
-        return parseFloat((satoshis / magnitude).toFixed(coinPrecision));
+        return roundTo((satoshis / magnitude), coinPrecision);
     };
 
     var coinsToSatoshies = function(coins){
         return Math.round(coins * magnitude);
     };
 
-    function balanceRound(number) {
-        return parseFloat((Math.round(number * 100000000) / 100000000).toFixed(8));
+    function coinsRound(number) {
+        return roundTo(number, coinPrecision);
     }
 
     function checkForDuplicateBlockHeight(rounds, height) {
@@ -452,8 +524,10 @@ function SetupForPool(logger, poolOptions, setupFinished){
         var endRPCTimer = function(){ timeSpentRPC += Date.now() - startTimeRedis };
 
         async.waterfall([
-
-            /* Call redis to get an array of rounds and balances -
-               which are coinbase transactions and block heights from submitted blocks. */
+            /*
+                Step 1 - build workers and rounds objects from redis
+                         * removes duplicate block submissions from redis
+            */
            function(callback){
                 startRedisTimer();
                 redisClient.multi([
@@ -466,12 +540,12 @@ function SetupForPool(logger, poolOptions, setupFinished){
                         callback(true);
                         return;
                     }
-                    // build worker balances
+                    // build workers object from :balances
                     var workers = {};
                     for (var w in results[0]){
                         workers[w] = {balance: coinsToSatoshies(parseFloat(results[0][w]))};
                     }
-                    // build initial rounds data from blocksPending
+                    // build rounds object from :blocksPending
                     var rounds = results[1].map(function(r){
                         var details = r.split(':');
                         return {
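Reviewer note: `roundTo` replaces the old `balanceRound` and now backs both `satoshisToCoins` and `coinsRound`. A worked example of why the intermediate `toFixed(11)` matters, assuming `magnitude = 1e8` and `coinPrecision = 8` as elsewhere in this file:

```js
// IEEE-754 drift is what roundTo guards against:
// (1.005 * 100) === 100.49999999999999 in doubles, so a naive
// Math.round() would yield 1.00 instead of 1.01.
roundTo(1.005, 2);            // -> 1.01 (toFixed(11) snaps 100.4999... to 100.5)

// converting 1234567890 satoshis at magnitude 1e8, coinPrecision 8:
satoshisToCoins(1234567890);  // -> 12.3456789
coinsToSatoshies(12.3456789); // -> 1234567890 (Math.round removes drift)
```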
@@ -561,263 +635,303 @@ function SetupForPool(logger, poolOptions, setupFinished){
             },
 
-            /* Does a batch rpc call to daemon with all the transaction hashes to see if they are confirmed yet.
-               It also adds the block reward amount to the round object - which the daemon gives also gives us. */
+            /*
+                Step 2 - check if mined block coinbase tx are ready for payment
+                         * adds block reward to rounds object
+                         * adds block confirmations count to rounds object
+                         * updates confirmation counts in redis
+            */
             function(workers, rounds, callback){
-
-                // first verify block confirmations by block hash
-                var batchRPCcommand2 = rounds.map(function(r){
-                    return ['getblock', [r.blockHash]];
+                // get pending block tx details
+                var batchRPCcommand = rounds.map(function(r){
+                    return ['gettransaction', [r.txHash]];
                 });
-                // guarantee a response for batchRPCcommand2
-                batchRPCcommand2.push(['getblockcount']);
+                // get account address (not implemented at this time)
+                batchRPCcommand.push(['getaccount', [poolOptions.address]]);
 
                 startRPCTimer();
-                daemon.batchCmd(batchRPCcommand2, function(error, blockDetails){
+                daemon.batchCmd(batchRPCcommand, function(error, txDetails){
                     endRPCTimer();
-
-                    // error getting block info by hash?
-                    if (error || !blockDetails){
-                        logger.error(logSystem, logComponent, 'Check finished - daemon rpc error with batch getblock '
-                            + JSON.stringify(error));
+                    if (error || !txDetails){
+                        logger.error(logSystem, logComponent, 'Check finished - daemon rpc error with batch gettransactions ' + JSON.stringify(error));
                         callback(true);
                         return;
                     }
-                    // update confirmations in redis for pending blocks
-                    var confirmsUpdate = blockDetails.map(function(b) {
-                        if (b.result != null && b.result.confirmations > 0) {
-                            if (b.result.confirmations > 100) {
-                                return ['hdel', logComponent + ':blocksPendingConfirms', b.result.hash];
-                            }
-                            return ['hset', logComponent + ':blocksPendingConfirms', b.result.hash, b.result.confirmations];
-                        }
-                        return null;
-                    });
-
-                    // filter nulls, last item is always null...
-                    confirmsUpdate = confirmsUpdate.filter(function(val) { return val !== null; });
-                    // guarantee at least one redis update
-                    if (confirmsUpdate.length < 1)
-                        confirmsUpdate.push(['hset', logComponent + ':blocksPendingConfirms', 0, 0]);
-
-                    startRedisTimer();
-                    redisClient.multi(confirmsUpdate).exec(function(error, updated){
-                        endRedisTimer();
-
-                        if (error){
-                            logger.error(logSystem, logComponent, 'failed to update pending block confirmations'
-                                + JSON.stringify(error));
-                            callback(true);
-                            return;
-                        }
-
-                        // get pending block transaction details from coin daemon
-                        var batchRPCcommand = rounds.map(function(r){
-                            return ['gettransaction', [r.txHash]];
-                        });
-                        // get account address (not implemented in zcash at this time..)
-                        batchRPCcommand.push(['getaccount', [poolOptions.address]]);
-
-                        startRPCTimer();
-                        daemon.batchCmd(batchRPCcommand, function(error, txDetails){
-                            endRPCTimer();
-
-                            if (error || !txDetails){
-                                logger.error(logSystem, logComponent, 'Check finished - daemon rpc error with batch gettransactions '
-                                    + JSON.stringify(error));
-                                callback(true);
-                                return;
-                            }
-
-                            var addressAccount = "";
-
-                            // check for transaction errors and generated coins
-                            txDetails.forEach(function(tx, i){
-
-                                if (i === txDetails.length - 1){
-                                    addressAccount = tx.result;
-                                    return;
-                                }
-
-                                var round = rounds[i];
-                                if (tx.error && tx.error.code === -5){
-                                    logger.warning(logSystem, logComponent, 'Daemon reports invalid transaction: ' + round.txHash);
-                                    round.category = 'kicked';
-                                    return;
-                                }
-                                else if (!tx.result.details || (tx.result.details && tx.result.details.length === 0)){
-                                    logger.warning(logSystem, logComponent, 'Daemon reports no details for transaction: ' + round.txHash);
-                                    round.category = 'kicked';
-                                    return;
-                                }
-                                else if (tx.error || !tx.result){
-                                    logger.error(logSystem, logComponent, 'Odd error with gettransaction ' + round.txHash + ' '
-                                        + JSON.stringify(tx));
-                                    return;
-                                }
-
-                                var generationTx = tx.result.details.filter(function(tx){
-                                    return tx.address === poolOptions.address;
-                                })[0];
-
-                                if (!generationTx && tx.result.details.length === 1){
-                                    generationTx = tx.result.details[0];
-                                }
-
-                                if (!generationTx){
-                                    logger.error(logSystem, logComponent, 'Missing output details to pool address for transaction ' + round.txHash);
-                                    return;
-                                }
-
-                                round.category = generationTx.category;
-                                if (round.category === 'generate') {
-                                    round.reward = balanceRound(generationTx.amount - fee) || balanceRound(generationTx.value - fee); // TODO: Adjust fees to be dynamic
-                                }
-
-                            });
-
-                            var canDeleteShares = function(r){
-                                for (var i = 0; i < rounds.length; i++){
-                                    var compareR = rounds[i];
-                                    if ((compareR.height === r.height)
-                                        && (compareR.category !== 'kicked')
-                                        && (compareR.category !== 'orphan')
-                                        && (compareR.serialized !== r.serialized)){
-                                        return false;
-                                    }
-                                }
-                                return true;
-                            };
-
-                            // limit blocks paid per payment round
-                            var payingBlocks = 0;
-
-                            //filter out all rounds that are immature (not confirmed or orphaned yet)
-                            rounds = rounds.filter(function(r){
-                                // only pay max blocks at a time
-                                if (payingBlocks >= maxBlocksPerPayment)
-                                    return false;
-
-                                switch (r.category) {
-                                    case 'orphan':
-                                    case 'kicked':
-                                        r.canDeleteShares = canDeleteShares(r);
-                                        return true;
-                                    case 'generate':
-                                        payingBlocks++;
-                                        return true;
-
-                                    default:
-                                        return false;
-                                }
-                            });
-
-                            // TODO: make tx fees dynamic
-                            var feeSatoshi = fee * magnitude;
-
-                            // calculate what the pool owes its miners
-                            var totalOwed = parseInt(0);
-                            for (var i = 0; i < rounds.length; i++) {
-                                // only pay generated blocks, not orphaned or kicked
-                                if (rounds[i].category == 'generate') {
-                                    totalOwed = totalOwed + Math.round(rounds[i].reward * magnitude) - feeSatoshi;
-                                }
-                            }
-
-                            var notAddr = null;
-                            if (requireShielding === true) {
-                                notAddr = poolOptions.address;
-                            }
-
-                            // check if we have enough tAddress funds to brgin payment processing
+                    var confirmsUpdate = [];
+                    var addressAccount = "";
+
+                    // check for transaction errors and generated coins
+                    txDetails.forEach(function(tx, i){
+                        if (i === txDetails.length - 1){
+                            if (tx.result && tx.result.toString().length > 0) {
+                                addressAccount = tx.result.toString();
+                            }
+                            return;
+                        }
+                        var round = rounds[i];
+                        // look for transaction errors
+                        if (tx.error && tx.error.code === -5){
+                            logger.warning(logSystem, logComponent, 'Daemon reports invalid transaction: ' + round.txHash);
+                            round.category = 'kicked';
+                            return;
+                        }
+                        else if (!tx.result.details || (tx.result.details && tx.result.details.length === 0)){
+                            logger.warning(logSystem, logComponent, 'Daemon reports no details for transaction: ' + round.txHash);
+                            round.category = 'kicked';
+                            return;
+                        }
+                        else if (tx.error || !tx.result){
+                            logger.error(logSystem, logComponent, 'Odd error with gettransaction ' + round.txHash + ' ' + JSON.stringify(tx));
+                            return;
+                        }
+                        // get the coin base generation tx
+                        var generationTx = tx.result.details.filter(function(tx){
+                            return tx.address === poolOptions.address;
+                        })[0];
+                        if (!generationTx && tx.result.details.length === 1){
+                            generationTx = tx.result.details[0];
+                        }
+                        if (!generationTx){
+                            logger.error(logSystem, logComponent, 'Missing output details to pool address for transaction ' + round.txHash);
+                            return;
+                        }
+                        // get transaction category for round
+                        round.category = generationTx.category;
+                        round.confirmations = parseInt((tx.result.confirmations || 0));
+                        // get reward for newly generated blocks
+                        if (round.category === 'generate') {
+                            round.reward = coinsRound(parseFloat(generationTx.amount || generationTx.value));
+                        }
+                        // update confirmations in redis
+                        confirmsUpdate.push(['hset', coin + ':blocksPendingConfirms', round.blockHash, round.confirmations]);
+                    });
+
+                    var canDeleteShares = function(r){
+                        for (var i = 0; i < rounds.length; i++){
+                            var compareR = rounds[i];
+                            if ((compareR.height === r.height)
+                                && (compareR.category !== 'kicked')
+                                && (compareR.category !== 'orphan')
+                                && (compareR.serialized !== r.serialized)){
+                                return false;
+                            }
+                        }
+                        return true;
+                    };
+
+                    // limit blocks paid per payment round
+                    var payingBlocks = 0;
+                    //filter out all rounds that are immature (not confirmed or orphaned yet)
+                    rounds = rounds.filter(function(r){
+                        // only pay max blocks at a time
+                        if (payingBlocks >= maxBlocksPerPayment)
+                            return false;
+
+                        switch (r.category) {
+                            case 'orphan':
+                            case 'kicked':
+                                r.canDeleteShares = canDeleteShares(r);
+                                return true;
+                            case 'generate':
+                                payingBlocks++;
+                                return true;
+                            default:
+                                return false;
+                        }
+                    });
+
+                    // TODO: make tx fees dynamic
+                    var feeSatoshi = coinsToSatoshies(fee);
+                    // calculate what the pool owes its miners
+                    var totalOwed = parseInt(0);
+                    for (var i = 0; i < rounds.length; i++) {
+                        // only pay generated blocks, not orphaned or kicked
+                        if (rounds[i].category == 'generate') {
+                            totalOwed = totalOwed + coinsToSatoshies(rounds[i].reward) - feeSatoshi;
+                        }
+                    }
+
+                    var notAddr = null;
+                    if (requireShielding === true) {
+                        notAddr = poolOptions.address;
+                    }
+
+                    // update confirmations for pending blocks in redis
+                    if (confirmsUpdate.length > 0) {
+                        startRedisTimer();
+                        redisClient.multi(confirmsUpdate).exec(function(error, result){
+                            endRedisTimer();
+                            if (error) {
+                                logger.error(logSystem, logComponent, 'Error could not update confirmations for pending blocks in redis ' + JSON.stringify(error));
+                                return callback(true);
+                            }
+                            // check if we have enough tAddress funds to begin payment processing
                             listUnspent(null, notAddr, minConfPayout, false, function (error, tBalance){
                                 if (error) {
                                     logger.error(logSystem, logComponent, 'Error checking pool balance before processing payments.');
                                     return callback(true);
                                 } else if (tBalance < totalOwed) {
-                                    logger.error(logSystem, logComponent, 'Insufficient funds to process payments for ' + payingBlocks + ' blocks ('+(tBalance / magnitude).toFixed(8) + ' < ' + (totalOwed / magnitude).toFixed(8)+'). Possibly waiting for shielding process.');
+                                    logger.error(logSystem, logComponent, 'Insufficient funds ('+satoshisToCoins(tBalance) + ') to process payments (' + satoshisToCoins(totalOwed)+') for ' + payingBlocks + ' blocks; possibly waiting for txs.');
                                     return callback(true);
-                                } else {
-                                    // zcash daemon does not support account feature
-                                    addressAccount = "";
-                                    callback(null, workers, rounds, addressAccount);
                                 }
-                            })
-
+                                // account feature not implemented at this time
+                                addressAccount = "";
+                                // begin payments for generated coins
+                                callback(null, workers, rounds, addressAccount);
+                            });
                         });
-                    });
-                });
+                    } else {
+                        // no pending blocks, need to find a block!
+                        return callback(true);
+                    }
+                })
             },
 
-            /* Does a batch redis call to get shares contributed to each round. Then calculates the reward
-               amount owned to each miner for each round. */
+            /*
+                Step 3 - lookup shares in redis and calculate rewards
+            */
             function(workers, rounds, addressAccount, callback){
-
-                var shareLookups = rounds.map(function(r){
-                    return ['hgetall', coin + ':shares:round' + r.height]
+                // pplnt times lookup
+                var timeLookups = rounds.map(function(r){
+                    return ['hgetall', coin + ':shares:times' + r.height]
                 });
-
                 startRedisTimer();
-                redisClient.multi(shareLookups).exec(function(error, allWorkerShares){
+                redisClient.multi(timeLookups).exec(function(error, allWorkerTimes){
                     endRedisTimer();
-
                     if (error){
-                        callback('Check finished - redis error with multi get rounds share');
+                        callback('Check finished - redis error with multi get rounds time');
                         return;
                     }
-
-                    rounds.forEach(function(round, i){
-                        var workerShares = allWorkerShares[i];
-
-                        if (!workerShares){
-                            logger.error(logSystem, logComponent, 'No worker shares for round: '
-                                + round.height + ' blockHash: ' + round.blockHash);
+                    var shareLookups = rounds.map(function(r){
+                        return ['hgetall', coin + ':shares:round' + r.height];
+                    });
+                    startRedisTimer();
+                    redisClient.multi(shareLookups).exec(function(error, allWorkerShares){
+                        endRedisTimer();
+                        if (error){
+                            callback('Check finished - redis error with multi get rounds share');
                             return;
                         }
 
-                        switch (round.category){
-                            case 'kicked':
-                            case 'orphan':
-                                round.workerShares = workerShares;
-                                break;
+                        // error detection
+                        var err = null;
 
-                            case 'generate':
-                                /* We found a confirmed block! Now get the reward for it and calculate how much
-                                   we owe each miner based on the shares they submitted during that block round. */
-                                var reward = parseInt(round.reward * magnitude);
-
-                                var totalShares = Object.keys(workerShares).reduce(function(p, c){
-                                    return p + parseFloat(workerShares[c])
-                                }, 0);
-
-                                for (var workerAddress in workerShares){
-                                    var percent = parseFloat(workerShares[workerAddress]) / totalShares;
-                                    var workerRewardTotal = Math.floor(reward * percent);
-                                    var worker = workers[workerAddress] = (workers[workerAddress] || {});
-                                    worker.totalShares = (worker.totalShares || 0) + parseFloat(workerShares[workerAddress]);
-                                    worker.reward = (worker.reward || 0) + workerRewardTotal;
-                                }
-                                break;
+                        // total shares
+                        rounds.forEach(function(round, i){
+                            var workerShares = allWorkerShares[i];
+                            if (!workerShares){
+                                err = true;
+                                logger.error(logSystem, logComponent, 'No worker shares for round: ' + round.height + ' blockHash: ' + round.blockHash);
+                                return;
+                            }
+                            var workerTimes = allWorkerTimes[i];
+                            switch (round.category){
+                                case 'kicked':
+                                case 'orphan':
+                                    round.workerShares = workerShares;
+                                    break;
+                                case 'generate':
+                                    // TODO: make tx fees dynamic
+                                    var feeSatoshi = coinsToSatoshies(fee);
+                                    var reward = coinsToSatoshies(round.reward) - feeSatoshi;
+                                    var totalShares = parseFloat(0);
+                                    var sharesLost = parseFloat(0);
+                                    // find most time spent in this round by single worker
+                                    var maxTime = 0;
+                                    for (var workerAddress in workerTimes){
+                                        if (maxTime < parseFloat(workerTimes[workerAddress]))
+                                            maxTime = parseFloat(workerTimes[workerAddress]);
+                                    }
+                                    // total up shares for round
+                                    for (var workerAddress in workerShares){
+                                        var worker = workers[workerAddress] = (workers[workerAddress] || {});
+                                        var shares = parseFloat((workerShares[workerAddress] || 0));
+                                        // if pplnt mode
+                                        if (pplntEnabled === true && maxTime > 0) {
+                                            var tshares = shares;
+                                            var lost = parseFloat(0);
+                                            var address = workerAddress.split('.')[0];
+                                            if (workerTimes[address] != null && parseFloat(workerTimes[address]) > 0) {
+                                                var timePeriod = roundTo(parseFloat(workerTimes[address] || 1) / maxTime , 2);
+                                                if (timePeriod > 0 && timePeriod < pplntTimeQualify) {
+                                                    var lost = shares - (shares * timePeriod);
+                                                    sharesLost += lost;
+                                                    shares = Math.max(shares - lost, 0);
+                                                    logger.warning(logSystem, logComponent, 'PPLNT: Reduced shares for '+workerAddress+' round:' + round.height + ' maxTime:'+maxTime+'sec timePeriod:'+roundTo(timePeriod,6)+' shares:'+tshares+' lost:'+lost+' new:'+shares);
+                                                }
+                                                if (timePeriod > 1.0) {
+                                                    err = true;
+                                                    logger.error(logSystem, logComponent, 'Time share period is greater than 1.0 for '+workerAddress+' round:' + round.height + ' blockHash:' + round.blockHash);
+                                                    return;
+                                                }
+                                            } else {
+                                                logger.warning(logSystem, logComponent, 'PPLNT: Missing time share period for '+workerAddress+', miner shares qualified in round ' + round.height);
+                                            }
+                                        }
+                                        worker.roundShares = shares;
+                                        worker.totalShares = parseFloat(worker.totalShares || 0) + shares;
+                                        totalShares += shares;
+                                    }
+
+                                    //console.log('--REWARD DEBUG--------------');
+                                    // calculate rewards for round
+                                    var totalAmount = 0;
+                                    for (var workerAddress in workerShares){
+                                        var worker = workers[workerAddress] = (workers[workerAddress] || {});
+                                        var percent = parseFloat(worker.roundShares) / totalShares;
+                                        if (percent > 1.0) {
+                                            err = true;
+                                            logger.error(logSystem, logComponent, 'Share percent is greater than 1.0 for '+workerAddress+' round:' + round.height + ' blockHash:' + round.blockHash);
+                                            return;
+                                        }
+                                        // calculate workers reward for this round
+                                        var workerRewardTotal = Math.round(reward * percent);
+                                        // add to total reward for worker
+                                        worker.reward = (worker.reward || 0) + workerRewardTotal;
+                                        // add to total amount sent to all workers
+                                        totalAmount += worker.reward;
+                                        //console.log('rewardAmount: '+workerAddress+' '+workerRewardTotal);
+                                        //console.log('totalAmount: '+workerAddress+' '+worker.reward);
+                                    }
+                                    //console.log('totalAmount: '+totalAmount);
+                                    //console.log('blockHeight: '+round.height);
+                                    //console.log('blockReward: '+reward);
+                                    //console.log('totalShares: '+totalShares);
+                                    //console.log('sharesLost: '+sharesLost);
+                                    //console.log('----------------------------');
+                                    break;
+                            }
+                        });
+
+                        // if there were no errors
+                        if (err === null) {
+                            // continue payments
+                            callback(null, workers, rounds, addressAccount);
+                        } else {
+                            // stop waterfall flow, do not process payments
+                            callback(true);
                         }
                     });
-
-                    callback(null, workers, rounds, addressAccount);
                 });
             },
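Reviewer note: the reward split above now works entirely in satoshis and rounds each miner's cut with `Math.round`. A worked example, assuming a 625000000-satoshi reward after fees and two qualified miners (numbers illustrative):

```js
// reward = 625000000 satoshis; roundShares: alice 60, bob 40
var reward = 625000000;
var totalShares = 60 + 40;
Math.round(reward * (60 / totalShares)); // alice -> 375000000
Math.round(reward * (40 / totalShares)); // bob   -> 250000000
// Math.round keeps each miner's cut within one satoshi of exact;
// the percent > 1.0 guard above catches corrupt share data instead.
```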
 
-            /* Calculate if any payments are ready to be sent and trigger them sending
-               Get balance different for each address and pass it along as object of latest balances such as
-               {worker1: balance1, worker2, balance2}
-               when deciding the sent balance, it the difference should be -1*amount they had in db,
-               if not sending the balance, the differnce should be +(the amount they earned this round)
-            */
+            /*
+                Step 4 - Generate RPC commands to send payments
+                When deciding the sent balance, the difference should be -1*(amount they had in db);
+                if not sending the balance, the difference should be +(the amount they earned this round)
+            */
             function(workers, rounds, addressAccount, callback) {
 
                 var trySend = function (withholdPercent) {
                     var addressAmounts = {};
+                    var balanceAmounts = {};
+                    var shareAmounts = {};
                     var minerTotals = {};
                     var totalSent = 0;
                     var totalShares = 0;
@@ -827,12 +941,13 @@ function SetupForPool(logger, poolOptions, setupFinished){
                         totalShares += (worker.totalShares || 0)
                         worker.balance = worker.balance || 0;
                         worker.reward = worker.reward || 0;
-                        var toSend = balanceRound(satoshisToCoins(Math.floor((worker.balance + worker.reward) * (1 - withholdPercent))));
+                        // get miner payout totals
+                        var toSendSatoshis = Math.round((worker.balance + worker.reward) * (1 - withholdPercent));
                         var address = worker.address = (worker.address || getProperAddress(w.split('.')[0]));
                         if (minerTotals[address] != null && minerTotals[address] > 0) {
-                            minerTotals[address] = balanceRound(minerTotals[address] + toSend);
+                            minerTotals[address] += toSendSatoshis;
                         } else {
-                            minerTotals[address] = toSend;
+                            minerTotals[address] = toSendSatoshis;
                         }
                     }
                     // now process each workers balance, and pay the miner
@@ -840,23 +955,40 @@ function SetupForPool(logger, poolOptions, setupFinished){
                         var worker = workers[w];
                         worker.balance = worker.balance || 0;
                         worker.reward = worker.reward || 0;
-                        var toSend = Math.floor((worker.balance + worker.reward) * (1 - withholdPercent));
+                        var toSendSatoshis = Math.round((worker.balance + worker.reward) * (1 - withholdPercent));
                         var address = worker.address = (worker.address || getProperAddress(w.split('.')[0]));
                         // if miners total is enough, go ahead and add this worker balance
-                        if (minerTotals[address] >= satoshisToCoins(minPaymentSatoshis)) {
-                            totalSent += toSend;
-                            worker.sent = balanceRound(satoshisToCoins(toSend));
-                            worker.balanceChange = Math.min(worker.balance, toSend) * -1;
+                        if (minerTotals[address] >= minPaymentSatoshis) {
+                            totalSent += toSendSatoshis;
+                            // send funds
+                            worker.sent = satoshisToCoins(toSendSatoshis);
+                            worker.balanceChange = Math.min(worker.balance, toSendSatoshis) * -1;
                             // multiple workers may have same address, add them up
                             if (addressAmounts[address] != null && addressAmounts[address] > 0) {
-                                addressAmounts[address] = balanceRound(addressAmounts[address] + worker.sent);
+                                addressAmounts[address] = coinsRound(addressAmounts[address] + worker.sent);
                             } else {
                                 addressAmounts[address] = worker.sent;
                             }
-                        }
-                        else {
-                            worker.balanceChange = Math.max(toSend - worker.balance, 0);
+                        } else {
+                            // add to balance, not enough minerals
                             worker.sent = 0;
+                            worker.balanceChange = Math.max(toSendSatoshis - worker.balance, 0);
+                            // track balance changes
+                            if (worker.balanceChange > 0) {
+                                if (balanceAmounts[address] != null && balanceAmounts[address] > 0) {
+                                    balanceAmounts[address] = coinsRound(balanceAmounts[address] + satoshisToCoins(worker.balanceChange));
+                                } else {
+                                    balanceAmounts[address] = satoshisToCoins(worker.balanceChange);
+                                }
+                            }
+                        }
+                        // track share work
+                        if (worker.totalShares > 0) {
+                            if (shareAmounts[address] != null && shareAmounts[address] > 0) {
+                                shareAmounts[address] += worker.totalShares;
+                            } else {
+                                shareAmounts[address] = worker.totalShares;
+                            }
                         }
                     }
 
@@ -865,49 +997,40 @@ function SetupForPool(logger, poolOptions, setupFinished){
                         callback(null, workers, rounds);
                         return;
                     }
-                    /*
-                    var undoPaymentsOnError = function(workers) {
-                        totalSent = 0;
-                        // TODO, set round.category to immature, to attempt to pay again
-                        // we did not send anything to any workers
-                        for (var w in workers) {
-                            var worker = workers[w];
-                            if (worker.sent > 0) {
-                                worker.balanceChange = 0;
-                                worker.sent = 0;
-                            }
-                        }
-                    };
-                    */
-                    // perform the sendmany operation
+
+                    // POINT OF NO RETURN! GOOD LUCK!
+                    // WE ARE SENDING PAYMENT CMD TO DAEMON
+
+                    // perform the sendmany operation .. addressAccount
                     daemon.cmd('sendmany', ["", addressAmounts], function (result) {
                         // check for failed payments, there are many reasons
                         if (result.error && result.error.code === -6) {
-                            // not enough minerals...
-                            var higherPercent = withholdPercent + 0.01;
-                            logger.warning(logSystem, logComponent, 'Not enough funds to cover the tx fees for sending out payments, decreasing rewards by '
-                                + (higherPercent * 100) + '% and retrying');
-
+                            // we thought we had enough funds to send payments, but apparently not...
+                            // try decreasing payments by a small percent to cover unexpected tx fees?
+                            var higherPercent = withholdPercent + 0.001;
+                            logger.warning(logSystem, logComponent, 'Not enough funds to cover the tx fees for sending out payments, decreasing rewards by ' + (higherPercent * 100) + '% and retrying');
                             trySend(higherPercent);
+                            //callback(true); not a complete failure...
+                            return;
                         }
                         else if (result.error && result.error.code === -5) {
                             // invalid address specified in addressAmounts array
                             logger.error(logSystem, logComponent, 'Error sending payments ' + result.error.message);
-                            //undoPaymentsOnError(workers);
+                            // payment failed, prevent updates to redis
                             callback(true);
                             return;
                         }
                         else if (result.error && result.error.message != null) {
-                            // unknown error from daemon
+                            // error from daemon
                            logger.error(logSystem, logComponent, 'Error sending payments ' + result.error.message);
-                            //undoPaymentsOnError(workers);
+                            // payment failed, prevent updates to redis
                             callback(true);
                             return;
                         }
                         else if (result.error) {
-                            // some other unknown error
+                            // unknown error
                            logger.error(logSystem, logComponent, 'Error sending payments ' + JSON.stringify(result.error));
-                            //undoPaymentsOnError(workers);
+                            // payment failed, prevent updates to redis
                             callback(true);
                             return;
                         }
@@ -921,7 +1044,7 @@ function SetupForPool(logger, poolOptions, setupFinished){
 
                             if (txid != null) {
                                 // it worked, congrats on your pools payout ;)
-                                logger.special(logSystem, logComponent, 'Sent ' + (totalSent / magnitude).toFixed(8)
+                                logger.special(logSystem, logComponent, 'Sent ' + satoshisToCoins(totalSent)
                                     + ' to ' + Object.keys(addressAmounts).length + ' miners; txid: '+txid);
 
                                 if (withholdPercent > 0) {
@@ -931,11 +1054,12 @@ function SetupForPool(logger, poolOptions, setupFinished){
                                 }
 
                                 // save payments data to redis
-                                var paymentBlocks = rounds.map(function(r){
+                                var paymentBlocks = rounds.filter(function(r){
                                     return r.category == 'generate';
                                 }).map(function(r){
                                     return parseInt(r.height);
                                 });
+
                                 var paymentsUpdate = [];
-                                var paymentsData = {time:Date.now(), txid:txid, shares:totalShares, paid:balanceRound(totalSent / magnitude), miners:Object.keys(addressAmounts).length, blocks: paymentBlocks, amounts: addressAmounts};
+                                var paymentsData = {time:Date.now(), txid:txid, shares:totalShares, paid:satoshisToCoins(totalSent), miners:Object.keys(addressAmounts).length, blocks: paymentBlocks, amounts: addressAmounts, balances: balanceAmounts, work:shareAmounts};
                                 paymentsUpdate.push(['zadd', logComponent + ':payments', Date.now(), JSON.stringify(paymentsData)]);
                                 startRedisTimer();
                                 redisClient.multi(paymentsUpdate).exec(function(error, payments){
@@ -943,6 +1067,7 @@ function SetupForPool(logger, poolOptions, setupFinished){
                                     if (error){
                                         logger.error(logSystem, logComponent, 'Error redis save payments data ' + JSON.stringify(payments));
                                     }
+                                    // perform final redis updates
                                     callback(null, workers, rounds);
                                 });
@@ -958,12 +1083,16 @@ function SetupForPool(logger, poolOptions, setupFinished){
                             }
                         }
                     }, true, true);
-
                 };
                 trySend(0);
             },
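Reviewer note: `sendmany` takes an empty account string plus an address-to-amount map. A hedged sketch of the payload this code builds (addresses and amounts illustrative):

```js
// Shape of the arguments passed to daemon.cmd('sendmany', ...) above:
var addressAmounts = {
    "t1exampleMinerAddressA": 12.3456789,  // coins, already coinsRound()'d
    "t1exampleMinerAddressB": 0.98765432
};
var params = ["", addressAmounts]; // "" is the unused from-account
// A -6 RPC error here means insufficient funds; trySend retries with a
// 0.1% withholding step (withholdPercent + 0.001) instead of the old 1%.
```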
 
+
+            /*
+                Step 5 - Final redis commands
+            */
             function(workers, rounds, callback){
 
                 var totalPaid = parseFloat(0);
@@ -979,19 +1108,20 @@ function SetupForPool(logger, poolOptions, setupFinished){
                             'hincrbyfloat',
                             coin + ':balances',
                             w,
-                            balanceRound(satoshisToCoins(worker.balanceChange))
+                            satoshisToCoins(worker.balanceChange)
                         ]);
                     }
                     if (worker.sent !== 0){
-                        workerPayoutsCommand.push(['hincrbyfloat', coin + ':payouts', w, balanceRound(worker.sent)]);
-                        totalPaid = balanceRound(totalPaid + worker.sent);
+                        workerPayoutsCommand.push(['hincrbyfloat', coin + ':payouts', w, coinsRound(worker.sent)]);
+                        totalPaid = coinsRound(totalPaid + worker.sent);
                     }
                 }
 
                 var movePendingCommands = [];
                 var roundsToDelete = [];
                 var orphanMergeCommands = [];
-
+                var confirmsToDelete = [];
+
                 var moveSharesToCurrent = function(r){
                     var workerShares = r.workerShares;
                     if (workerShares != null) {
@@ -1005,17 +1135,22 @@ function SetupForPool(logger, poolOptions, setupFinished){
                 rounds.forEach(function(r){
                     switch(r.category){
                         case 'kicked':
+                            confirmsToDelete.push(['hdel', coin + ':blocksPendingConfirms', r.blockHash]);
                             movePendingCommands.push(['smove', coin + ':blocksPending', coin + ':blocksKicked', r.serialized]);
                         case 'orphan':
+                            confirmsToDelete.push(['hdel', coin + ':blocksPendingConfirms', r.blockHash]);
                             movePendingCommands.push(['smove', coin + ':blocksPending', coin + ':blocksOrphaned', r.serialized]);
                             if (r.canDeleteShares){
                                 moveSharesToCurrent(r);
                                 roundsToDelete.push(coin + ':shares:round' + r.height);
+                                roundsToDelete.push(coin + ':shares:times' + r.height);
                             }
                             return;
                         case 'generate':
+                            confirmsToDelete.push(['hdel', coin + ':blocksPendingConfirms', r.blockHash]);
                             movePendingCommands.push(['smove', coin + ':blocksPending', coin + ':blocksConfirmed', r.serialized]);
                             roundsToDelete.push(coin + ':shares:round' + r.height);
+                            roundsToDelete.push(coin + ':shares:times' + r.height);
                             return;
                     }
                 });
@@ -1037,8 +1172,11 @@ function SetupForPool(logger, poolOptions, setupFinished){
                 if (roundsToDelete.length > 0)
                     finalRedisCommands.push(['del'].concat(roundsToDelete));
 
+                if (confirmsToDelete.length > 0)
+                    finalRedisCommands = finalRedisCommands.concat(confirmsToDelete);
+
                 if (totalPaid !== 0)
-                    finalRedisCommands.push(['hincrbyfloat', coin + ':stats', 'totalPaid', balanceRound(totalPaid)]);
+                    finalRedisCommands.push(['hincrbyfloat', coin + ':stats', 'totalPaid', totalPaid]);
 
                 if (finalRedisCommands.length === 0){
                     callback();
@@ -1048,12 +1186,14 @@ function SetupForPool(logger, poolOptions, setupFinished){
                 startRedisTimer();
                 redisClient.multi(finalRedisCommands).exec(function(error, results){
                     endRedisTimer();
-                    if (error){
+                    if (error) {
                         clearInterval(paymentInterval);
+                        logger.error(logSystem, logComponent, 'Payments sent but could not update redis. ' + JSON.stringify(error) + ' Disabling payment processing to prevent possible double-payouts. The redis commands in ' + coin + '_finalRedisCommands.txt must be run manually');
+
                         fs.writeFile(coin + '_finalRedisCommands.txt', JSON.stringify(finalRedisCommands), function(err){
                             logger.error('Could not write finalRedisCommands.txt, you are fucked.');
                         });
diff --git a/libs/poolWorker.js b/libs/poolWorker.js
index 84188f5..abb28c6 100644
--- a/libs/poolWorker.js
+++ b/libs/poolWorker.js
@@ -177,9 +177,9 @@ module.exports = function(logger){
         var pool = Stratum.createPool(poolOptions, authorizeFN, logger);
 
         pool.on('share', function(isValidShare, isValidBlock, data){
-
+
             var shareData = JSON.stringify(data);
-
+
             if (data.blockHash && !isValidBlock)
                 logger.debug(logSystem, logComponent, logSubCat, 'We thought a block was found but it was rejected by the daemon, share data: ' + shareData);
 
@@ -187,19 +187,22 @@ module.exports = function(logger){
                 logger.debug(logSystem, logComponent, logSubCat, 'Block found: ' + data.blockHash + ' by ' + data.worker);
 
             if (isValidShare) {
-                if(data.shareDiff > 1000000000)
+                if(data.shareDiff > 1000000000) {
                     logger.debug(logSystem, logComponent, logSubCat, 'Share was found with diff higher than 1.000.000.000!');
-                else if(data.shareDiff > 1000000)
+                } else if(data.shareDiff > 1000000) {
                     logger.debug(logSystem, logComponent, logSubCat, 'Share was found with diff higher than 1.000.000!');
-
+                }
                 //logger.debug(logSystem, logComponent, logSubCat, 'Share accepted at diff ' + data.difficulty + '/' + data.shareDiff + ' by ' + data.worker + ' [' + data.ip + ']' );
-
-            } else if (!isValidShare)
+            } else if (!isValidShare) {
                 logger.debug(logSystem, logComponent, logSubCat, 'Share rejected: ' + shareData);
-
-            handlers.share(isValidShare, isValidBlock, data)
-
-
+            }
+
+            // handle the share
+            handlers.share(isValidShare, isValidBlock, data);
+
+            // send to master for pplnt time tracking
+            process.send({type: 'shareTrack', thread:(parseInt(forkId)+1), coin:poolOptions.coin.name, isValidShare:isValidShare, isValidBlock:isValidBlock, data:data});
+
         }).on('difficultyUpdate', function(workerName, diff){
             logger.debug(logSystem, logComponent, logSubCat, 'Difficulty update to diff ' + diff + ' workerName=' + JSON.stringify(workerName));
             handlers.diff(workerName, diff);
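Reviewer note: poolWorker now forwards every share result to the master via `process.send`; the `shareTrack` handler in init.js (top of this patch) consumes the same message to maintain `:shares:timesCurrent`. A sketch of the message contract, with field values illustrative (`process.send` only exists inside a forked worker):

```js
// Message emitted per share; the master's 'shareTrack' handler in init.js
// uses isValidShare plus data.worker to update <coin>:shares:timesCurrent.
process.send({
    type: 'shareTrack',
    thread: 2,                 // (parseInt(forkId) + 1)
    coin: 'zclassic',          // poolOptions.coin.name
    isValidShare: true,
    isValidBlock: false,
    data: { worker: 't1ExampleWorkerAddr.rig1', shareDiff: 1.2, blockHash: null }
});
```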
diff --git a/libs/shareProcessor.js b/libs/shareProcessor.js
index 354adaa..f881c58 100644
--- a/libs/shareProcessor.js
+++ b/libs/shareProcessor.js
@@ -25,9 +25,8 @@ module.exports = function(logger, poolConfig){
     var logSystem = 'Pool';
     var logComponent = coin;
     var logSubCat = 'Thread ' + (parseInt(forkId) + 1);
-
+
     var connection = redis.createClient(redisConfig.port, redisConfig.host);
-
     connection.on('ready', function(){
         logger.debug(logSystem, logComponent, logSubCat, 'Share processing setup with redis (' + redisConfig.host + ':' + redisConfig.port + ')');
@@ -38,7 +37,6 @@ module.exports = function(logger, poolConfig){
     connection.on('end', function(){
         logger.error(logSystem, logComponent, logSubCat, 'Connection to redis database has been ended');
     });
-
     connection.info(function(error, response){
         if (error){
             logger.error(logSystem, logComponent, logSubCat, 'Redis version check failed');
@@ -65,18 +63,17 @@ module.exports = function(logger, poolConfig){
         }
     });
 
-
-    this.handleShare = function(isValidShare, isValidBlock, shareData){
+    this.handleShare = function(isValidShare, isValidBlock, shareData) {
 
         var redisCommands = [];
 
-        if (isValidShare){
+        if (isValidShare) {
             redisCommands.push(['hincrbyfloat', coin + ':shares:roundCurrent', shareData.worker, shareData.difficulty]);
             redisCommands.push(['hincrby', coin + ':stats', 'validShares', 1]);
-        }
-        else{
+        } else {
             redisCommands.push(['hincrby', coin + ':stats', 'invalidShares', 1]);
         }
+
        /* Stores share diff, worker, and unique value with a score that is the timestamp. Unique value ensures it
           doesn't overwrite an existing entry, and timestamp as score lets us query shares from last X minutes to
           generate hashrate for each worker and pool. */
@@ -86,6 +83,7 @@ module.exports = function(logger, poolConfig){
 
         if (isValidBlock){
             redisCommands.push(['rename', coin + ':shares:roundCurrent', coin + ':shares:round' + shareData.height]);
+            redisCommands.push(['rename', coin + ':shares:timesCurrent', coin + ':shares:times' + shareData.height]);
             redisCommands.push(['sadd', coin + ':blocksPending', [shareData.blockHash, shareData.txHash, shareData.height, shareData.worker, dateNow].join(':')]);
             redisCommands.push(['hincrby', coin + ':stats', 'validBlocks', 1]);
         }
@@ -97,8 +95,6 @@ module.exports = function(logger, poolConfig){
             if (err)
                 logger.error(logSystem, logComponent, logSubCat, 'Error with share processor multi ' + JSON.stringify(err));
         });
-
-
     };
 };
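Reviewer note: on a block find, both the share and time hashes are renamed to frozen per-height keys inside one redis MULTI; the payment processor reads them back in Step 3. A sketch of the key lifecycle (key names from this patch, height illustrative):

```js
// During a round (init.js / shareProcessor.js bookkeeping):
//   <coin>:shares:roundCurrent  {worker: difficultySum}
//   <coin>:shares:timesCurrent  {worker: secondsMined}
// On block find at height 123456 (this handler):
//   RENAME <coin>:shares:roundCurrent <coin>:shares:round123456
//   RENAME <coin>:shares:timesCurrent <coin>:shares:times123456
// After payout (paymentProcessor Step 5):
//   DEL <coin>:shares:round123456 <coin>:shares:times123456
// Note: RENAME errors if timesCurrent does not exist yet (no time has
// accrued this round); in redis MULTI/EXEC the other queued commands
// still run, so only that one reply carries the error.
```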
diff --git a/libs/stats.js b/libs/stats.js
index 2bc6f99..48bd3ab 100644
--- a/libs/stats.js
+++ b/libs/stats.js
@@ -152,6 +152,20 @@ module.exports = function(logger, portalConfig, poolConfigs){
         }
         _this.statPoolHistory.push(data);
     }
+
+    function readableSeconds(t) {
+        var seconds = Math.round(t);
+        var minutes = Math.floor(seconds/60);
+        var hours = Math.floor(minutes/60);
+        var days = Math.floor(hours/24);
+        hours = hours-(days*24);
+        minutes = minutes-(days*24*60)-(hours*60);
+        seconds = seconds-(days*24*60*60)-(hours*60*60)-(minutes*60);
+        if (days > 0) { return (days + "d " + hours + "h " + minutes + "m " + seconds + "s"); }
+        if (hours > 0) { return (hours + "h " + minutes + "m " + seconds + "s"); }
+        if (minutes > 0) { return (minutes + "m " + seconds + "s"); }
+        return (seconds + "s");
+    }
 
     this.getCoins = function(cback){
         _this.stats.coins = redisClients[0].coins;
@@ -296,7 +310,8 @@ module.exports = function(logger, portalConfig, poolConfigs){
                 ['smembers', ':blocksConfirmed'],
                 ['hgetall', ':shares:roundCurrent'],
                 ['hgetall', ':blocksPendingConfirms'],
-                ['zrange', ':payments', -100, -1]
+                ['zrange', ':payments', -100, -1],
+                ['hgetall', ':shares:timesCurrent']
             ];
 
             var commandsPerCoin = redisCommandTemplates.length;
@@ -317,6 +332,12 @@ module.exports = function(logger, portalConfig, poolConfigs){
                 else{
                     for(var i = 0; i < replies.length; i += commandsPerCoin){
                         var coinName = client.coins[i / commandsPerCoin | 0];
+                        var marketStats = {};
+                        if (replies[i + 2]) {
+                            if (replies[i + 2].coinmarketcap) {
+                                marketStats = replies[i + 2] ? (JSON.parse(replies[i + 2].coinmarketcap)[0] || 0) : 0;
+                            }
+                        }
                         var coinStats = {
                             name: coinName,
                             symbol: poolConfigs[coinName].coin.symbol.toUpperCase(),
@@ -335,6 +356,7 @@ module.exports = function(logger, portalConfig, poolConfigs){
                                 networkVersion: replies[i + 2] ? (replies[i + 2].networkSubVersion || 0) : 0,
                                 networkProtocolVersion: replies[i + 2] ? (replies[i + 2].networkProtocolVersion || 0) : 0
                             },
+                            marketStats: marketStats,
                             /* block stat counts */
                             blocks: {
                                 pending: replies[i + 3],
@@ -344,14 +366,17 @@ module.exports = function(logger, portalConfig, poolConfigs){
                             /* show all pending blocks */
                             pending: {
                                 blocks: replies[i + 6].sort(sortBlocks),
-                                confirms: replies[i + 9]
+                                confirms: (replies[i + 9] || {})
                             },
                             /* show last 5 found blocks */
                             confirmed: {
                                 blocks: replies[i + 7].sort(sortBlocks).slice(0,5)
                             },
                             payments: [],
-                            currentRoundShares: replies[i + 8]
+                            currentRoundShares: (replies[i + 8] || {}),
+                            currentRoundTimes: (replies[i + 11] || {}),
+                            maxRoundTime: 0,
+                            shareCount: 0
                         };
                         for(var j = replies[i + 10].length; j > 0; j--){
                             var jsonObj;
@@ -364,17 +389,10 @@ module.exports = function(logger, portalConfig, poolConfigs){
                                 coinStats.payments.push(jsonObj);
                             }
                         }
-                        /*
-                        for (var b in coinStats.confirmed.blocks) {
-                            var parms = coinStats.confirmed.blocks[b].split(':');
-                            if (parms[4] != null && parms[4] > 0) {
-                                console.log(fancyTimestamp(parseInt(parms[4]), true));
-                            }
-                            break;
-                        }
-                        */
                         allCoinStats[coinStats.name] = (coinStats);
                     }
+                    // sort pools alphabetically
+                    allCoinStats = sortPoolsByName(allCoinStats);
                     callback();
                 }
             });
@@ -419,6 +437,7 @@ module.exports = function(logger, portalConfig, poolConfigs){
                             shares: workerShares,
                             invalidshares: 0,
                             currRoundShares: 0,
+                            currRoundTime: 0,
                             hashrate: null,
                             hashrateString: null,
                             luckDays: null,
@@ -436,6 +455,7 @@ module.exports = function(logger, portalConfig, poolConfigs){
                             shares: workerShares,
                             invalidshares: 0,
                             currRoundShares: 0,
+                            currRoundTime: 0,
                             hashrate: null,
                             hashrateString: null,
                             luckDays: null,
@@ -455,6 +475,7 @@ module.exports = function(logger, portalConfig, poolConfigs){
                             shares: 0,
                             invalidshares: -workerShares,
                             currRoundShares: 0,
+                            currRoundTime: 0,
                             hashrate: null,
                             hashrateString: null,
                             luckDays: null,
@@ -472,6 +493,7 @@ module.exports = function(logger, portalConfig, poolConfigs){
                             shares: 0,
                             invalidshares: -workerShares,
                             currRoundShares: 0,
+                            currRoundTime: 0,
                             hashrate: null,
                             hashrateString: null,
                             luckDays: null,
@@ -510,6 +532,7 @@ module.exports = function(logger, portalConfig, poolConfigs){
             portalStats.algos[algo].workers += Object.keys(coinStats.workers).length;
 
             var _shareTotal = parseFloat(0);
+            var _maxTimeShare = parseFloat(0);
             for (var worker in coinStats.currentRoundShares) {
                 var miner = worker.split(".")[0];
                 if (miner in coinStats.miners) {
@@ -520,8 +543,21 @@ module.exports = function(logger, portalConfig, poolConfigs){
                 }
                 _shareTotal += parseFloat(coinStats.currentRoundShares[worker]);
             }
+            for (var worker in coinStats.currentRoundTimes) {
+                var time = parseFloat(coinStats.currentRoundTimes[worker]);
+                if (_maxTimeShare < time)
+                    _maxTimeShare = time;
+
+                var miner = worker.split(".")[0];
+                if (miner in coinStats.miners) {
+                    coinStats.miners[miner].currRoundTime += parseFloat(coinStats.currentRoundTimes[worker]);
+                }
+            }
+
             coinStats.shareCount = _shareTotal;
-
+            coinStats.maxRoundTime = _maxTimeShare;
+            coinStats.maxRoundTimeString = readableSeconds(_maxTimeShare);
+
             for (var worker in coinStats.workers) {
                 var _workerRate = shareMultiplier * coinStats.workers[worker].shares / portalConfig.website.stats.hashrateWindow;
                 var _wHashRate = (_workerRate / 1000000) * 2;
@@ -559,6 +595,7 @@ module.exports = function(logger, portalConfig, poolConfigs){
             delete saveStats.pools[pool].pending;
             delete saveStats.pools[pool].confirmed;
             delete saveStats.pools[pool].currentRoundShares;
+            delete saveStats.pools[pool].currentRoundTimes;
             delete saveStats.pools[pool].payments;
             delete saveStats.pools[pool].miners;
         });
@@ -591,13 +628,24 @@ module.exports = function(logger, portalConfig, poolConfigs){
     };
 
-    function sortBlocks(a, b) {
-        var as = a.split(":");
-        var bs = b.split(":");
-        if (as[2] > bs[2]) return -1;
-        if (as[2] < bs[2]) return 1;
-        return 0;
-    }
+    function sortPoolsByName(objects) {
+        var newObject = {};
+        var sortedArray = sortProperties(objects, 'name', false, false);
+        for (var i = 0; i < sortedArray.length; i++) {
+            var key = sortedArray[i][0];
+            var value = sortedArray[i][1];
+            newObject[key] = value;
+        }
+        return newObject;
+    }
+
+    function sortBlocks(a, b) {
+        var as = parseInt(a.split(":")[2]);
+        var bs = parseInt(b.split(":")[2]);
+        if (as > bs) return -1;
+        if (as < bs) return 1;
+        return 0;
+    }
 
     function sortWorkersByName(objects) {
         var newObject = {};
diff --git a/libs/website.js b/libs/website.js
index 2234943..487b3fc 100644
--- a/libs/website.js
+++ b/libs/website.js
@@ -98,7 +98,13 @@ module.exports = function(logger){
     // if an html file was changed reload it
     /* requires node-watch 0.5.0 or newer */
     watch(['./website', './website/pages'], function(evt, filename){
-        var basename = path.basename(filename);
+        var basename;
+        // support older versions of node-watch automatically
+        if (!filename && evt)
+            basename = path.basename(evt);
+        else
+            basename = path.basename(filename);
+
         if (basename in pageFiles){
             readPageFiles([basename]);
             logger.special(logSystem, 'Server', 'Reloaded file ' + basename);
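Reviewer note: each example pool config below gains the same two keys. For reference, a consolidated view of a `paymentProcessing` block with the new settings (values from the zclassic example; the optional `pplnt` threshold shown here is an assumption illustrating the 0.51 default the payment processor falls back to, it is not added by this patch):

```json
"paymentProcessing": {
    "enabled": true,
    "paymentMode": "prop",
    "_comment_paymentMode": "prop, pplnt",
    "pplnt": 0.51,
    "_comment_pplnt": "optional; fraction of round time a miner must mine to keep full shares",
    "paymentInterval": 20,
    "minimumPayment": 0.1,
    "maxBlocksPerPayment": 3
}
```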
diff --git a/pool_configs/komodo_example.json b/pool_configs/komodo_example.json
index a81f446..b491f0f 100644
--- a/pool_configs/komodo_example.json
+++ b/pool_configs/komodo_example.json
@@ -27,6 +27,8 @@
 
     "paymentProcessing": {
         "enabled": true,
+        "paymentMode": "prop",
+        "_comment_paymentMode":"prop, pplnt",
         "paymentInterval": 57,
         "_comment_paymentInterval": "Interval in seconds to check and perform payments.",
         "minimumPayment": 0.1,
diff --git a/pool_configs/zcash_example.json b/pool_configs/zcash_example.json
index 2466d19..0017d6a 100644
--- a/pool_configs/zcash_example.json
+++ b/pool_configs/zcash_example.json
@@ -20,6 +20,8 @@
 
     "paymentProcessing": {
         "enabled": false,
+        "paymentMode": "prop",
+        "_comment_paymentMode":"prop, pplnt",
         "paymentInterval": 20,
         "minimumPayment": 0.1,
         "maxBlocksPerPayment": 3,
diff --git a/pool_configs/zcash_testnet_example.json b/pool_configs/zcash_testnet_example.json
index 987dbfc..f3bf44e 100644
--- a/pool_configs/zcash_testnet_example.json
+++ b/pool_configs/zcash_testnet_example.json
@@ -21,6 +21,8 @@
 
     "paymentProcessing": {
         "enabled": false,
+        "paymentMode": "prop",
+        "_comment_paymentMode":"prop, pplnt",
         "paymentInterval": 20,
         "minimumPayment": 0.1,
         "maxBlocksPerPayment": 1,
diff --git a/pool_configs/zclassic_example.json b/pool_configs/zclassic_example.json
index c3120bb..c6c028b 100644
--- a/pool_configs/zclassic_example.json
+++ b/pool_configs/zclassic_example.json
@@ -25,6 +25,8 @@
 
     "paymentProcessing": {
         "enabled": true,
+        "paymentMode": "prop",
+        "_comment_paymentMode":"prop, pplnt",
         "paymentInterval": 20,
         "minimumPayment": 0.1,
         "maxBlocksPerPayment": 3,
diff --git a/website/pages/stats.html b/website/pages/stats.html
index 39998ad..3274284 100644
--- a/website/pages/stats.html
+++ b/website/pages/stats.html
@@ -153,11 +153,15 @@
                     {{if (block[4] != null) { }}
                         {{=readableDate(block[4])}}
                     {{ } }}
-                    {{if (it.stats.pools[pool].pending.confirms[block[0]]) { }}
-                        {{=it.stats.pools[pool].pending.confirms[block[0]]}} of 100
-                    {{ } else { }}
-                        *PENDING*
-                    {{ } }}
+                    {{if (it.stats.pools[pool].pending.confirms) { }}
+                        {{if (it.stats.pools[pool].pending.confirms[block[0]]) { }}
+                            {{=it.stats.pools[pool].pending.confirms[block[0]]}} of 100
+                        {{ } else { }}
+                            *PENDING*
+                        {{ } }}
+                    {{ } else { }}
+                        *PENDING*
+                    {{ } }}
                     Mined By: {{=block[3]}}
                     {{ blockscomb.push(block);}}
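Reviewer note: the stats.html change guards against a missing confirms hash (now defaulted to `{}` in stats.js). The doT template branch above, restated in plain JavaScript for review (object shapes as used by this patch):

```js
// Equivalent of the template logic above:
function confirmsLabel(pool, block) {
    var confirms = pool.pending.confirms; // now always at least {}
    if (confirms && confirms[block[0]]) {
        return confirms[block[0]] + ' of 100';
    }
    return '*PENDING*';
}
```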