Merge remote-tracking branch 'upstream/master'

This commit is contained in:
hellcatz 2017-05-11 18:17:53 -07:00
commit fe72b6e533
11 changed files with 566 additions and 359 deletions

View File

@ -265,6 +265,7 @@ var spawnPoolWorkers = function(){
if (!_lastShareTimes[workerAddress] || !_lastStartTimes[workerAddress]) { if (!_lastShareTimes[workerAddress] || !_lastStartTimes[workerAddress]) {
_lastShareTimes[workerAddress] = now; _lastShareTimes[workerAddress] = now;
_lastStartTimes[workerAddress] = now; _lastStartTimes[workerAddress] = now;
logger.debug('PPLNT', msg.coin, 'Thread '+msg.thread, workerAddress+' joined current round.');
} }
if (_lastShareTimes[workerAddress] != null && _lastShareTimes[workerAddress] > 0) { if (_lastShareTimes[workerAddress] != null && _lastShareTimes[workerAddress] > 0) {
lastShareTime = _lastShareTimes[workerAddress]; lastShareTime = _lastShareTimes[workerAddress];
@ -273,13 +274,13 @@ var spawnPoolWorkers = function(){
var redisCommands = []; var redisCommands = [];
// if its been less than 10 minutes since last share was submitted // if its been less than 15 minutes since last share was submitted
var timeChangeSec = roundTo(Math.max(now - lastShareTime, 0) / 1000, 4); var timeChangeSec = roundTo(Math.max(now - lastShareTime, 0) / 1000, 4);
var timeChangeTotal = roundTo(Math.max(now - lastStartTime, 0) / 1000, 4); var timeChangeTotal = roundTo(Math.max(now - lastStartTime, 0) / 1000, 4);
if (timeChangeSec < 600) { if (timeChangeSec < 900) {
// loyal miner keeps mining :) // loyal miner keeps mining :)
redisCommands.push(['hincrbyfloat', msg.coin + ':shares:timesCurrent', workerAddress, timeChangeSec]); redisCommands.push(['hincrbyfloat', msg.coin + ':shares:timesCurrent', workerAddress, timeChangeSec]);
logger.debug('PPLNT', msg.coin, 'Thread '+msg.thread, workerAddress+':{totalTimeSec:'+timeChangeTotal+', timeChangeSec:'+timeChangeSec+'}'); //logger.debug('PPLNT', msg.coin, 'Thread '+msg.thread, workerAddress+':{totalTimeSec:'+timeChangeTotal+', timeChangeSec:'+timeChangeSec+'}');
connection.multi(redisCommands).exec(function(err, replies){ connection.multi(redisCommands).exec(function(err, replies){
if (err) if (err)
logger.error('PPLNT', msg.coin, 'Thread '+msg.thread, 'Error with time share processor call to redis ' + JSON.stringify(err)); logger.error('PPLNT', msg.coin, 'Thread '+msg.thread, 'Error with time share processor call to redis ' + JSON.stringify(err));
@ -287,6 +288,7 @@ var spawnPoolWorkers = function(){
} else { } else {
// they just re-joined the pool // they just re-joined the pool
_lastStartTimes[workerAddress] = now; _lastStartTimes[workerAddress] = now;
logger.debug('PPLNT', msg.coin, 'Thread '+msg.thread, workerAddress+' re-joined current round.');
} }
// track last time share // track last time share

File diff suppressed because it is too large Load Diff

View File

@ -177,9 +177,9 @@ module.exports = function(logger){
var pool = Stratum.createPool(poolOptions, authorizeFN, logger); var pool = Stratum.createPool(poolOptions, authorizeFN, logger);
pool.on('share', function(isValidShare, isValidBlock, data){ pool.on('share', function(isValidShare, isValidBlock, data){
var shareData = JSON.stringify(data); var shareData = JSON.stringify(data);
if (data.blockHash && !isValidBlock) if (data.blockHash && !isValidBlock)
logger.debug(logSystem, logComponent, logSubCat, 'We thought a block was found but it was rejected by the daemon, share data: ' + shareData); logger.debug(logSystem, logComponent, logSubCat, 'We thought a block was found but it was rejected by the daemon, share data: ' + shareData);
@ -187,19 +187,22 @@ module.exports = function(logger){
logger.debug(logSystem, logComponent, logSubCat, 'Block found: ' + data.blockHash + ' by ' + data.worker); logger.debug(logSystem, logComponent, logSubCat, 'Block found: ' + data.blockHash + ' by ' + data.worker);
if (isValidShare) { if (isValidShare) {
if(data.shareDiff > 1000000000) if(data.shareDiff > 1000000000) {
logger.debug(logSystem, logComponent, logSubCat, 'Share was found with diff higher than 1.000.000.000!'); logger.debug(logSystem, logComponent, logSubCat, 'Share was found with diff higher than 1.000.000.000!');
else if(data.shareDiff > 1000000) } else if(data.shareDiff > 1000000) {
logger.debug(logSystem, logComponent, logSubCat, 'Share was found with diff higher than 1.000.000!'); logger.debug(logSystem, logComponent, logSubCat, 'Share was found with diff higher than 1.000.000!');
}
//logger.debug(logSystem, logComponent, logSubCat, 'Share accepted at diff ' + data.difficulty + '/' + data.shareDiff + ' by ' + data.worker + ' [' + data.ip + ']' ); //logger.debug(logSystem, logComponent, logSubCat, 'Share accepted at diff ' + data.difficulty + '/' + data.shareDiff + ' by ' + data.worker + ' [' + data.ip + ']' );
} else if (!isValidShare) {
} else if (!isValidShare)
logger.debug(logSystem, logComponent, logSubCat, 'Share rejected: ' + shareData); logger.debug(logSystem, logComponent, logSubCat, 'Share rejected: ' + shareData);
}
handlers.share(isValidShare, isValidBlock, data)
// handle the share
handlers.share(isValidShare, isValidBlock, data);
// send to master for pplnt time tracking
process.send({type: 'shareTrack', thread:(parseInt(forkId)+1), coin:poolOptions.coin.name, isValidShare:isValidShare, isValidBlock:isValidBlock, data:data});
}).on('difficultyUpdate', function(workerName, diff){ }).on('difficultyUpdate', function(workerName, diff){
logger.debug(logSystem, logComponent, logSubCat, 'Difficulty update to diff ' + diff + ' workerName=' + JSON.stringify(workerName)); logger.debug(logSystem, logComponent, logSubCat, 'Difficulty update to diff ' + diff + ' workerName=' + JSON.stringify(workerName));
handlers.diff(workerName, diff); handlers.diff(workerName, diff);

View File

@ -25,9 +25,8 @@ module.exports = function(logger, poolConfig){
var logSystem = 'Pool'; var logSystem = 'Pool';
var logComponent = coin; var logComponent = coin;
var logSubCat = 'Thread ' + (parseInt(forkId) + 1); var logSubCat = 'Thread ' + (parseInt(forkId) + 1);
var connection = redis.createClient(redisConfig.port, redisConfig.host); var connection = redis.createClient(redisConfig.port, redisConfig.host);
connection.on('ready', function(){ connection.on('ready', function(){
logger.debug(logSystem, logComponent, logSubCat, 'Share processing setup with redis (' + redisConfig.host + logger.debug(logSystem, logComponent, logSubCat, 'Share processing setup with redis (' + redisConfig.host +
':' + redisConfig.port + ')'); ':' + redisConfig.port + ')');
@ -38,7 +37,6 @@ module.exports = function(logger, poolConfig){
connection.on('end', function(){ connection.on('end', function(){
logger.error(logSystem, logComponent, logSubCat, 'Connection to redis database has been ended'); logger.error(logSystem, logComponent, logSubCat, 'Connection to redis database has been ended');
}); });
connection.info(function(error, response){ connection.info(function(error, response){
if (error){ if (error){
logger.error(logSystem, logComponent, logSubCat, 'Redis version check failed'); logger.error(logSystem, logComponent, logSubCat, 'Redis version check failed');
@ -65,18 +63,17 @@ module.exports = function(logger, poolConfig){
} }
}); });
this.handleShare = function(isValidShare, isValidBlock, shareData) {
this.handleShare = function(isValidShare, isValidBlock, shareData){
var redisCommands = []; var redisCommands = [];
if (isValidShare){ if (isValidShare) {
redisCommands.push(['hincrbyfloat', coin + ':shares:roundCurrent', shareData.worker, shareData.difficulty]); redisCommands.push(['hincrbyfloat', coin + ':shares:roundCurrent', shareData.worker, shareData.difficulty]);
redisCommands.push(['hincrby', coin + ':stats', 'validShares', 1]); redisCommands.push(['hincrby', coin + ':stats', 'validShares', 1]);
} } else {
else{
redisCommands.push(['hincrby', coin + ':stats', 'invalidShares', 1]); redisCommands.push(['hincrby', coin + ':stats', 'invalidShares', 1]);
} }
/* Stores share diff, worker, and unique value with a score that is the timestamp. Unique value ensures it /* Stores share diff, worker, and unique value with a score that is the timestamp. Unique value ensures it
doesn't overwrite an existing entry, and timestamp as score lets us query shares from last X minutes to doesn't overwrite an existing entry, and timestamp as score lets us query shares from last X minutes to
generate hashrate for each worker and pool. */ generate hashrate for each worker and pool. */
@ -86,6 +83,7 @@ module.exports = function(logger, poolConfig){
if (isValidBlock){ if (isValidBlock){
redisCommands.push(['rename', coin + ':shares:roundCurrent', coin + ':shares:round' + shareData.height]); redisCommands.push(['rename', coin + ':shares:roundCurrent', coin + ':shares:round' + shareData.height]);
redisCommands.push(['rename', coin + ':shares:timesCurrent', coin + ':shares:times' + shareData.height]);
redisCommands.push(['sadd', coin + ':blocksPending', [shareData.blockHash, shareData.txHash, shareData.height, shareData.worker, dateNow].join(':')]); redisCommands.push(['sadd', coin + ':blocksPending', [shareData.blockHash, shareData.txHash, shareData.height, shareData.worker, dateNow].join(':')]);
redisCommands.push(['hincrby', coin + ':stats', 'validBlocks', 1]); redisCommands.push(['hincrby', coin + ':stats', 'validBlocks', 1]);
} }
@ -97,8 +95,6 @@ module.exports = function(logger, poolConfig){
if (err) if (err)
logger.error(logSystem, logComponent, logSubCat, 'Error with share processor multi ' + JSON.stringify(err)); logger.error(logSystem, logComponent, logSubCat, 'Error with share processor multi ' + JSON.stringify(err));
}); });
}; };
}; };

View File

@ -152,6 +152,20 @@ module.exports = function(logger, portalConfig, poolConfigs){
} }
_this.statPoolHistory.push(data); _this.statPoolHistory.push(data);
} }
function readableSeconds(t) {
    // Format a duration given in seconds as a compact human-readable
    // string, e.g. 90061 -> "1d 1h 1m 1s". Units that are zero at the
    // high end are omitted; the seconds component is always shown.
    var secs = Math.round(t);
    var mins = Math.floor(secs / 60);
    var hrs = Math.floor(mins / 60);
    var days = Math.floor(hrs / 24);
    // Reduce each unit to its remainder within the next-larger unit.
    hrs -= days * 24;
    mins -= days * 24 * 60 + hrs * 60;
    secs -= days * 24 * 60 * 60 + hrs * 60 * 60 + mins * 60;
    var parts = [];
    if (days > 0) parts.push(days + "d");
    if (parts.length > 0 || hrs > 0) parts.push(hrs + "h");
    if (parts.length > 0 || mins > 0) parts.push(mins + "m");
    parts.push(secs + "s");
    return parts.join(" ");
}
this.getCoins = function(cback){ this.getCoins = function(cback){
_this.stats.coins = redisClients[0].coins; _this.stats.coins = redisClients[0].coins;
@ -296,7 +310,8 @@ module.exports = function(logger, portalConfig, poolConfigs){
['smembers', ':blocksConfirmed'], ['smembers', ':blocksConfirmed'],
['hgetall', ':shares:roundCurrent'], ['hgetall', ':shares:roundCurrent'],
['hgetall', ':blocksPendingConfirms'], ['hgetall', ':blocksPendingConfirms'],
['zrange', ':payments', -100, -1] ['zrange', ':payments', -100, -1],
['hgetall', ':shares:timesCurrent']
]; ];
var commandsPerCoin = redisCommandTemplates.length; var commandsPerCoin = redisCommandTemplates.length;
@ -317,6 +332,12 @@ module.exports = function(logger, portalConfig, poolConfigs){
else{ else{
for(var i = 0; i < replies.length; i += commandsPerCoin){ for(var i = 0; i < replies.length; i += commandsPerCoin){
var coinName = client.coins[i / commandsPerCoin | 0]; var coinName = client.coins[i / commandsPerCoin | 0];
var marketStats = {};
if (replies[i + 2]) {
if (replies[i + 2].coinmarketcap) {
marketStats = replies[i + 2] ? (JSON.parse(replies[i + 2].coinmarketcap)[0] || 0) : 0;
}
}
var coinStats = { var coinStats = {
name: coinName, name: coinName,
symbol: poolConfigs[coinName].coin.symbol.toUpperCase(), symbol: poolConfigs[coinName].coin.symbol.toUpperCase(),
@ -335,6 +356,7 @@ module.exports = function(logger, portalConfig, poolConfigs){
networkVersion: replies[i + 2] ? (replies[i + 2].networkSubVersion || 0) : 0, networkVersion: replies[i + 2] ? (replies[i + 2].networkSubVersion || 0) : 0,
networkProtocolVersion: replies[i + 2] ? (replies[i + 2].networkProtocolVersion || 0) : 0 networkProtocolVersion: replies[i + 2] ? (replies[i + 2].networkProtocolVersion || 0) : 0
}, },
marketStats: marketStats,
/* block stat counts */ /* block stat counts */
blocks: { blocks: {
pending: replies[i + 3], pending: replies[i + 3],
@ -344,14 +366,17 @@ module.exports = function(logger, portalConfig, poolConfigs){
/* show all pending blocks */ /* show all pending blocks */
pending: { pending: {
blocks: replies[i + 6].sort(sortBlocks), blocks: replies[i + 6].sort(sortBlocks),
confirms: replies[i + 9] confirms: (replies[i + 9] || {})
}, },
/* show last 5 found blocks */ /* show last 5 found blocks */
confirmed: { confirmed: {
blocks: replies[i + 7].sort(sortBlocks).slice(0,5) blocks: replies[i + 7].sort(sortBlocks).slice(0,5)
}, },
payments: [], payments: [],
currentRoundShares: replies[i + 8] currentRoundShares: (replies[i + 8] || {}),
currentRoundTimes: (replies[i + 11] || {}),
maxRoundTime: 0,
shareCount: 0
}; };
for(var j = replies[i + 10].length; j > 0; j--){ for(var j = replies[i + 10].length; j > 0; j--){
var jsonObj; var jsonObj;
@ -364,17 +389,10 @@ module.exports = function(logger, portalConfig, poolConfigs){
coinStats.payments.push(jsonObj); coinStats.payments.push(jsonObj);
} }
} }
/*
for (var b in coinStats.confirmed.blocks) {
var parms = coinStats.confirmed.blocks[b].split(':');
if (parms[4] != null && parms[4] > 0) {
console.log(fancyTimestamp(parseInt(parms[4]), true));
}
break;
}
*/
allCoinStats[coinStats.name] = (coinStats); allCoinStats[coinStats.name] = (coinStats);
} }
// sort pools alphabetically
allCoinStats = sortPoolsByName(allCoinStats);
callback(); callback();
} }
}); });
@ -419,6 +437,7 @@ module.exports = function(logger, portalConfig, poolConfigs){
shares: workerShares, shares: workerShares,
invalidshares: 0, invalidshares: 0,
currRoundShares: 0, currRoundShares: 0,
currRoundTime: 0,
hashrate: null, hashrate: null,
hashrateString: null, hashrateString: null,
luckDays: null, luckDays: null,
@ -436,6 +455,7 @@ module.exports = function(logger, portalConfig, poolConfigs){
shares: workerShares, shares: workerShares,
invalidshares: 0, invalidshares: 0,
currRoundShares: 0, currRoundShares: 0,
currRoundTime: 0,
hashrate: null, hashrate: null,
hashrateString: null, hashrateString: null,
luckDays: null, luckDays: null,
@ -455,6 +475,7 @@ module.exports = function(logger, portalConfig, poolConfigs){
shares: 0, shares: 0,
invalidshares: -workerShares, invalidshares: -workerShares,
currRoundShares: 0, currRoundShares: 0,
currRoundTime: 0,
hashrate: null, hashrate: null,
hashrateString: null, hashrateString: null,
luckDays: null, luckDays: null,
@ -472,6 +493,7 @@ module.exports = function(logger, portalConfig, poolConfigs){
shares: 0, shares: 0,
invalidshares: -workerShares, invalidshares: -workerShares,
currRoundShares: 0, currRoundShares: 0,
currRoundTime: 0,
hashrate: null, hashrate: null,
hashrateString: null, hashrateString: null,
luckDays: null, luckDays: null,
@ -510,6 +532,7 @@ module.exports = function(logger, portalConfig, poolConfigs){
portalStats.algos[algo].workers += Object.keys(coinStats.workers).length; portalStats.algos[algo].workers += Object.keys(coinStats.workers).length;
var _shareTotal = parseFloat(0); var _shareTotal = parseFloat(0);
var _maxTimeShare = parseFloat(0);
for (var worker in coinStats.currentRoundShares) { for (var worker in coinStats.currentRoundShares) {
var miner = worker.split(".")[0]; var miner = worker.split(".")[0];
if (miner in coinStats.miners) { if (miner in coinStats.miners) {
@ -520,8 +543,21 @@ module.exports = function(logger, portalConfig, poolConfigs){
} }
_shareTotal += parseFloat(coinStats.currentRoundShares[worker]); _shareTotal += parseFloat(coinStats.currentRoundShares[worker]);
} }
for (var worker in coinStats.currentRoundTimes) {
var time = parseFloat(coinStats.currentRoundTimes[worker]);
if (_maxTimeShare < time)
_maxTimeShare = time;
var miner = worker.split(".")[0];
if (miner in coinStats.miners) {
coinStats.miners[miner].currRoundTime += parseFloat(coinStats.currentRoundTimes[worker]);
}
}
coinStats.shareCount = _shareTotal; coinStats.shareCount = _shareTotal;
coinStats.maxRoundTime = _maxTimeShare;
coinStats.maxRoundTimeString = readableSeconds(_maxTimeShare);
for (var worker in coinStats.workers) { for (var worker in coinStats.workers) {
var _workerRate = shareMultiplier * coinStats.workers[worker].shares / portalConfig.website.stats.hashrateWindow; var _workerRate = shareMultiplier * coinStats.workers[worker].shares / portalConfig.website.stats.hashrateWindow;
var _wHashRate = (_workerRate / 1000000) * 2; var _wHashRate = (_workerRate / 1000000) * 2;
@ -559,6 +595,7 @@ module.exports = function(logger, portalConfig, poolConfigs){
delete saveStats.pools[pool].pending; delete saveStats.pools[pool].pending;
delete saveStats.pools[pool].confirmed; delete saveStats.pools[pool].confirmed;
delete saveStats.pools[pool].currentRoundShares; delete saveStats.pools[pool].currentRoundShares;
delete saveStats.pools[pool].currentRoundTimes;
delete saveStats.pools[pool].payments; delete saveStats.pools[pool].payments;
delete saveStats.pools[pool].miners; delete saveStats.pools[pool].miners;
}); });
@ -591,13 +628,24 @@ module.exports = function(logger, portalConfig, poolConfigs){
}; };
function sortBlocks(a, b) { function sortPoolsByName(objects) {
var as = a.split(":"); var newObject = {};
var bs = b.split(":"); var sortedArray = sortProperties(objects, 'name', false, false);
if (as[2] > bs[2]) return -1; for (var i = 0; i < sortedArray.length; i++) {
if (as[2] < bs[2]) return 1; var key = sortedArray[i][0];
return 0; var value = sortedArray[i][1];
} newObject[key] = value;
}
return newObject;
}
function sortBlocks(a, b) {
    // Comparator for block records stored as colon-delimited strings
    // ("blockHash:txHash:height:worker:time"); orders by block height,
    // highest (newest) first.
    // Fix: always pass an explicit radix to parseInt so heights are
    // parsed as base-10 regardless of leading characters/legacy engines.
    var heightA = parseInt(a.split(":")[2], 10);
    var heightB = parseInt(b.split(":")[2], 10);
    if (heightA > heightB) return -1;
    if (heightA < heightB) return 1;
    return 0;
}
function sortWorkersByName(objects) { function sortWorkersByName(objects) {
var newObject = {}; var newObject = {};

View File

@ -98,7 +98,13 @@ module.exports = function(logger){
// if an html file was changed reload it // if an html file was changed reload it
/* requires node-watch 0.5.0 or newer */ /* requires node-watch 0.5.0 or newer */
watch(['./website', './website/pages'], function(evt, filename){ watch(['./website', './website/pages'], function(evt, filename){
var basename = path.basename(filename); var basename;
// support older versions of node-watch automatically
if (!filename && evt)
basename = path.basename(evt);
else
basename = path.basename(filename);
if (basename in pageFiles){ if (basename in pageFiles){
readPageFiles([basename]); readPageFiles([basename]);
logger.special(logSystem, 'Server', 'Reloaded file ' + basename); logger.special(logSystem, 'Server', 'Reloaded file ' + basename);

View File

@ -27,6 +27,8 @@
"paymentProcessing": { "paymentProcessing": {
"enabled": true, "enabled": true,
"paymentMode": "prop",
"_comment_paymentMode":"prop, pplnt",
"paymentInterval": 57, "paymentInterval": 57,
"_comment_paymentInterval": "Interval in seconds to check and perform payments.", "_comment_paymentInterval": "Interval in seconds to check and perform payments.",
"minimumPayment": 0.1, "minimumPayment": 0.1,

View File

@ -20,6 +20,8 @@
"paymentProcessing": { "paymentProcessing": {
"enabled": false, "enabled": false,
"paymentMode": "prop",
"_comment_paymentMode":"prop, pplnt",
"paymentInterval": 20, "paymentInterval": 20,
"minimumPayment": 0.1, "minimumPayment": 0.1,
"maxBlocksPerPayment": 3, "maxBlocksPerPayment": 3,

View File

@ -21,6 +21,8 @@
"paymentProcessing": { "paymentProcessing": {
"enabled": false, "enabled": false,
"paymentMode": "prop",
"_comment_paymentMode":"prop, pplnt",
"paymentInterval": 20, "paymentInterval": 20,
"minimumPayment": 0.1, "minimumPayment": 0.1,
"maxBlocksPerPayment": 1, "maxBlocksPerPayment": 1,

View File

@ -25,6 +25,8 @@
"paymentProcessing": { "paymentProcessing": {
"enabled": true, "enabled": true,
"paymentMode": "prop",
"_comment_paymentMode":"prop, pplnt",
"paymentInterval": 20, "paymentInterval": 20,
"minimumPayment": 0.1, "minimumPayment": 0.1,
"maxBlocksPerPayment": 3, "maxBlocksPerPayment": 3,

View File

@ -153,11 +153,15 @@
{{if (block[4] != null) { }} {{if (block[4] != null) { }}
<span style="padding-left: 18px;"><small>{{=readableDate(block[4])}}</small></span> <span style="padding-left: 18px;"><small>{{=readableDate(block[4])}}</small></span>
{{ } }} {{ } }}
{{if (it.stats.pools[pool].pending.confirms[block[0]]) { }} {{if (it.stats.pools[pool].pending.confirms) { }}
<span style="float:right; color: red;"><small>{{=it.stats.pools[pool].pending.confirms[block[0]]}} of 100</small></span> {{if (it.stats.pools[pool].pending.confirms[block[0]]) { }}
{{ } else { }} <span style="float:right; color: red;"><small>{{=it.stats.pools[pool].pending.confirms[block[0]]}} of 100</small></span>
<span style="float:right; color: red;"><small>*PENDING*</small></span> {{ } else { }}
{{ } }} <span style="float:right; color: red;"><small>*PENDING*</small></span>
{{ } }}
{{ } else { }}
<span style="float:right; color: red;"><small>*PENDING*</small></span>
{{ } }}
<div><i class="fa fa-gavel"></i><small>Mined By:</small> <a href="/workers/{{=block[3].split('.')[0]}}">{{=block[3]}}</a></div> <div><i class="fa fa-gavel"></i><small>Mined By:</small> <a href="/workers/{{=block[3].split('.')[0]}}">{{=block[3]}}</a></div>
</div> </div>
{{ blockscomb.push(block);}} {{ blockscomb.push(block);}}