Merge remote-tracking branch 'upstream/master'

This commit is contained in:
hellcatz 2017-05-11 18:17:53 -07:00
commit fe72b6e533
11 changed files with 566 additions and 359 deletions

View File

@ -265,6 +265,7 @@ var spawnPoolWorkers = function(){
if (!_lastShareTimes[workerAddress] || !_lastStartTimes[workerAddress]) {
_lastShareTimes[workerAddress] = now;
_lastStartTimes[workerAddress] = now;
logger.debug('PPLNT', msg.coin, 'Thread '+msg.thread, workerAddress+' joined current round.');
}
if (_lastShareTimes[workerAddress] != null && _lastShareTimes[workerAddress] > 0) {
lastShareTime = _lastShareTimes[workerAddress];
@ -273,13 +274,13 @@ var spawnPoolWorkers = function(){
var redisCommands = [];
// if its been less than 10 minutes since last share was submitted
// if its been less than 15 minutes since last share was submitted
var timeChangeSec = roundTo(Math.max(now - lastShareTime, 0) / 1000, 4);
var timeChangeTotal = roundTo(Math.max(now - lastStartTime, 0) / 1000, 4);
if (timeChangeSec < 600) {
if (timeChangeSec < 900) {
// loyal miner keeps mining :)
redisCommands.push(['hincrbyfloat', msg.coin + ':shares:timesCurrent', workerAddress, timeChangeSec]);
logger.debug('PPLNT', msg.coin, 'Thread '+msg.thread, workerAddress+':{totalTimeSec:'+timeChangeTotal+', timeChangeSec:'+timeChangeSec+'}');
//logger.debug('PPLNT', msg.coin, 'Thread '+msg.thread, workerAddress+':{totalTimeSec:'+timeChangeTotal+', timeChangeSec:'+timeChangeSec+'}');
connection.multi(redisCommands).exec(function(err, replies){
if (err)
logger.error('PPLNT', msg.coin, 'Thread '+msg.thread, 'Error with time share processor call to redis ' + JSON.stringify(err));
@ -287,6 +288,7 @@ var spawnPoolWorkers = function(){
} else {
// they just re-joined the pool
_lastStartTimes[workerAddress] = now;
logger.debug('PPLNT', msg.coin, 'Thread '+msg.thread, workerAddress+' re-joined current round.');
}
// track last time share

File diff suppressed because it is too large Load Diff

View File

@ -177,9 +177,9 @@ module.exports = function(logger){
var pool = Stratum.createPool(poolOptions, authorizeFN, logger);
pool.on('share', function(isValidShare, isValidBlock, data){
var shareData = JSON.stringify(data);
if (data.blockHash && !isValidBlock)
logger.debug(logSystem, logComponent, logSubCat, 'We thought a block was found but it was rejected by the daemon, share data: ' + shareData);
@ -187,19 +187,22 @@ module.exports = function(logger){
logger.debug(logSystem, logComponent, logSubCat, 'Block found: ' + data.blockHash + ' by ' + data.worker);
if (isValidShare) {
if(data.shareDiff > 1000000000)
if(data.shareDiff > 1000000000) {
logger.debug(logSystem, logComponent, logSubCat, 'Share was found with diff higher than 1.000.000.000!');
else if(data.shareDiff > 1000000)
} else if(data.shareDiff > 1000000) {
logger.debug(logSystem, logComponent, logSubCat, 'Share was found with diff higher than 1.000.000!');
}
//logger.debug(logSystem, logComponent, logSubCat, 'Share accepted at diff ' + data.difficulty + '/' + data.shareDiff + ' by ' + data.worker + ' [' + data.ip + ']' );
} else if (!isValidShare)
} else if (!isValidShare) {
logger.debug(logSystem, logComponent, logSubCat, 'Share rejected: ' + shareData);
handlers.share(isValidShare, isValidBlock, data)
}
// handle the share
handlers.share(isValidShare, isValidBlock, data);
// send to master for pplnt time tracking
process.send({type: 'shareTrack', thread:(parseInt(forkId)+1), coin:poolOptions.coin.name, isValidShare:isValidShare, isValidBlock:isValidBlock, data:data});
}).on('difficultyUpdate', function(workerName, diff){
logger.debug(logSystem, logComponent, logSubCat, 'Difficulty update to diff ' + diff + ' workerName=' + JSON.stringify(workerName));
handlers.diff(workerName, diff);

View File

@ -25,9 +25,8 @@ module.exports = function(logger, poolConfig){
var logSystem = 'Pool';
var logComponent = coin;
var logSubCat = 'Thread ' + (parseInt(forkId) + 1);
var connection = redis.createClient(redisConfig.port, redisConfig.host);
connection.on('ready', function(){
logger.debug(logSystem, logComponent, logSubCat, 'Share processing setup with redis (' + redisConfig.host +
':' + redisConfig.port + ')');
@ -38,7 +37,6 @@ module.exports = function(logger, poolConfig){
connection.on('end', function(){
logger.error(logSystem, logComponent, logSubCat, 'Connection to redis database has been ended');
});
connection.info(function(error, response){
if (error){
logger.error(logSystem, logComponent, logSubCat, 'Redis version check failed');
@ -65,18 +63,17 @@ module.exports = function(logger, poolConfig){
}
});
this.handleShare = function(isValidShare, isValidBlock, shareData){
this.handleShare = function(isValidShare, isValidBlock, shareData) {
var redisCommands = [];
if (isValidShare){
if (isValidShare) {
redisCommands.push(['hincrbyfloat', coin + ':shares:roundCurrent', shareData.worker, shareData.difficulty]);
redisCommands.push(['hincrby', coin + ':stats', 'validShares', 1]);
}
else{
} else {
redisCommands.push(['hincrby', coin + ':stats', 'invalidShares', 1]);
}
/* Stores share diff, worker, and unique value with a score that is the timestamp. Unique value ensures it
doesn't overwrite an existing entry, and timestamp as score lets us query shares from last X minutes to
generate hashrate for each worker and pool. */
@ -86,6 +83,7 @@ module.exports = function(logger, poolConfig){
if (isValidBlock){
redisCommands.push(['rename', coin + ':shares:roundCurrent', coin + ':shares:round' + shareData.height]);
redisCommands.push(['rename', coin + ':shares:timesCurrent', coin + ':shares:times' + shareData.height]);
redisCommands.push(['sadd', coin + ':blocksPending', [shareData.blockHash, shareData.txHash, shareData.height, shareData.worker, dateNow].join(':')]);
redisCommands.push(['hincrby', coin + ':stats', 'validBlocks', 1]);
}
@ -97,8 +95,6 @@ module.exports = function(logger, poolConfig){
if (err)
logger.error(logSystem, logComponent, logSubCat, 'Error with share processor multi ' + JSON.stringify(err));
});
};
};

View File

@ -152,6 +152,20 @@ module.exports = function(logger, portalConfig, poolConfigs){
}
_this.statPoolHistory.push(data);
}
function readableSeconds(t) {
    // Format a duration given in (possibly fractional) seconds as a short
    // human-readable string, e.g. 90061 -> "1d 1h 1m 1s". Units that are
    // zero at the high end are omitted ("59s", "1m 1s", ...).
    var remaining = Math.round(t);
    var totalMinutes = Math.floor(remaining / 60);
    var totalHours = Math.floor(totalMinutes / 60);
    var days = Math.floor(totalHours / 24);
    var hours = totalHours - days * 24;
    var minutes = totalMinutes - days * 1440 - hours * 60;
    var seconds = remaining - days * 86400 - hours * 3600 - minutes * 60;
    if (days > 0) return days + "d " + hours + "h " + minutes + "m " + seconds + "s";
    if (hours > 0) return hours + "h " + minutes + "m " + seconds + "s";
    if (minutes > 0) return minutes + "m " + seconds + "s";
    return seconds + "s";
}
this.getCoins = function(cback){
_this.stats.coins = redisClients[0].coins;
@ -296,7 +310,8 @@ module.exports = function(logger, portalConfig, poolConfigs){
['smembers', ':blocksConfirmed'],
['hgetall', ':shares:roundCurrent'],
['hgetall', ':blocksPendingConfirms'],
['zrange', ':payments', -100, -1]
['zrange', ':payments', -100, -1],
['hgetall', ':shares:timesCurrent']
];
var commandsPerCoin = redisCommandTemplates.length;
@ -317,6 +332,12 @@ module.exports = function(logger, portalConfig, poolConfigs){
else{
for(var i = 0; i < replies.length; i += commandsPerCoin){
var coinName = client.coins[i / commandsPerCoin | 0];
var marketStats = {};
if (replies[i + 2]) {
if (replies[i + 2].coinmarketcap) {
marketStats = replies[i + 2] ? (JSON.parse(replies[i + 2].coinmarketcap)[0] || 0) : 0;
}
}
var coinStats = {
name: coinName,
symbol: poolConfigs[coinName].coin.symbol.toUpperCase(),
@ -335,6 +356,7 @@ module.exports = function(logger, portalConfig, poolConfigs){
networkVersion: replies[i + 2] ? (replies[i + 2].networkSubVersion || 0) : 0,
networkProtocolVersion: replies[i + 2] ? (replies[i + 2].networkProtocolVersion || 0) : 0
},
marketStats: marketStats,
/* block stat counts */
blocks: {
pending: replies[i + 3],
@ -344,14 +366,17 @@ module.exports = function(logger, portalConfig, poolConfigs){
/* show all pending blocks */
pending: {
blocks: replies[i + 6].sort(sortBlocks),
confirms: replies[i + 9]
confirms: (replies[i + 9] || {})
},
/* show last 5 found blocks */
confirmed: {
blocks: replies[i + 7].sort(sortBlocks).slice(0,5)
},
payments: [],
currentRoundShares: replies[i + 8]
currentRoundShares: (replies[i + 8] || {}),
currentRoundTimes: (replies[i + 11] || {}),
maxRoundTime: 0,
shareCount: 0
};
for(var j = replies[i + 10].length; j > 0; j--){
var jsonObj;
@ -364,17 +389,10 @@ module.exports = function(logger, portalConfig, poolConfigs){
coinStats.payments.push(jsonObj);
}
}
/*
for (var b in coinStats.confirmed.blocks) {
var parms = coinStats.confirmed.blocks[b].split(':');
if (parms[4] != null && parms[4] > 0) {
console.log(fancyTimestamp(parseInt(parms[4]), true));
}
break;
}
*/
allCoinStats[coinStats.name] = (coinStats);
}
// sort pools alphabetically
allCoinStats = sortPoolsByName(allCoinStats);
callback();
}
});
@ -419,6 +437,7 @@ module.exports = function(logger, portalConfig, poolConfigs){
shares: workerShares,
invalidshares: 0,
currRoundShares: 0,
currRoundTime: 0,
hashrate: null,
hashrateString: null,
luckDays: null,
@ -436,6 +455,7 @@ module.exports = function(logger, portalConfig, poolConfigs){
shares: workerShares,
invalidshares: 0,
currRoundShares: 0,
currRoundTime: 0,
hashrate: null,
hashrateString: null,
luckDays: null,
@ -455,6 +475,7 @@ module.exports = function(logger, portalConfig, poolConfigs){
shares: 0,
invalidshares: -workerShares,
currRoundShares: 0,
currRoundTime: 0,
hashrate: null,
hashrateString: null,
luckDays: null,
@ -472,6 +493,7 @@ module.exports = function(logger, portalConfig, poolConfigs){
shares: 0,
invalidshares: -workerShares,
currRoundShares: 0,
currRoundTime: 0,
hashrate: null,
hashrateString: null,
luckDays: null,
@ -510,6 +532,7 @@ module.exports = function(logger, portalConfig, poolConfigs){
portalStats.algos[algo].workers += Object.keys(coinStats.workers).length;
var _shareTotal = parseFloat(0);
var _maxTimeShare = parseFloat(0);
for (var worker in coinStats.currentRoundShares) {
var miner = worker.split(".")[0];
if (miner in coinStats.miners) {
@ -520,8 +543,21 @@ module.exports = function(logger, portalConfig, poolConfigs){
}
_shareTotal += parseFloat(coinStats.currentRoundShares[worker]);
}
for (var worker in coinStats.currentRoundTimes) {
var time = parseFloat(coinStats.currentRoundTimes[worker]);
if (_maxTimeShare < time)
_maxTimeShare = time;
var miner = worker.split(".")[0];
if (miner in coinStats.miners) {
coinStats.miners[miner].currRoundTime += parseFloat(coinStats.currentRoundTimes[worker]);
}
}
coinStats.shareCount = _shareTotal;
coinStats.maxRoundTime = _maxTimeShare;
coinStats.maxRoundTimeString = readableSeconds(_maxTimeShare);
for (var worker in coinStats.workers) {
var _workerRate = shareMultiplier * coinStats.workers[worker].shares / portalConfig.website.stats.hashrateWindow;
var _wHashRate = (_workerRate / 1000000) * 2;
@ -559,6 +595,7 @@ module.exports = function(logger, portalConfig, poolConfigs){
delete saveStats.pools[pool].pending;
delete saveStats.pools[pool].confirmed;
delete saveStats.pools[pool].currentRoundShares;
delete saveStats.pools[pool].currentRoundTimes;
delete saveStats.pools[pool].payments;
delete saveStats.pools[pool].miners;
});
@ -591,13 +628,24 @@ module.exports = function(logger, portalConfig, poolConfigs){
};
function sortBlocks(a, b) {
    // Comparator: order block records (colon-delimited strings
    // "hash:tx:height:worker:...") by block height, highest first.
    // BUG FIX: the heights must be compared numerically — comparing the raw
    // string fields sorts lexicographically, so e.g. "10" < "9" and newer
    // blocks could appear below older ones. Parse to integers first.
    var as = parseInt(a.split(":")[2], 10);
    var bs = parseInt(b.split(":")[2], 10);
    if (as > bs) return -1;
    if (as < bs) return 1;
    return 0;
}
function sortPoolsByName(objects) {
    // Rebuild the pool-stats map with its keys ordered alphabetically by
    // pool name, using the shared sortProperties helper to produce the
    // ordered [key, value] pairs.
    var ordered = {};
    var pairs = sortProperties(objects, 'name', false, false);
    for (var idx = 0; idx < pairs.length; idx++) {
        ordered[pairs[idx][0]] = pairs[idx][1];
    }
    return ordered;
}
function sortBlocks(a, b) {
    // Comparator for block records ("hash:tx:height:worker:..."):
    // numerically compare the height field so higher (newer) blocks
    // sort first; equal or unparseable heights compare as equal.
    var heightA = parseInt(a.split(":")[2]);
    var heightB = parseInt(b.split(":")[2]);
    if (heightA > heightB) return -1;
    return heightA < heightB ? 1 : 0;
}
function sortWorkersByName(objects) {
var newObject = {};

View File

@ -98,7 +98,13 @@ module.exports = function(logger){
// if an html file was changed reload it
/* requires node-watch 0.5.0 or newer */
watch(['./website', './website/pages'], function(evt, filename){
var basename = path.basename(filename);
var basename;
// support older versions of node-watch automatically
if (!filename && evt)
basename = path.basename(evt);
else
basename = path.basename(filename);
if (basename in pageFiles){
readPageFiles([basename]);
logger.special(logSystem, 'Server', 'Reloaded file ' + basename);

View File

@ -27,6 +27,8 @@
"paymentProcessing": {
"enabled": true,
"paymentMode": "prop",
"_comment_paymentMode":"prop, pplnt",
"paymentInterval": 57,
"_comment_paymentInterval": "Interval in seconds to check and perform payments.",
"minimumPayment": 0.1,

View File

@ -20,6 +20,8 @@
"paymentProcessing": {
"enabled": false,
"paymentMode": "prop",
"_comment_paymentMode":"prop, pplnt",
"paymentInterval": 20,
"minimumPayment": 0.1,
"maxBlocksPerPayment": 3,

View File

@ -21,6 +21,8 @@
"paymentProcessing": {
"enabled": false,
"paymentMode": "prop",
"_comment_paymentMode":"prop, pplnt",
"paymentInterval": 20,
"minimumPayment": 0.1,
"maxBlocksPerPayment": 1,

View File

@ -25,6 +25,8 @@
"paymentProcessing": {
"enabled": true,
"paymentMode": "prop",
"_comment_paymentMode":"prop, pplnt",
"paymentInterval": 20,
"minimumPayment": 0.1,
"maxBlocksPerPayment": 3,

View File

@ -153,11 +153,15 @@
{{if (block[4] != null) { }}
<span style="padding-left: 18px;"><small>{{=readableDate(block[4])}}</small></span>
{{ } }}
{{if (it.stats.pools[pool].pending.confirms[block[0]]) { }}
<span style="float:right; color: red;"><small>{{=it.stats.pools[pool].pending.confirms[block[0]]}} of 100</small></span>
{{ } else { }}
<span style="float:right; color: red;"><small>*PENDING*</small></span>
{{ } }}
{{if (it.stats.pools[pool].pending.confirms) { }}
{{if (it.stats.pools[pool].pending.confirms[block[0]]) { }}
<span style="float:right; color: red;"><small>{{=it.stats.pools[pool].pending.confirms[block[0]]}} of 100</small></span>
{{ } else { }}
<span style="float:right; color: red;"><small>*PENDING*</small></span>
{{ } }}
{{ } else { }}
<span style="float:right; color: red;"><small>*PENDING*</small></span>
{{ } }}
<div><i class="fa fa-gavel"></i><small>Mined By:</small> <a href="/workers/{{=block[3].split('.')[0]}}">{{=block[3]}}</a></div>
</div>
{{ blockscomb.push(block);}}