More efficient share processing with redis

Matt 2014-03-13 17:20:57 -06:00
parent 0edfbdf0e1
commit 6e41473f01
2 changed files with 43 additions and 22 deletions


@@ -48,33 +48,38 @@ module.exports = function(logger, poolConfig){
     this.handleShare = function(isValidShare, isValidBlock, shareData){
-        if (!isValidShare) return;
-        /*use http://redis.io/commands/zrangebyscore to store shares with timestamps
-        so we can use the min-max to get shares from the last x minutes to determine hash rate :)
-        also use a hash like coin_stats:{ invalidShares, validShares, invalidBlocks, validBlocks, etc }
-        for more efficient stats
-        */
-        //store share diff, worker, and unique value with a score that is the timestamp
-        //unique value ensures it doesnt overwrite an existing entry
-        //the timestamp as score lets us query shares from last X minutes to generate hashrate for each worker and pool
-        connection.zadd(coin + '_hashrate', Date.now() / 1000 | 0, shareData.difficulty + ':' + shareData.worker + ':' + Math.random());
-        connection.hincrby([coin + '_shares:roundCurrent', shareData.worker, shareData.difficulty], function(error, result){
-            if (error)
-                logger.error('redis', 'Could not store worker share')
-        });
+        var redisCommands = [];
+        if (isValidShare){
+            redisCommands.push(['hincrby', coin + '_shares:roundCurrent', shareData.worker, shareData.difficulty]);
+            redisCommands.push(['hincrby', coin + '_stats', 'validShares', 1]);
+            /* Stores share diff, worker, and unique value with a score that is the timestamp. Unique value ensures it
+               doesn't overwrite an existing entry, and timestamp as score lets us query shares from last X minutes to
+               generate hashrate for each worker and pool. */
+            redisCommands.push(['zadd', coin + '_hashrate', Date.now() / 1000 | 0, [shareData.difficulty, shareData.worker, Math.random()].join(':')]);
+        }
+        else{
+            redisCommands.push(['hincrby', coin + '_stats', 'invalidShares', 1]);
+        }
         if (isValidBlock){
-            connection.rename(coin + '_shares:roundCurrent', coin + '_shares:round' + shareData.height, function(result){
-                console.log('rename result: ' + result);
-            });
-            connection.sadd([coin + '_blocks', shareData.tx + ':' + shareData.height + ':' + shareData.reward], function(error, result){
-                if (error)
-                    logger.error('redis', 'Could not store block data');
-            });
+            redisCommands.push(['rename', coin + '_shares:roundCurrent', coin + '_shares:round' + shareData.height]);
+            redisCommands.push(['sadd', coin + '_blocks', shareData.tx + ':' + shareData.height + ':' + shareData.reward]);
+            redisCommands.push(['hincrby', coin + '_stats', 'validBlocks', 1]);
         }
+        else if (shareData.solution){
+            redisCommands.push(['hincrby', coin + '_stats', 'invalidBlocks', 1]);
+        }
+        connection.multi(redisCommands).exec(function(err, replies){
+            if (err)
+                console.log('error with share processor multi ' + JSON.stringify(err));
+            else{
+                console.log(JSON.stringify(replies));
+            }
+        });
     };
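Not part of the diff above, but a rough illustration of how the new coin + '_hashrate' sorted set is meant to be consumed: each share is zadd'ed with its timestamp as the score, so a zrangebyscore over the last N seconds returns only the recent shares, and summing their difficulty per worker gives the raw data for a hashrate estimate. The sketch below assumes the same connection (node_redis client) and coin variables used by the share processor; getRecentShares and windowSeconds are hypothetical names introduced here.

    // Sketch only, not part of this commit. Assumes the same `connection` and `coin`
    // as the share processor above; `getRecentShares`/`windowSeconds` are made-up names.
    var getRecentShares = function(windowSeconds, callback){
        var now = Date.now() / 1000 | 0;
        // Members were stored as 'difficulty:worker:random' with the share timestamp as score,
        // so a score-range query returns only shares from the last `windowSeconds` seconds.
        connection.zrangebyscore([coin + '_hashrate', now - windowSeconds, '+inf'], function(error, entries){
            if (error) return callback(error);
            var difficultyByWorker = {};
            entries.forEach(function(entry){
                var parts = entry.split(':');
                var worker = parts[1];
                difficultyByWorker[worker] = (difficultyByWorker[worker] || 0) + parseFloat(parts[0]);
            });
            // Summed difficulty over the window can then be converted into an estimated hashrate.
            callback(null, difficultyByWorker);
        });
    };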


@@ -0,0 +1,16 @@
+<!doctype html>
+<html lang="en">
+<head>
+<meta charset="utf-8">
+<title>Title</title>
+</head>
+<body>
+</body>
+</html>