[REDIS BREAKING UPDATE] Changed per-coin redis keys to use colon-delimited branching (namespacing) for cleaner management

Matt 2014-05-09 17:43:11 -06:00
parent 04f769a5a6
commit e6556d416e
3 changed files with 26 additions and 26 deletions
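For context, the "branching" in the commit title is Redis key namespacing: every per-coin key moves from an underscore separator (e.g. somecoin_balances) to the colon separator that Redis tooling conventionally treats as a namespace delimiter (somecoin:balances, somecoin:shares:round123), so each coin's keys group into one coin:category tree. Below is a minimal sketch of the convention using the node redis client; the coin name and the key() helper are illustrative, only the key shapes come from this diff.

// Illustrative only: shows the old vs. new key shapes adopted in this commit.
var redis = require('redis');
var client = redis.createClient();

var coin = 'somecoin';

// old: somecoin_balances, somecoin_stats, somecoin_shares:round123
// new: somecoin:balances, somecoin:stats, somecoin:shares:round123
function key(){
    return [coin].concat(Array.prototype.slice.call(arguments)).join(':');
}

client.hincrbyfloat(key('balances'), 'workerAddress', 0.5);
client.hincrby(key('stats'), 'validShares', 1);
client.hgetall(key('shares', 'round123'), function(err, shares){
    console.log(shares); // shares credited to round 123 under the new scheme
});

The "[REDIS BREAKING UPDATE]" tag presumably refers to the fact that data already stored under the underscore-style keys is not migrated by this commit, so existing deployments would need to rename their keys (or start fresh).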

View File

@@ -152,8 +152,8 @@ function SetupForPool(logger, poolOptions, setupFinished){
     startRedisTimer();
     redisClient.multi([
-        ['hgetall', coin + '_balances'],
-        ['smembers', coin + '_blocksPending']
+        ['hgetall', coin + ':balances'],
+        ['smembers', coin + ':blocksPending']
     ]).exec(function(error, results){
         endRedisTimer();
@@ -294,7 +294,7 @@ function SetupForPool(logger, poolOptions, setupFinished){
     var shareLookups = rounds.map(function(r){
-        return ['hgetall', coin + '_shares:round' + r.height]
+        return ['hgetall', coin + ':shares:round' + r.height]
     });
     startRedisTimer();
@@ -419,13 +419,13 @@ function SetupForPool(logger, poolOptions, setupFinished){
     if (worker.balanceChange !== 0){
         balanceUpdateCommands.push([
             'hincrbyfloat',
-            coin + '_balances',
+            coin + ':balances',
             w,
             satoshisToCoins(worker.balanceChange)
         ]);
     }
     if (worker.sent !== 0){
-        workerPayoutsCommand.push(['hincrbyfloat', coin + '_payouts', w, worker.sent]);
+        workerPayoutsCommand.push(['hincrbyfloat', coin + ':payouts', w, worker.sent]);
         totalPaid += worker.sent;
     }
 }
@@ -439,7 +439,7 @@ function SetupForPool(logger, poolOptions, setupFinished){
     var moveSharesToCurrent = function(r){
         var workerShares = r.workerShares;
         Object.keys(workerShares).forEach(function(worker){
-            orphanMergeCommands.push(['hincrby', coin + '_shares:roundCurrent',
+            orphanMergeCommands.push(['hincrby', coin + ':shares:roundCurrent',
                 worker, workerShares[worker]]);
         });
     };
@@ -448,17 +448,17 @@ function SetupForPool(logger, poolOptions, setupFinished){
     switch(r.category){
         case 'kicked':
-            movePendingCommands.push(['smove', coin + '_blocksPending', coin + '_blocksKicked', r.serialized]);
+            movePendingCommands.push(['smove', coin + ':blocksPending', coin + ':blocksKicked', r.serialized]);
         case 'orphan':
-            movePendingCommands.push(['smove', coin + '_blocksPending', coin + '_blocksOrphaned', r.serialized]);
+            movePendingCommands.push(['smove', coin + ':blocksPending', coin + ':blocksOrphaned', r.serialized]);
             if (r.canDeleteShares){
                 moveSharesToCurrent(r);
-                roundsToDelete.push(coin + '_shares:round' + r.height);
+                roundsToDelete.push(coin + ':shares:round' + r.height);
             }
             return;
         case 'generate':
-            movePendingCommands.push(['smove', coin + '_blocksPending', coin + '_blocksConfirmed', r.serialized]);
-            roundsToDelete.push(coin + '_shares:round' + r.height);
+            movePendingCommands.push(['smove', coin + ':blocksPending', coin + ':blocksConfirmed', r.serialized]);
+            roundsToDelete.push(coin + ':shares:round' + r.height);
             return;
     }
@@ -482,7 +482,7 @@ function SetupForPool(logger, poolOptions, setupFinished){
     finalRedisCommands.push(['del'].concat(roundsToDelete));
     if (totalPaid !== 0)
-        finalRedisCommands.push(['hincrbyfloat', coin + '_stats', 'totalPaid', totalPaid]);
+        finalRedisCommands.push(['hincrbyfloat', coin + ':stats', 'totalPaid', totalPaid]);
     if (finalRedisCommands.length === 0){
         callback();

View File

@@ -45,27 +45,27 @@ module.exports = function(logger, poolConfig){
     var redisCommands = [];
     if (isValidShare){
-        redisCommands.push(['hincrbyfloat', coin + '_shares:roundCurrent', shareData.worker, shareData.difficulty]);
-        redisCommands.push(['hincrby', coin + '_stats', 'validShares', 1]);
+        redisCommands.push(['hincrbyfloat', coin + ':shares:roundCurrent', shareData.worker, shareData.difficulty]);
+        redisCommands.push(['hincrby', coin + ':stats', 'validShares', 1]);
         /* Stores share diff, worker, and unique value with a score that is the timestamp. Unique value ensures it
            doesn't overwrite an existing entry, and timestamp as score lets us query shares from last X minutes to
            generate hashrate for each worker and pool. */
         var dateNow = Date.now();
         var hashrateData = [shareData.difficulty, shareData.worker, dateNow];
-        redisCommands.push(['zadd', coin + '_hashrate', dateNow / 1000 | 0, hashrateData.join(':')]);
+        redisCommands.push(['zadd', coin + ':hashrate', dateNow / 1000 | 0, hashrateData.join(':')]);
     }
     else{
-        redisCommands.push(['hincrby', coin + '_stats', 'invalidShares', 1]);
+        redisCommands.push(['hincrby', coin + ':stats', 'invalidShares', 1]);
     }
     if (isValidBlock){
-        redisCommands.push(['rename', coin + '_shares:roundCurrent', coin + '_shares:round' + shareData.height]);
-        redisCommands.push(['sadd', coin + '_blocksPending', [shareData.blockHash, shareData.txHash, shareData.height].join(':')]);
-        redisCommands.push(['hincrby', coin + '_stats', 'validBlocks', 1]);
+        redisCommands.push(['rename', coin + ':shares:roundCurrent', coin + ':shares:round' + shareData.height]);
+        redisCommands.push(['sadd', coin + ':blocksPending', [shareData.blockHash, shareData.txHash, shareData.height].join(':')]);
+        redisCommands.push(['hincrby', coin + ':stats', 'validBlocks', 1]);
     }
     else if (shareData.blockHash){
-        redisCommands.push(['hincrby', coin + '_stats', 'invalidBlocks', 1]);
+        redisCommands.push(['hincrby', coin + ':stats', 'invalidBlocks', 1]);
     }
     connection.multi(redisCommands).exec(function(err, replies){
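The comment above the zadd call describes the hashrate bookkeeping: each accepted share lands in the coin:hashrate sorted set with the submission time in seconds as the score and 'difficulty:worker:timestampMs' as the member, so shares from the last X minutes can be pulled with a range query and summed into a hashrate estimate. A hedged sketch of such a query with the node redis client follows; the window length, the 2^32 hashes-per-difficulty-1-share approximation, and the variable names are assumptions, not part of this commit.

// Sketch: estimate pool hashrate from the coin:hashrate sorted set.
// Assumes the 'difficulty:worker:timestampMs' member format used in the diff.
var redis = require('redis');
var client = redis.createClient();

var coin = 'somecoin';
var windowSeconds = 600; // look back 10 minutes (illustrative)
var since = (Date.now() / 1000 | 0) - windowSeconds;

client.zrangebyscore(coin + ':hashrate', since, '+inf', function(err, members){
    if (err) throw err;
    var totalDifficulty = members.reduce(function(sum, entry){
        // entry looks like '16:workerAddress:1399671791000'
        return sum + parseFloat(entry.split(':')[0]);
    }, 0);
    // Roughly 2^32 hashes per difficulty-1 share for SHA-256d; other
    // algorithms scale this by an algorithm-specific multiplier.
    var hashesPerSecond = totalDifficulty * Math.pow(2, 32) / windowSeconds;
    console.log('approx pool hashrate:', hashesPerSecond, 'H/s');
});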

View File

@@ -109,12 +109,12 @@ module.exports = function(logger, portalConfig, poolConfigs){
     var redisCommandTemplates = [
-        ['zremrangebyscore', '_hashrate', '-inf', '(' + windowTime],
-        ['zrangebyscore', '_hashrate', windowTime, '+inf'],
-        ['hgetall', '_stats'],
-        ['scard', '_blocksPending'],
-        ['scard', '_blocksConfirmed'],
-        ['scard', '_blocksOrphaned']
+        ['zremrangebyscore', ':hashrate', '-inf', '(' + windowTime],
+        ['zrangebyscore', ':hashrate', windowTime, '+inf'],
+        ['hgetall', ':stats'],
+        ['scard', ':blocksPending'],
+        ['scard', ':blocksConfirmed'],
+        ['scard', ':blocksOrphaned']
     ];
     var commandsPerCoin = redisCommandTemplates.length;
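Note that the stats templates deliberately store only the key suffixes (':hashrate', ':stats', and so on); commandsPerCoin then lets the module slice the flat reply array from one big multi() back into per-coin chunks. The surrounding loop is not part of this diff, so the following is only a sketch of how that expansion plausibly works, with redisStats standing in for whatever client the module actually uses.

// Sketch (assumed surrounding code): prefix each template key with the coin
// name, run everything in one multi(), then slice the replies per coin.
var redisCommands = [];
var coins = Object.keys(poolConfigs);

coins.forEach(function(coin){
    redisCommandTemplates.forEach(function(template){
        var command = template.slice(0);  // copy so the template stays reusable
        command[1] = coin + command[1];   // ':stats' -> 'somecoin:stats', etc.
        redisCommands.push(command);
    });
});

redisStats.multi(redisCommands).exec(function(err, replies){
    coins.forEach(function(coin, i){
        // each coin's replies occupy one contiguous block of commandsPerCoin entries
        var coinReplies = replies.slice(i * commandsPerCoin, (i + 1) * commandsPerCoin);
        // e.g. coinReplies[2] is the hgetall of coin + ':stats'
    });
});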