Added "emitInvalidBlocksHahes" option for those in MPOS mode that require it.

Matt 2014-04-02 13:01:05 -06:00
parent 6aae77a159
commit b08468ba8d
8 changed files with 84 additions and 13 deletions

.gitignore (vendored, 3 lines changed)

@@ -1,2 +1,3 @@
node_modules/
- .idea/
+ .idea/
+ config.json


@@ -267,6 +267,9 @@ Description of options:
detects those and disconnects them. */
"connectionTimeout": 600, //Remove workers that haven't been in contact for this many seconds
+ /* Sometimes you want the block hashes even for shares that aren't block candidates. */
+ "emitInvalidBlockHashes": false,
/* If a worker is submitting a high threshold of invalid shares we can temporarily ban them
to reduce system/network load. Also useful to fight against flooding attacks. */
"banning": {

coins/365coin.json (new file, 5 lines)

@@ -0,0 +1,5 @@
{
"name": "365coin",
"symbol": "365",
"algorithm": "keccak"
}

config_example.json (new file, 61 lines)

@@ -0,0 +1,61 @@
{
    "logLevel": "debug",
    "clustering": {
        "enabled": true,
        "forks": "auto"
    },
    "blockNotifyListener": {
        "enabled": false,
        "port": 8117,
        "password": "test"
    },
    "redisBlockNotifyListener": {
        "enabled" : false,
        "redisPort" : 6379,
        "redisHost" : "hostname",
        "psubscribeKey" : "newblocks:*"
    },
    "website": {
        "enabled": true,
        "siteTitle": "Cryppit",
        "port": 80,
        "statUpdateInterval": 1.5,
        "hashrateWindow": 300
    },
    "proxy": {
        "enabled": false,
        "ports": {
            "80": {
                "diff": 32,
                "varDiff": {
                    "minDiff" : 8,
                    "maxDiff" : 512,
                    "targetTime" : 15,
                    "retargetTime" : 90,
                    "variancePercent" : 30
                }
            },
            "6000": {
                "diff": 32,
                "varDiff": {
                    "minDiff" : 8,
                    "maxDiff" : 512,
                    "targetTime" : 15,
                    "retargetTime" : 90,
                    "variancePercent" : 30
                }
            },
            "8080": {
                "diff": 32,
                "varDiff": {
                    "minDiff" : 8,
                    "maxDiff" : 512,
                    "targetTime" : 15,
                    "retargetTime" : 90,
                    "variancePercent" : 30
                }
            }
        }
    }
}


@@ -70,7 +70,7 @@ module.exports = function(logger, poolConfig){
isValidBlock ? 'Y' : 'N',
shareData.difficulty,
typeof(shareData.error) === 'undefined' ? null : shareData.error,
- typeof(shareData.solution) === 'undefined' ? '' : shareData.solution
+ shareData.blockHash ? shareData.blockHash : (shareData.blockHashInvalid ? shareData.blockHashInvalid : '')
];
connection.query(
'INSERT INTO `shares` SET time = NOW(), rem_host = ?, username = ?, our_result = ?, upstream_result = ?, difficulty = ?, reason = ?, solution = ?',
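
In plain terms, the replaced array element now stores whichever hash the share carried instead of shareData.solution. The same selection written out as a standalone sketch, using only field names that appear in this hunk:

    // Value for the MySQL `solution` column: prefer the block-candidate hash,
    // fall back to the invalid-block hash (populated when emitInvalidBlockHashes
    // is enabled), otherwise store an empty string.
    function solutionColumnValue(shareData) {
        return shareData.blockHash
            ? shareData.blockHash
            : (shareData.blockHashInvalid ? shareData.blockHashInvalid : '');
    }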


@@ -160,7 +160,7 @@ function SetupForPool(logger, poolOptions, setupFinished){
var details = r.split(':');
return {
category: details[0].category,
- solution: details[0],
+ blockHash: details[0],
txHash: details[1],
height: details[2],
reward: details[3],
@@ -191,20 +191,20 @@ function SetupForPool(logger, poolOptions, setupFinished){
txDetails.forEach(function(tx, i){
var round = rounds[i];
- if (tx.error && tx.error.code === -5 || round.solution !== tx.result.blockhash){
+ if (tx.error && tx.error.code === -5 || round.blockHash !== tx.result.blockhash){
/* Block was dropped from coin daemon even after it happily accepted it earlier. */
//If we find another block at the same height then this block was drop-kicked orphaned
var dropKicked = rounds.filter(function(r){
- return r.height === round.height && r.solution !== round.solution && r.category !== 'dropkicked';
+ return r.height === round.height && r.blockHash !== round.blockHash && r.category !== 'dropkicked';
}).length > 0;
if (dropKicked){
logger.warning(logSystem, logComponent,
'A block was drop-kicked orphaned'
- + ' - we found a better block at the same height, solution '
- + round.solution + " round " + round.height);
+ + ' - we found a better block at the same height, blockHash '
+ + round.blockHash + " round " + round.height);
round.category = 'dropkicked';
}
else{
@@ -298,7 +298,7 @@ function SetupForPool(logger, poolOptions, setupFinished){
if (!workerShares){
logger.error(logSystem, logComponent, 'No worker shares for round: '
- + round.height + ' solution: ' + round.solution);
+ + round.height + ' blockHash: ' + round.blockHash);
return;
}
@@ -486,6 +486,7 @@ function SetupForPool(logger, poolOptions, setupFinished){
if (toBePaid !== 0)
finalRedisCommands.push(['hincrbyfloat', coin + '_stats', 'totalPaid', (toBePaid / magnitude).toFixed(coinPrecision)]);
finalRedisCommands.push(['bgsave']);
callback(null, magnitude, workerPayments, finalRedisCommands);
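
The orphan check in the larger hunk above reduces to: a round is drop-kicked when some other tracked round claims the same height with a different block hash. A standalone sketch of that predicate, using Array.prototype.some as an equivalent of the filter().length > 0 form in the diff:

    // True when another round occupies the same height with a different block
    // hash and has not already been marked as drop-kicked.
    function isDropKicked(round, rounds) {
        return rounds.some(function (r) {
            return r.height === round.height
                && r.blockHash !== round.blockHash
                && r.category !== 'dropkicked';
        });
    }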


@@ -118,11 +118,11 @@ module.exports = function(logger){
var shareData = JSON.stringify(data);
- if (data.solution && !isValidBlock)
- logger.debug(logSystem, logComponent, logSubCat, 'We thought a block solution was found but it was rejected by the daemon, share data: ' + shareData);
+ if (data.blockHash && !isValidBlock)
+ logger.debug(logSystem, logComponent, logSubCat, 'We thought a block was found but it was rejected by the daemon, share data: ' + shareData);
else if (isValidBlock)
- logger.debug(logSystem, logComponent, logSubCat, 'Block solution found: ' + data.solution);
+ logger.debug(logSystem, logComponent, logSubCat, 'Block found: ' + data.blockHash);
if (isValidShare)
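
This hunk lives inside the pool's share listener. A minimal sketch of such a listener follows, assuming the (isValidShare, isValidBlock, data) signature of the surrounding handler and a pool object that emits 'share' events; the meaning of blockHashInvalid is inferred from this commit, not documented here.

    // Sketch only: field semantics assumed from the fields visible in this diff.
    pool.on('share', function (isValidShare, isValidBlock, data) {
        if (isValidBlock)
            console.log('Block found: ' + data.blockHash);
        else if (data.blockHash)
            console.log('Block candidate rejected by the daemon: ' + data.blockHash);
        else if (data.blockHashInvalid)
            console.log('Non-candidate share hash (emitInvalidBlockHashes): ' + data.blockHashInvalid);
    });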


@@ -61,10 +61,10 @@ module.exports = function(logger, poolConfig){
if (isValidBlock){
redisCommands.push(['rename', coin + '_shares:roundCurrent', coin + '_shares:round' + shareData.height]);
- redisCommands.push(['sadd', coin + '_blocksPending', [shareData.solution, shareData.tx, shareData.height, shareData.reward].join(':')]);
+ redisCommands.push(['sadd', coin + '_blocksPending', [shareData.blockHash, shareData.txHash, shareData.height, shareData.reward].join(':')]);
redisCommands.push(['hincrby', coin + '_stats', 'validBlocks', 1]);
}
- else if (shareData.solution){
+ else if (shareData.blockHash){
redisCommands.push(['hincrby', coin + '_stats', 'invalidBlocks', 1]);
}
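
Read together with the payment processor hunk earlier, the pending-block entry is a colon-joined string that the payment side splits back apart. A small round-trip sketch; the helper names are hypothetical, and the field order blockHash:txHash:height:reward is taken from the two hunks:

    // Serialize the way the share processor stores a pending block, and parse
    // it back the way the payment processor reads it. Helper names are illustrative.
    function serializePendingBlock(shareData) {
        return [shareData.blockHash, shareData.txHash, shareData.height, shareData.reward].join(':');
    }

    function parsePendingBlock(entry) {
        var details = entry.split(':');
        return {
            blockHash: details[0],
            txHash: details[1],
            height: details[2],
            reward: details[3]
        };
    }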