Merge branch 'zone117x-master'

Eugene@ubuntu 2014-04-05 04:45:15 +04:00
commit 245cca1c9b
7 changed files with 86 additions and 55 deletions

View File

@@ -170,6 +170,34 @@ Description of options:
"enabled": true, //Set this to false and a pool will not be created from this config file
"coin": "litecoin.json", //Reference to coin config file in 'coins' directory
"address": "mi4iBXbBsydtcc5yFmsff2zCFVX4XG7qJc", //Address to where block rewards are given
"blockRefreshInterval": 1000, //How often to poll RPC daemons for new blocks, in milliseconds
/* How many milliseconds must pass before new block transactions trigger a new
job broadcast. */
"txRefreshInterval": 20000,
/* Some miner software is buggy and will consider the pool offline if it receives nothing
for around a minute. Every time jobs are broadcast, a timeout is set to rebroadcast them
after this many seconds unless a new job is found first. Set to zero or remove to disable. */
"jobRebroadcastTimeout": 55,
//instanceId: 37, //Recommend not using this because a crypto-random one will be generated
/* Some attackers create thousands of worker connections that use up all available sockets;
these workers are usually zombies that never submit shares after connecting. This feature
detects such connections and disconnects them. */
"connectionTimeout": 600, //Remove workers that haven't been in contact for this many seconds
/* Sometimes you want the block hashes even for shares that aren't block candidates. */
"emitInvalidBlockHashes": false,
/* This pool uses the exact maximum algorithm difficulties found in the coin daemon source
code. Most scrypt miners and pools use an estimated value that is about 5.86% off from the
actual one, so this sets a tolerance threshold for shares that come in slightly too low
because the mining app used an incorrect max diff while this pool uses the correct one
(see the sketch after this file's changes). */
"shareVariancePercent": 10,
/* This determines what to do with submitted shares (and stratum worker authentication).
You have two options:
@@ -249,29 +277,6 @@ Description of options:
}
},
"address": "mi4iBXbBsydtcc5yFmsff2zCFVX4XG7qJc", //Address to where block rewards are given
"blockRefreshInterval": 1000, //How often to poll RPC daemons for new blocks, in milliseconds
/* How many milliseconds must pass before new block transactions trigger a new
job broadcast. */
"txRefreshInterval": 20000,
/* Some miner software is buggy and will consider the pool offline if it receives nothing
for around a minute. Every time jobs are broadcast, a timeout is set to rebroadcast them
after this many seconds unless a new job is found first. Set to zero or remove to disable. */
"jobRebroadcastTimeout": 55,
//instanceId: 37, //Recommend not using this because a crypto-random one will be generated
/* Some attackers create thousands of worker connections that use up all available sockets;
these workers are usually zombies that never submit shares after connecting. This feature
detects such connections and disconnects them. */
"connectionTimeout": 600, //Remove workers that haven't been in contact for this many seconds
/* Sometimes you want the block hashes even for shares that aren't block candidates. */
"emitInvalidBlockHashes": false,
/* If a worker is submitting a high threshold of invalid shares we can temporarily ban them
to reduce system/network load. Also useful to fight against flooding attacks. */
"banning": {
@@ -385,14 +390,14 @@ Donations
---------
To support development of this project feel free to donate :)
* BTC: 1KRotMnQpxu3sePQnsVLRy3EraRFYfJQFR
* LTC: LKfavSDJmwiFdcgaP1bbu46hhyiWw5oFhE
* VTC: VgW4uFTZcimMSvcnE4cwS3bjJ6P8bcTykN
* MAX: mWexUXRCX5PWBmfh34p11wzS5WX2VWvTRT
* QRK: QehPDAhzVQWPwDPQvmn7iT3PoFUGT7o8bC
* DRK: XcQmhp8ANR7okWAuArcNFZ2bHSB81jpapQ
* DOGE: DBGGVtwAAit1NPZpRm5Nz9VUFErcvVvHYW
* Cryptsy Trade Key: 254ca13444be14937b36c44ba29160bd8f02ff76
* BTC: `1KRotMnQpxu3sePQnsVLRy3EraRFYfJQFR`
* LTC: `LKfavSDJmwiFdcgaP1bbu46hhyiWw5oFhE`
* VTC: `VgW4uFTZcimMSvcnE4cwS3bjJ6P8bcTykN`
* MAX: `mWexUXRCX5PWBmfh34p11wzS5WX2VWvTRT`
* QRK: `QehPDAhzVQWPwDPQvmn7iT3PoFUGT7o8bC`
* DRK: `XcQmhp8ANR7okWAuArcNFZ2bHSB81jpapQ`
* DOGE: `DBGGVtwAAit1NPZpRm5Nz9VUFErcvVvHYW`
* Cryptsy Trade Key: `254ca13444be14937b36c44ba29160bd8f02ff76`
Credits
-------
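
Looking back at the shareVariancePercent option added above, the check it describes is just a percentage-shortfall comparison. The sketch below is a minimal illustration under that reading; the function name and both difficulty parameters are made up for the example and are not part of NOMP's actual share-processing API.

```javascript
// Hypothetical shareVariancePercent check; names are illustrative, not NOMP's API.
// A share that falls short of the requested difficulty by no more than the configured
// percentage is still counted, absorbing the ~5.86% max-diff discrepancy noted above.
function isShareWithinVariance(shareDiff, stratumDiff, shareVariancePercent) {
    if (shareDiff >= stratumDiff) return true; // met or beat the requested difficulty
    var shortfallPercent = (1 - shareDiff / stratumDiff) * 100;
    return shortfallPercent <= shareVariancePercent;
}

// With "shareVariancePercent": 10, a share 5% under target is still accepted:
console.log(isShareWithinVariance(15.2, 16, 10)); // true
console.log(isShareWithinVariance(14.0, 16, 10)); // false (12.5% short)
```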

View File

@@ -126,10 +126,10 @@ module.exports = function(logger){
if (isValidShare)
logger.debug(logSystem, logComponent, logSubCat, 'Valid share of difficulty ' + data.difficulty + ' by ' + data.worker + ' [' + data.ip + ']' );
logger.debug(logSystem, logComponent, logSubCat, 'Share accepted at diff ' + data.difficulty + ' with diff ' + data.shareDiff + ' by ' + data.worker + ' [' + data.ip + ']' );
else if (!isValidShare)
logger.debug(logSystem, logComponent, logSubCat, 'Invalid share submitted, share data: ' + shareData);
logger.debug(logSystem, logComponent, logSubCat, 'Share rejected: ' + shareData);
handlers.share(isValidShare, isValidBlock, data)

View File

@@ -135,8 +135,7 @@ module.exports = function(logger, portalConfig, poolConfigs){
});
var shareMultiplier = algos[coinStats.algorithm].multiplier || 0;
var hashratePre = shareMultiplier * coinStats.shares / portalConfig.website.hashrateWindow;
coinStats.hashrate = hashratePre / 1e3 | 0;
portalStats.global.hashrate += coinStats.hashrate;
coinStats.hashrate = hashratePre | 0;
portalStats.global.workers += Object.keys(coinStats.workers).length;
/* algorithm specific global stats */
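
The change above stops dividing by 1e3, so coinStats.hashrate becomes a raw hashes-per-second figure instead of KH/s (the hashrateString used elsewhere formats it with units). Here is a worked example of the same formula with made-up numbers, assuming hashrateWindow is the stats window in seconds and using 65536 (2^16) as a typical scrypt share multiplier.

```javascript
// Worked example of the hashrate formula above with invented inputs.
var shareMultiplier = 65536;  // assumed scrypt multiplier (2^16)
var shares = 550;             // summed difficulty of shares seen in the window
var hashrateWindow = 600;     // assumed window length in seconds

var hashratePre = shareMultiplier * shares / hashrateWindow; // ~60074.67 H/s

console.log(hashratePre / 1e3 | 0); // 60     -- old code: truncated KH/s
console.log(hashratePre | 0);       // 60074  -- new code: raw H/s, formatted later
```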

View File

@@ -2,6 +2,14 @@
"enabled": false,
"coin": "litecoin.json",
"address": "n4jSe18kZMCdGcZqaYprShXW6EH1wivUK1",
"blockRefreshInterval": 1000,
"txRefreshInterval": 20000,
"jobRebroadcastTimeout": 55,
"connectionTimeout": 600,
"emitInvalidBlockHashes": false,
"shareVariancePercent": 15,
"shareProcessing": {
"internal": {
"enabled": true,
@@ -35,12 +43,6 @@
}
},
"address": "n4jSe18kZMCdGcZqaYprShXW6EH1wivUK1",
"blockRefreshInterval": 1000,
"txRefreshInterval": 20000,
"jobRebroadcastTimeout": 55,
"connectionTimeout": 600,
"banning": {
"enabled": true,
"time": 600,
@@ -61,7 +63,7 @@
}
},
"3032": {
"diff": 16
"diff": 8
},
"3256": {
"diff": 256

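The "connectionTimeout": 600 added to this example config is the zombie-worker cleanup the README changes describe. Below is a minimal sketch of how such a timeout can be applied to a plain TCP stratum socket; the server, port, and handler are illustrative, not NOMP's implementation.

```javascript
// Hypothetical enforcement of "connectionTimeout" (in seconds) on stratum sockets.
var net = require('net');

var connectionTimeout = 600; // from the pool config

var server = net.createServer(function(socket){
    // Drop workers that send nothing for connectionTimeout seconds so zombie
    // connections don't exhaust the available sockets.
    socket.setTimeout(connectionTimeout * 1000);
    socket.on('timeout', function(){
        socket.destroy();
    });
});

server.listen(3032); // port borrowed from this example config for illustration
```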
View File

@@ -50,10 +50,12 @@
</li>
</ul>
<div class="stats">
<div><i class="fa fa-users"></i>&nbsp;<span id="statsMiners">{{=it.stats.global.workers}}</span> Miners</div>
<div><i class="fa fa-tachometer"></i>&nbsp;<span id="statsHashrate">{{=it.stats.global.hashrate}}</span> KH/s</div>
</div>
{{ for(var algo in it.stats.algos) { }}
{{=algo}}: <div class="stats">
<div><i class="fa fa-users"></i>&nbsp;<span id="statsMiners{{=algo}}">{{=it.stats.algos[algo].workers}}</span> Miners</div>
<div><i class="fa fa-tachometer"></i>&nbsp;<span id="statsHashrate{{=algo}}">{{=it.stats.algos[algo].hashrateString}}</span></div>
</div>&nbsp;
{{ } }}
</div>
</header>
@@ -86,4 +88,4 @@
</body>
</html>
</html>
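
The new header markup loops over it.stats.algos. Judging only from the fields the template references, the per-algorithm stats object looks roughly like the sketch below; the values are invented.

```javascript
// Assumed shape of the per-algorithm stats consumed by the header template above;
// only the field names are grounded in the template, the values are made up.
var stats = {
    algos: {
        scrypt: { workers: 11, hashrateString: '58.40 MH/s' },
        sha256: { workers: 3,  hashrateString: '2.83 GH/s' }
    }
};
```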

View File

@@ -1,5 +1,27 @@
<div>
fancy graphs here
<br>
{{=JSON.stringify(it.stats)}}
</div>
<table class="pure-table">
<thead>
<tr>
<th>Pool</th>
<th>Algo</th>
<th>Workers</th>
<th>Valid Shares</th>
<th>Invalid Shares</th>
<th>Blocks</th>
<th>Hashrate</th>
</tr>
</thead>
{{ for(var pool in it.stats.pools) { }}
<tr class="pure-table-odd">
<td>{{=it.stats.pools[pool].name}}</td>
<td>{{=it.stats.pools[pool].algorithm}}</td>
<td>{{=Object.keys(it.stats.pools[pool].workers).length}}</td>
<td>{{=it.stats.pools[pool].poolStats.validShares}}</td>
<td>{{=it.stats.pools[pool].poolStats.invalidShares}}</td>
<td>{{=it.stats.pools[pool].poolStats.validBlocks}}</td>
<td>{{=it.stats.pools[pool].hashrateString}}</td>
</tr>
{{ } }}
</table>
</div>
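
The pool table iterates it.stats.pools in doT syntax. Below is a minimal, self-contained sketch of rendering a similar template with the doT package (`npm install dot`); the inline template is a cut-down stand-in for the page above, and the sample pool stats are invented.

```javascript
// Minimal doT rendering sketch; field names mirror the template above, values are made up.
var doT = require('dot');

var source =
    '{{ for(var pool in it.stats.pools) { }}' +
    '{{=it.stats.pools[pool].name}} ({{=it.stats.pools[pool].algorithm}}): ' +
    '{{=it.stats.pools[pool].hashrateString}}, ' +
    '{{=it.stats.pools[pool].poolStats.validShares}} valid shares\n' +
    '{{ } }}';

var render = doT.template(source);

console.log(render({
    stats: {
        pools: {
            litecoin: {
                name: 'Litecoin',
                algorithm: 'scrypt',
                hashrateString: '58.40 MH/s',
                poolStats: { validShares: 1024, invalidShares: 3, validBlocks: 1 }
            }
        }
    }
}));
```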

View File

@@ -1,6 +1,5 @@
$(function(){
var hotSwap = function(page, pushState){
if (pushState) history.pushState(null, null, '/' + page);
$('.selected').removeClass('selected');
@@ -29,8 +28,10 @@ $(function(){
var statsSource = new EventSource("/api/live_stats");
statsSource.addEventListener('message', function(e){
var stats = JSON.parse(e.data);
$('#statsMiners').text(stats.global.workers);
$('#statsHashrate').text(stats.global.hashrate);
for (var algo in stats.algos) {
$('#statsMiners'+algo).text(stats.algos[algo].workers);
$('#statsHashrate'+algo).text(stats.algos[algo].hashrateString);
}
});
});
});
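
The script above subscribes to /api/live_stats with an EventSource, which expects Server-Sent Events framing. Here is a rough sketch of what the serving side of such an endpoint can look like in plain Node; the route handling, port, and payload are assumptions for illustration, not NOMP's website module.

```javascript
// Hypothetical Server-Sent Events endpoint matching the EventSource usage above.
var http = require('http');

http.createServer(function(req, res){
    if (req.url !== '/api/live_stats'){
        res.writeHead(404);
        res.end();
        return;
    }
    res.writeHead(200, {
        'Content-Type': 'text/event-stream',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive'
    });
    // Each "data: ...\n\n" frame arrives as one 'message' event in the browser.
    var timer = setInterval(function(){
        var stats = { algos: { scrypt: { workers: 11, hashrateString: '58.40 MH/s' } } };
        res.write('data: ' + JSON.stringify(stats) + '\n\n');
    }, 5000);
    req.on('close', function(){ clearInterval(timer); });
}).listen(8080); // illustrative port
```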