diff --git a/config.json b/config.json index 2a2fdfd..c1c566f 100644 --- a/config.json +++ b/config.json @@ -7,5 +7,10 @@ "enabled": true, "port": 8117, "password": "test" + }, + "website": { + "enabled": true, + "port": 80, + "liveStats": true } } \ No newline at end of file diff --git a/init.js b/init.js index 1a57431..e9f524b 100644 --- a/init.js +++ b/init.js @@ -9,6 +9,7 @@ var BlocknotifyListener = require('./libs/blocknotifyListener.js'); var WorkerListener = require('./libs/workerListener.js'); var PoolWorker = require('./libs/poolWorker.js'); var PaymentProcessor = require('./libs/paymentProcessor.js'); +var Website = require('./libs/website.js'); JSON.minify = JSON.minify || require("node-json-minify"); @@ -48,6 +49,9 @@ if (cluster.isWorker){ case 'paymentProcessor': new PaymentProcessor(loggerInstance); break; + case 'website': + new Website(loggerInstance); + break; } return; @@ -145,7 +149,24 @@ var startPaymentProcessor = function(poolConfigs){ worker.on('exit', function(code, signal){ logError('paymentProcessor', 'system', 'Payment processor died, spawning replacement...'); setTimeout(function(){ - startPaymentProcessor(poolConfigs); + startPaymentProcessor(poolConfigs); }, 2000); }); }; + + +var startWebsite = function(portalConfig, poolConfigs){ + if (!portalConfig.website.enabled) return; + + var worker = cluster.fork({ + workerType: 'website', + pools: JSON.stringify(poolConfigs), + portalConfig: JSON.stringify(portalConfig) + }); + worker.on('exit', function(code, signal){ + logError('website', 'system', 'Website process died, spawning replacement...'); + setTimeout(function(){ + startWebsite(portalConfig, poolConfigs); + }, 2000); + }); +}; @@ -165,4 +186,6 @@ var startPaymentProcessor = function(poolConfigs){ startWorkerListener(poolConfigs); + startWebsite(portalConfig, poolConfigs); + })(); \ No newline at end of file diff --git a/libs/apis.js b/libs/api.js similarity index 89% rename from libs/apis.js rename to libs/api.js index 
966fc12..da646f8 100644 --- a/libs/apis.js +++ b/libs/api.js @@ -5,7 +5,7 @@ var app = express(); app.get('/getstatus', function (req, res) { res.send({ 'loadavg': os.loadavg(), - 'freemem': os.freemem(), + 'freemem': os.freemem() }); }); diff --git a/libs/paymentProcessor.js b/libs/paymentProcessor.js index 17b859d..8952eda 100644 --- a/libs/paymentProcessor.js +++ b/libs/paymentProcessor.js @@ -211,7 +211,7 @@ function SetupForPool(logger, poolOptions){ callback('done - redis error with multi get rounds share') return; } - console.dir(workerRewards); + var workerBalances = {}; @@ -219,7 +219,6 @@ function SetupForPool(logger, poolOptions){ workerBalances[workers[i]] = parseInt(results[i]) || 0; } - console.dir(workerBalances); callback(null, rounds, workerRewards, workerBalances) }); diff --git a/libs/shareProcessor.js b/libs/shareProcessor.js index 620f6d5..ca9c055 100644 --- a/libs/shareProcessor.js +++ b/libs/shareProcessor.js @@ -19,9 +19,6 @@ module.exports = function(logger, poolConfig){ var redisConfig = internalConfig.redis; var coin = poolConfig.coin.name; - - - var connection; function connect(){ @@ -47,6 +44,13 @@ module.exports = function(logger, poolConfig){ connect(); + //Every 10 minutes clear out old hashrate stat data from redis + setInterval(function(){ + var tenMinutesAgo = (Date.now() / 1000 | 0) - (60 * 10); + connection.zremrangebyscore([coin + '_hashrate', '-inf', tenMinutesAgo]); + }, 10 * 60 * 1000); + + this.handleShare = function(isValidShare, isValidBlock, shareData){ @@ -58,6 +62,10 @@ module.exports = function(logger, poolConfig){ for more efficient stats */ + //store share diff, worker, and unique value with a score that is the timestamp + //unique value ensures it doesnt overwrite an existing entry + //the timestamp as score lets us query shares from last X minutes to generate hashrate for each worker and pool + connection.zadd(coin + '_hashrate', Date.now() / 1000 | 0, shareData.difficulty + ':' + shareData.worker + ':' + 
Math.random()); connection.hincrby([coin + '_shares:roundCurrent', shareData.worker, shareData.difficulty], function(error, result){ if (error) diff --git a/libs/website.js b/libs/website.js index 068bf73..635f3d6 100644 --- a/libs/website.js +++ b/libs/website.js @@ -1,12 +1,67 @@ /* TODO -listen on port 80 for requests, maybe use express. -read website folder files into memory, and use fs.watch to reload changes to any files into memory -on some interval, apply a templating process to it with the latest api stats. on http requests, serve -this templated file and the other resources in memory. +Need to condense the entire website into a single html page. Embedding the javascript and css is easy. For images, +hopefully we can only use svg which can be embedded - otherwise we can convert the image into a data-url that can +be embedded. Favicon can also be a data-url which some javascript kungfu can display in browser. I'm focusing on +this mainly to help mitigate ddos and other kinds of attacks - and to just have a badass blazing fast project. -ideally, all css/js should be included in the html file (try to avoid images, uses embeddable svg) -this would give us one file to have to serve +Don't worry about doing any of that condensing yourself - go ahead and keep all the resources as separate files. +I will write a script for when the server starts to read all the files in the /website folder and minify and condense +it all together into one file, saved in memory. We will have 1 persistent condensed file that serves as our "template" +file that contains things like: +