mirror of https://github.com/BTCPrivate/z-nomp.git
More fixes for payment processing
This commit is contained in:
parent ceb6f0dae0
commit cb30876b33

README.md
@@ -19,7 +19,8 @@ have different properties and hashing algorithms). It can be used to create a po
coins at once. The pools use clustering to load balance across multiple CPU cores.

* For reward/payment processing, shares are inserted into Redis (a fast NoSQL key/value store). The PROP (proportional)
reward system is used. Each and every share will be rewarded - even for rounds resulting in orphaned blocks.
reward system is used with [Redis Transactions](http://redis.io/topics/transactions) for secure and super speedy payouts.
Each and every share will be rewarded - even for rounds resulting in orphaned blocks.

* This portal does not have user accounts/logins/registrations. Instead, miners simply use their coin address for stratum
authentication. A minimalistic HTML5 front-end connects to the portal's statistics API to display stats from each
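To illustrate the Redis-backed share accounting described above, here is a minimal sketch (not code from this repository) of recording shares per round and settling a finished round inside a single Redis transaction. The recordShare/settleRound helpers and the coin name are placeholders; the key names mirror the _shares:round... and _payouts keys used elsewhere in this commit, and the callback-style redis client matches the one this project uses.

    // Minimal sketch: count shares per worker for the current round, then settle a
    // finished round inside one MULTI/EXEC so the updates apply all-or-nothing.
    var redis = require('redis');
    var redisClient = redis.createClient();
    var coin = 'somecoin'; // placeholder coin name

    // Called for every accepted share (hypothetical helper).
    function recordShare(workerAddress){
        redisClient.hincrby(coin + '_shares:roundCurrent', workerAddress, 1);
    }

    // Called once a block is confirmed and per-worker rewards (in coin units) are known.
    function settleRound(roundHeight, rewards){
        var commands = [['del', coin + '_shares:round' + roundHeight]];
        Object.keys(rewards).forEach(function(worker){
            commands.push(['hincrbyfloat', coin + '_payouts', worker, rewards[worker]]);
        });
        redisClient.multi(commands).exec(function(error, replies){
            if (error) console.error('redis transaction failed: ' + JSON.stringify(error));
        });
    }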
@@ -28,23 +29,11 @@ pool such as connected miners, network/pool difficulty/hash rate, etc.

#### Planned Features

* To knock down the barrier to entry for cryptocurrency and mining for those who are not programmers or tech-oriented, instead
of the "help" page on the website being confusing for non-techies (when most people see a black command prompt screen
they run away screaming), there will be a simple "Download NOMP Desktop App" to get started mining immediately for your
platform (use javascript to detect the platform and default them to the correct one). I will create this app using C# + Mono
so it runs with ease on all platforms, and it will have its own github repo that NOMP links to. So when a pool operator does a
`git clone --recursive` on the NOMP repo, it will download NOMP app executables for each platform. There will be a nomp.ini
file paired with the executable which the pool operator configures to use their NOMP pool's API URL. When the NOMP portal
initializes, it creates a zip for each platform with the nomp.ini inside. When users download the app, it auto-connects to the
NOMP pool API to get available coins along with the version-byte for each coin so the app can securely generate a local private
key and valid address to mine with. The app will prompt the user to print the private key to paper and also enforce a
STRONG (uncrackable) password encryption on the file.
The app will scan their system to get the appropriate mining software - run it in the background - and parse the gibberish (to a noob) output
into something that makes sense. It will also prompt them to download the coin's wallet software and import their private key.
When using the app they can choose a unique username that is used
with stratum authentication like "zone117x.mfsm1ckZKTTjDz94KonZZsbZnAbm1UV4BF", so that on a NOMP mobile app, a user can
enter the NOMP pool and their username in order to see how their mining rig is doing, since the API will report stats
back for the address such as hashrate and balance.
* NOMP API - this API will be used in several ways:
    * The website will use the API to display stats and information about the pool(s) on the portal's front-end website.
    * The NOMP Desktop app will use the API to connect to the portal to display a list of available coins to mine.
    * The NOMP server will have to send the desktop app each coin's version-byte so that a wallet (private key & address) can be
generated securely and locally then used to mine on the pool.

* To reduce variance for pools just starting out which have little to no hashing power, a feature is planned which will
allow your own pool to connect upstream to a larger pool server. It will request work from the larger pool then
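The planned desktop app above relies on the pool's API handing out each coin's version byte so that a valid address can be generated locally. As a rough, hedged illustration of why the version byte is all that is needed beyond the key pair itself, here is a base58check sketch; addressFromPubKeyHash, the example hash, and the 0x00 version byte are made up for the example, and the key generation step is omitted.

    // Hedged sketch: derive a base58check address from a 20-byte public-key hash
    // (RIPEMD160 of SHA256 of the public key) and a coin-specific version byte.
    var crypto = require('crypto');

    var ALPHABET = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz';

    function base58Encode(buf){
        var num = BigInt('0x' + buf.toString('hex'));
        var encoded = '';
        while (num > 0n){
            encoded = ALPHABET[Number(num % 58n)] + encoded;
            num = num / 58n;
        }
        // each leading zero byte is represented by a leading '1'
        for (var i = 0; i < buf.length && buf[i] === 0; i++) encoded = '1' + encoded;
        return encoded;
    }

    function addressFromPubKeyHash(pubKeyHash, versionByte){
        var payload = Buffer.concat([Buffer.from([versionByte]), pubKeyHash]);
        var checksum = crypto.createHash('sha256')
            .update(crypto.createHash('sha256').update(payload).digest())
            .digest().slice(0, 4);
        return base58Encode(Buffer.concat([payload, checksum]));
    }

    // e.g. version byte 0x00 and a made-up 20-byte hash:
    console.log(addressFromPubKeyHash(Buffer.alloc(20, 7), 0x00));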
@@ -285,10 +274,13 @@ Description of options:
    }
},


/* Recommended to have at least two daemon instances running in case one drops out-of-sync
   or offline. For redundancy, all instances will be polled for block/transaction updates
   and be used for submitting blocks. */
   and be used for submitting blocks. Creating a backup daemon involves spawning a daemon
   using the "-datadir=/backup" argument which creates a new daemon instance with its own
   RPC config. For more info on this see:
    - https://en.bitcoin.it/wiki/Data_directory
    - https://en.bitcoin.it/wiki/Running_bitcoind */
"daemons": [
    { //Main daemon instance
        "host": "localhost",
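For concreteness, a two-entry "daemons" array might look roughly like the sketch below. The ports and RPC credentials are placeholders rather than values from this repository, and the second entry assumes a backup daemon launched with -datadir=/backup that exposes its own RPC port.

    "daemons": [
        {   //Main daemon instance
            "host": "localhost",
            "port": 19332,
            "user": "rpcuser",
            "password": "rpcpassword"
        },
        {   //Backup daemon instance started with -datadir=/backup and its own RPC port
            "host": "localhost",
            "port": 19342,
            "user": "rpcuser",
            "password": "rpcpassword"
        }
    ],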
@@ -100,7 +100,13 @@ function SetupForPool(logger, poolOptions){

        var rounds = results.map(function(r){
            var details = r.split(':');
            return {txHash: details[0], height: details[1], reward: details[2], serialized: r};
            return {
                category: details[0].category,
                txHash: details[0],
                height: details[1],
                reward: details[2],
                serialized: r
            };
        });

        callback(null, rounds);
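The split(':') above assumes each pending block was serialized as "txHash:height:reward" when it entered the coin's _blocksPending set. A standalone sketch of both sides of that round-trip follows; the recordPendingBlock helper is hypothetical, while the key names and parsing mirror this file.

    var redis = require('redis');
    var redisClient = redis.createClient();
    var coin = 'somecoin'; // placeholder

    // Written when a block is found (hypothetical helper):
    function recordPendingBlock(txHash, height, reward){
        redisClient.sadd(coin + '_blocksPending', [txHash, height, reward].join(':'));
    }

    // Read back by the payment processor:
    redisClient.smembers(coin + '_blocksPending', function(error, results){
        if (error) return console.error('redis error: ' + JSON.stringify(error));
        var rounds = results.map(function(r){
            var details = r.split(':');
            return {txHash: details[0], height: details[1], reward: details[2], serialized: r};
        });
        console.log(rounds);
    });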
@@ -125,45 +131,51 @@ function SetupForPool(logger, poolOptions){

        txDetails = txDetails.filter(function(tx){
            if (tx.error || !tx.result){
                console.log('error with requesting transaction from block daemon: ' + JSON.stringify(tx));
                paymentLogger.error('error with requesting transaction from block daemon: ' + JSON.stringify(tx));
                return false;
            }
            return true;
        });

        var orphanedRounds = [];
        var confirmedRounds = [];
        //Rounds that are not confirmed yet are removed from the round array
        //We also get reward amount for each block from daemon reply
        rounds.forEach(function(r){

        var magnitude;

        rounds = rounds.filter(function(r){
            var tx = txDetails.filter(function(tx){return tx.result.txid === r.txHash})[0];

            if (!tx){
                console.log('daemon did not give us back a transaction that we asked for: ' + r.txHash);
                paymentLogger.error('system', 'daemon did not give us back a transaction that we asked for: ' + r.txHash);
                return;
            }


            r.category = tx.result.details[0].category;

            if (r.category === 'orphan'){
                orphanedRounds.push(r);

            }
            else if (r.category === 'generate'){
            if (r.category === 'generate'){
                r.amount = tx.result.amount;
                r.magnitude = r.reward / r.amount;
                confirmedRounds.push(r);

                var roundMagnitude = r.reward / r.amount;
                if (!magnitude){
                    magnitude = roundMagnitude;

                    if (roundMagnitude % 10 !== 0)
                        paymentLogger.error('system', 'Satoshis in coin is not divisible by 10 which is very odd');
                }
                else if (magnitude != roundMagnitude){
                    paymentLogger.error('system', 'Magnitude in a round was different than in another round. HUGE PROBLEM.');
                }
                return true;
            }
            else if (r.category === 'orphan')
                return true;

        });

        if (orphanedRounds.length === 0 && confirmedRounds.length === 0){
            callback('done - no confirmed or orhpaned rounds');

        if (rounds.length === 0){
            callback('done - no confirmed or orphaned rounds');
        }
        else{
            callback(null, confirmedRounds, orphanedRounds);
            callback(null, rounds, magnitude);
        }
    });
},
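A small worked example of the "magnitude" introduced above, using made-up numbers: the reward stored with each round is in satoshis while the daemon's gettransaction reply reports whole coins, so dividing the two recovers the number of satoshis per coin unit and, from it, the coin's decimal precision.

    var rewardInSatoshis = 5000000000;  // e.g. a 50-coin block reward stored in base units
    var amountFromDaemon = 50;          // the same reward as reported by gettransaction
    var magnitude = rewardInSatoshis / amountFromDaemon;    // 100000000

    // Sanity check mirroring the code above:
    if (magnitude % 10 !== 0)
        console.log('Satoshis in coin is not divisible by 10 which is very odd');

    // Later used to convert satoshi amounts back into coin units:
    var coinPrecision = magnitude.toString().length - 1;           // 8 decimal places
    console.log((123456789 / magnitude).toFixed(coinPrecision));   // "1.23456789"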
@@ -171,85 +183,62 @@ function SetupForPool(logger, poolOptions){

/* Does a batch redis call to get shares contributed to each round. Then calculates the reward
   amount owed to each miner for each round. */
function(confirmedRounds, orphanedRounds, callback){


    var rounds = [];
    for (var i = 0; i < orphanedRounds.length; i++) rounds.push(orphanedRounds[i]);
    for (var i = 0; i < confirmedRounds.length; i++) rounds.push(confirmedRounds[i]);
function(rounds, magnitude, callback){


    var shareLookups = rounds.map(function(r){
        return ['hgetall', coin + '_shares:round' + r.height]
    });


    redisClient.multi(shareLookups).exec(function(error, allWorkerShares){
        if (error){
            callback('done - redis error with multi get rounds share')
            return;
        }


        // Iterate through the beginning of the share results which are for the orphaned rounds
        var orphanMergeCommands = []
        for (var i = 0; i < orphanedRounds.length; i++){
            var workerShares = allWorkerShares[i];
            Object.keys(workerShares).forEach(function(worker){
                orphanMergeCommands.push(['hincrby', coin + '_shares:roundCurrent', worker, workerShares[worker]]);
            });
            orphanMergeCommands.push([]);
        }

        // Iterate through the rest of the share results which are for the worker rewards
        var orphanMergeCommands = [];
        var workerRewards = {};
        for (var i = orphanedRounds.length; i < allWorkerShares.length; i++){

            var round = rounds[i];

        rounds.forEach(function(round, i){
            var workerShares = allWorkerShares[i];

            var reward = round.reward * (1 - processingConfig.feePercent);

            var totalShares = Object.keys(workerShares).reduce(function(p, c){
                return p + parseInt(workerShares[c])
            }, 0);


            for (var worker in workerShares){
                var percent = parseInt(workerShares[worker]) / totalShares;
                var workerRewardTotal = Math.floor(reward * percent);
                if (!(worker in workerRewards)) workerRewards[worker] = 0;
                workerRewards[worker] += workerRewardTotal;
            if (round.category === 'orphan'){
                Object.keys(workerShares).forEach(function(worker){
                    orphanMergeCommands.push(['hincrby', coin + '_shares:roundCurrent', worker, workerShares[worker]]);
                });
            }
            }
            else if (round.category === 'generate'){

                var reward = round.reward * (1 - processingConfig.feePercent);

                //this calculates profit if you wanna see it
                /*
                var workerTotalRewards = Object.keys(workerRewards).reduce(function(p, c){
                    return p + workerRewards[c];
                }, 0);
                var totalShares = Object.keys(workerShares).reduce(function(p, c){
                    return p + parseInt(workerShares[c])
                }, 0);

                var poolTotalRewards = rounds.reduce(function(p, c){
                    return p + c.amount * c.magnitude;
                }, 0);
                for (var worker in workerShares){
                    var percent = parseInt(workerShares[worker]) / totalShares;
                    var workerRewardTotal = Math.floor(reward * percent);
                    if (!(worker in workerRewards)) workerRewards[worker] = 0;
                    workerRewards[worker] += workerRewardTotal;
                }
            }
        });

        console.log(workerRewards);
        console.log('pool profit percent' + ((poolTotalRewards - workerTotalRewards) / poolTotalRewards));
        */

        callback(null, rounds, workerRewards, orphanMergeCommands);
        callback(null, rounds, magnitude, workerRewards, orphanMergeCommands);
    });
},


/* Does a batch call to redis to get worker existing balances from coin_balances*/
function(rounds, workerRewards, orphanMergeCommands, callback){
function(rounds, magnitude, workerRewards, orphanMergeCommands, callback){

    var workers = Object.keys(workerRewards);

    redisClient.hmget([coin + '_balances'].concat(workers), function(error, results){
        if (error){
            callback('done - redis error with multi get balances');
        if (error && workers.length !== 0){
            callback('done - redis error with multi get balances ' + JSON.stringify(error));
            return;
        }

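Pulling the proportional (PROP) split out of the callback above, here is a standalone sketch with made-up share counts; feePercent is a fraction (e.g. 0.01 for 1%), matching how it is applied in the code above.

    var workerShares = { 'addr1': 600, 'addr2': 300, 'addr3': 100 };
    var blockRewardSatoshis = 5000000000;
    var feePercent = 0.01;

    var reward = blockRewardSatoshis * (1 - feePercent);

    var totalShares = Object.keys(workerShares).reduce(function(p, c){
        return p + workerShares[c];
    }, 0);

    var workerRewards = {};
    for (var worker in workerShares){
        var percent = workerShares[worker] / totalShares;
        // Math.floor keeps each reward in whole satoshis; rounding dust stays with the pool.
        workerRewards[worker] = Math.floor(reward * percent);
    }

    console.log(workerRewards);
    // { addr1: 2970000000, addr2: 1485000000, addr3: 495000000 }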
@@ -257,11 +246,11 @@ function SetupForPool(logger, poolOptions){
        var workerBalances = {};

        for (var i = 0; i < workers.length; i++){
            workerBalances[workers[i]] = parseInt(results[i]) || 0;
            workerBalances[workers[i]] = (parseInt(results[i]) || 0) * magnitude;
        }


        callback(null, rounds, workerRewards, workerBalances, orphanMergeCommands);
        callback(null, rounds, magnitude, workerRewards, orphanMergeCommands, workerBalances);
    });

},
@@ -273,9 +262,10 @@ function SetupForPool(logger, poolOptions){
   when deciding the sent balance, the difference should be -1*amount they had in db,
   if not sending the balance, the difference should be +(the amount they earned this round)
*/
function(rounds, workerRewards, workerBalances, orphanMergeCommands, callback){
function(rounds, magnitude, workerRewards, orphanMergeCommands, workerBalances, callback){

    //number of satoshis in a single coin unit - this can be different for coins so we calculate it :)

    var magnitude = rounds[0].magnitude;

    daemon.cmd('getbalance', [], function(results){
@@ -288,22 +278,29 @@ function SetupForPool(logger, poolOptions){
        var workerPayoutsCommand = [];

        for (var worker in workerRewards){
            workerPayments[worker] = (workerPayments[worker] || 0) + workerRewards[worker];
            workerPayments[worker] = ((workerPayments[worker] || 0) + workerRewards[worker]);
        }
        for (var worker in workerBalances){
            workerPayments[worker] = (workerPayments[worker] || 0) + workerBalances[worker];
            workerPayments[worker] = ((workerPayments[worker] || 0) + workerBalances[worker]);
        }
        for (var worker in workerPayments){
            if (workerPayments[worker] < processingConfig.minimumPayment * magnitude){
                balanceUpdateCommands.push(['hincrby', coin + '_balances', worker, workerRewards[worker]]);
                delete workerPayments[worker];
            }
            else{
                if (workerBalances[worker] !== 0)
                    balanceUpdateCommands.push(['hincrby', coin + '_balances', worker, -1 * workerBalances[worker]]);
                workerPayoutsCommand.push(['hincrby', coin + '_balances', worker, workerRewards[worker]]);
                toBePaid += workerPayments[worker];

        if (Object.keys(workerPayments).length > 0){
            var coinPrecision = magnitude.toString().length - 1;
            for (var worker in workerPayments){
                if (workerPayments[worker] < processingConfig.minimumPayment * magnitude){
                    balanceUpdateCommands.push(['hincrby', coin + '_balances', worker, workerRewards[worker]]);
                    delete workerPayments[worker];
                }
                else{
                    if (workerBalances[worker] !== 0){
                        balanceUpdateCommands.push(['hincrby', coin + '_balances', worker, -1 * workerBalances[worker]]);
                    }
                    var rewardInPrecision = (workerRewards[worker] / magnitude).toFixed(coinPrecision);
                    workerPayoutsCommand.push(['hincrbyfloat', coin + '_payouts', worker, rewardInPrecision]);
                    toBePaid += workerPayments[worker];
                }
            }

        }

        var balanceLeftOver = totalBalance - toBePaid;
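The minimum-payment branch above can be summarized with a small standalone example using made-up numbers; the real code queues hincrby/hincrbyfloat commands against the coin's _balances and _payouts hashes instead of mutating objects directly, but the decision logic is the same.

    var magnitude = 100000000;          // satoshis per coin
    var minimumPayment = 0.1;           // in coins, as in the pool config
    var workerRewards  = { 'addrA': 4000000, 'addrB': 250000000 };  // this round, in satoshis
    var workerBalances = { 'addrA': 2000000, 'addrB': 50000000 };   // carried over, in satoshis

    var workerPayments = {};
    for (var w in workerRewards) workerPayments[w] = (workerPayments[w] || 0) + workerRewards[w];
    for (var w in workerBalances) workerPayments[w] = (workerPayments[w] || 0) + workerBalances[w];

    var balanceUpdates = {};   // what would be applied to the coin's _balances hash
    for (var worker in workerPayments){
        if (workerPayments[worker] < minimumPayment * magnitude){
            balanceUpdates[worker] = workerRewards[worker];        // keep accumulating
            delete workerPayments[worker];                         // not paid this interval
        }
        else {
            balanceUpdates[worker] = -1 * workerBalances[worker];  // carried balance gets paid out
        }
    }

    console.log(workerPayments);   // { addrB: 300000000 }  -> 3.0 coins paid out
    console.log(balanceUpdates);   // { addrA: 4000000, addrB: -50000000 }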
@@ -318,61 +315,83 @@ function SetupForPool(logger, poolOptions){


        var movePendingCommands = [];
        var deleteRoundsCommand = ['del'];
        var roundsToDelete = [];
        rounds.forEach(function(r){
            var destinationSet = r.category === 'orphan' ? '_blocksOrphaned' : '_blocksConfirmed';
            movePendingCommands.push(['smove', coin + '_blocksPending', coin + destinationSet, r.serialized]);
            deleteRoundsCommand.push(coin + '_shares:round' + r.height)
            roundsToDelete.push(coin + '_shares:round' + r.height)
        });

        var finalRedisCommands = [];

        finalRedisCommands = finalRedisCommands.concat(
            movePendingCommands,
            orphanMergeCommands,
            balanceUpdateCommands,
            workerPayoutsCommand
        );
        if (movePendingCommands.length > 0)
            finalRedisCommands = finalRedisCommands.concat(movePendingCommands);

        finalRedisCommands.push(deleteRoundsCommand);
        finalRedisCommands.push(['hincrby', coin + '_stats', 'totalPaid', toBePaid / magnitude]);
        if (orphanMergeCommands.length > 0)
            finalRedisCommands = finalRedisCommands.concat(orphanMergeCommands);


        if (balanceUpdateCommands.length > 0)
            finalRedisCommands = finalRedisCommands.concat(balanceUpdateCommands);


        if (workerPayoutsCommand.length > 0)
            finalRedisCommands = finalRedisCommands.concat(workerPayoutsCommand);


        if (roundsToDelete.length > 0)
            finalRedisCommands.push(['del'].concat(roundsToDelete));


        if (toBePaid !== 0)
            finalRedisCommands.push(['hincrbyfloat', coin + '_stats', 'totalPaid', (toBePaid / magnitude).toFixed(coinPrecision)]);


        callback(null, magnitude, workerPayments, finalRedisCommands);



    });
},

function(magnitude, workerPayments, finalRedisCommands, callback){

    var sendManyCmd = ['', {}];
    for (var address in workerPayments){
        sendManyCmd[1][address] = workerPayments[address] / magnitude;
    }

    console.log(JSON.stringify(finalRedisCommands, null, 4));
    console.log(JSON.stringify(workerPayments, null, 4));
    console.log(JSON.stringify(sendManyCmd, null, 4));

    daemon.cmd('sendmany', sendManyCmd, function(results){
        if (results[0].error){
            callback('done - error with sendmany ' + JSON.stringify(results[0].error));
            return;
        }

        //This does the final all-or-nothing atomic transaction if the block daemon sent payments
    //This does the final all-or-nothing atomic transaction if the block daemon sent payments
    var finalizeRedisTx = function(){
        redisClient.multi(finalRedisCommands).exec(function(error, results){
            if (error){
                callback('done - error with final redis commands for cleaning up ' + JSON.stringify(error));
                return;
            }
            callback(null, 'Payments processing performed an interval');
        });
    };

    if (Object.keys(workerPayments).length === 0){
        finalizeRedisTx();
    }
    else{


        var coinPrecision = magnitude.toString().length - 1;
        var addressAmounts = {};
        for (var address in workerPayments){
            addressAmounts[address] = parseFloat((workerPayments[address] / magnitude).toFixed(coinPrecision));
        }

        paymentLogger.debug('system', 'Payments about to be sent to: ' + JSON.stringify(addressAmounts));
        daemon.cmd('sendmany', ['', addressAmounts], function(results){
            if (results[0].error){
                callback('done - error with sendmany ' + JSON.stringify(results[0].error));
                return;
            }
            finalizeRedisTx();
            var totalWorkers = Object.keys(workerPayments).length;
            var totalAmount = Object.keys(workerPayments).reduce(function(p, c){return p + workerPayments[c]}, 0);
            callback(null, 'Payments sent, a total of ' + totalAmount + ' was sent to ' + totalWorkers);
            paymentLogger.debug('system', 'Payments sent, a total of ' + totalAmount +
                ' was sent to ' + totalWorkers + ' miners');
        });
        });
    }
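Condensing the last two steps above into one hedged sketch: amounts are converted from satoshis to coin units, sent in a single sendmany RPC, and only after the daemon accepts the payment does the queued Redis transaction run, so the share/balance cleanup is all-or-nothing. The sendPayments wrapper is assumed for illustration; the daemon and redisClient objects are taken to behave like the ones used in this file.

    // Hedged sketch of the payout flow, not the portal's exact code.
    function sendPayments(daemon, redisClient, workerPayments, magnitude, finalRedisCommands, callback){

        var coinPrecision = magnitude.toString().length - 1;
        var addressAmounts = {};
        for (var address in workerPayments){
            addressAmounts[address] = parseFloat((workerPayments[address] / magnitude).toFixed(coinPrecision));
        }

        daemon.cmd('sendmany', ['', addressAmounts], function(results){
            if (results[0].error){
                callback('error with sendmany ' + JSON.stringify(results[0].error));
                return;
            }
            // Cleanup only runs after the daemon confirmed the payment was sent.
            redisClient.multi(finalRedisCommands).exec(function(error, replies){
                if (error){
                    callback('error with final redis commands ' + JSON.stringify(error));
                    return;
                }
                callback(null, 'payments sent and rounds cleaned up');
            });
        });
    }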
@@ -131,7 +131,6 @@ module.exports = function(logger, portalConfig, poolConfigs){

        _this.stats = portalStats;
        _this.statsString = JSON.stringify(portalStats);
        console.log(JSON.stringify(portalStats, null, 4));
        callback();
    });