Add pager to blocks within a certain date range

This commit is contained in:
Matias Alejo Garcia 2014-05-30 11:50:43 -03:00
parent d538e154d6
commit 4ac7165276
7 changed files with 59 additions and 27 deletions

View File

@ -4,7 +4,6 @@ module.exports = function(grunt) {
//Load NPM tasks
grunt.loadNpmTasks('grunt-contrib-watch');
grunt.loadNpmTasks('grunt-contrib-jshint');
grunt.loadNpmTasks('grunt-mocha-test');
grunt.loadNpmTasks('grunt-nodemon');
grunt.loadNpmTasks('grunt-concurrent');
@ -21,7 +20,6 @@ module.exports = function(grunt) {
},
js: {
files: ['Gruntfile.js', 'insight.js', 'app/**/*.js'],
tasks: ['jshint'],
options: {
livereload: true,
},
@ -90,7 +88,7 @@ module.exports = function(grunt) {
grunt.option('force', true);
//Default task(s).
grunt.registerTask('default', ['jshint', 'concurrent']);
grunt.registerTask('default', ['concurrent']);
//Test task.
grunt.registerTask('test', ['env:test', 'mochaTest']);

View File

@ -61,7 +61,7 @@ var getBlock = function(blockhash, cb) {
// TODO
if (!block.info) {
console.log('[blocks.js.60]: could not get %s from RPC. Orphan? Error?', blockhash); //TODO
console.log('Could not get %s from RPC. Orphan? Error?', blockhash); //TODO
// Probably orphan
block.info = {
hash: blockhash,
@ -80,6 +80,11 @@ var getBlock = function(blockhash, cb) {
/**
* List of blocks by date
*/
var DFLT_LIMIT=200;
// in testnet, this number is much bigger, we dont support
// exploring blocks by date.
exports.list = function(req, res) {
var isToday = false;
@ -95,6 +100,7 @@ exports.list = function(req, res) {
var dateStr;
var todayStr = formatTimestamp(new Date());
console.log('[blocks.js.102]'); //TODO
if (req.query.blockDate) {
// TODO: Validate format yyyy-mm-dd
dateStr = req.query.blockDate;
@ -103,27 +109,32 @@ exports.list = function(req, res) {
dateStr = todayStr;
isToday = true;
}
var gte = Math.round((new Date(dateStr)).getTime() / 1000);
console.log('[blocks.js.112:gte:]',gte); //TODO
//pagination
var lte = gte + 86400;
var lte = parseInt(req.query.startTimestamp) || gte + 86400;
console.log('[blocks.js.115:lte:]',lte); //TODO
var prev = formatTimestamp(new Date((gte - 86400) * 1000));
var next = formatTimestamp(new Date(lte * 1000));
var next = lte ? formatTimestamp(new Date(lte * 1000)) :null;
console.log('[blocks.js.116:next:]',next, lte); //TODO
var limit = parseInt(req.query.limit || DFLT_LIMIT) + 1;
var more;
console.log('[blocks.js.119]'); //TODO
bdb.getBlocksByDate(gte, lte, limit, function(err, blockList) {
bdb.getBlocksByDate(gte, lte, function(err, blocks) {
if (err) {
res.status(500).send(err);
} else {
var blockList = [];
var l = blocks.length;
var limit = parseInt(req.query.limit || l);
if (l < limit) limit = l;
var l = blockList.length;
for (var i = 0; i < limit; i++) {
blockList.push(blocks[i]);
if (l===limit) {
more = true;
blockList.pop;
}
var moreTs=lte;
async.mapSeries(blockList,
function(b, cb) {
getBlock(b.hash, function(err, info) {
@ -131,6 +142,7 @@ exports.list = function(req, res) {
console.log(err);
return cb(err);
}
if (b.ts < moreTs) moreTs = b.ts;
return cb(err, {
height: info.height,
size: info.size,
@ -141,6 +153,7 @@ exports.list = function(req, res) {
});
});
}, function(err, allblocks) {
// sort blocks by height
allblocks.sort(
function compare(a,b) {
@ -157,7 +170,9 @@ exports.list = function(req, res) {
prev: prev,
currentTs: lte - 1,
current: dateStr,
isToday: isToday
isToday: isToday,
more: more,
moreTs: moreTs,
}
});
});

View File

@ -293,13 +293,17 @@ BlockDb.prototype.fromHashWithInfo = function(hash, cb) {
});
};
BlockDb.prototype.getBlocksByDate = function(start_ts, end_ts, cb) {
BlockDb.prototype.getBlocksByDate = function(start_ts, end_ts, limit, cb) {
var list = [];
db.createReadStream({
start: TIMESTAMP_PREFIX + start_ts,
end: TIMESTAMP_PREFIX + end_ts,
fillCache: true
})
var opts = {
start: TIMESTAMP_PREFIX + end_ts, //Inverted since list is reversed
end: TIMESTAMP_PREFIX + start_ts,
limit: limit,
reverse: 1,
};
console.log('[BlockDb.js.298:opts:]',opts); //TODO
db.createReadStream(opts)
.on('data', function (data) {
var k = data.key.split('-');
list.push({

View File

@ -15,7 +15,7 @@ var bitcoreUtil = bitcore.util;
var logger = require('./logger').logger;
var info = logger.info;
var error = logger.error;
var PERCENTAGE_TO_START_FROM_RPC = 1.1;
var PERCENTAGE_TO_START_FROM_RPC = 0.96;
// TODO TODO TODO
//var PERCENTAGE_TO_START_FROM_RPC = 0.98;
@ -206,7 +206,7 @@ HistoricSync.prototype.updateStartBlock = function(next) {
self.sync.bDb.fromHashWithInfo(tip, function(err, bi) {
blockInfo = bi ? bi.info : {};
if (oldtip)
self.sync.setBlockHeight(oldtip, -1, cb);
self.sync.bDb.setBlockNotMain(oldtip, cb);
else
return cb();
});

View File

@ -287,10 +287,25 @@ TransactionDb.prototype.fromTxIdN = function(txid, n, cb) {
ret = {
addr: a[0],
valueSat: parseInt(a[1]),
// ts: parseInt(a[2]), // TODO
};
}
return cb(err, ret);
// spent?
var k = SPENT_PREFIX + txid + '-' + n + '-';
db.createReadStream({
start: k,
end: k + '~'
})
.on('data', function(data) {
var k = data.key.split('-');
self._addSpentInfo(ret, k[3], k[4], data.value);
})
.on('error', function(error) {
return cb(error);
})
.on('end', function() {
return cb(null, ret);
});
});
};

View File

@ -91,7 +91,7 @@ describe('Address cache ', function() {
a.totalReceived.should.equal(1376000, 'totalReceived');
a.txApperances.should.equal(8003, 'txApperances');
return done();
},{noTxList:1});
},{txLimit:0});
});
});

View File

@ -23,7 +23,7 @@ describe('BlockDb getBlocksByDate', function(){
it('Get Hash by Date', function(done) {
bDb.getBlocksByDate(START_TS, END_TS, function(err, list) {
bDb.getBlocksByDate(START_TS, END_TS, 1000, function(err, list) {
if (err) done(err);
assert(list, 'returns list');
assert.equal(list.length,2, 'list has 2 items');