make block reading more efficient

...by using streams. This way we don't load all the blocks into memory before parsing them; we parse them as we go (a standalone sketch of the same approach follows the diff below).
Ryan X. Charles 2014-09-19 16:40:29 -07:00
parent cfd509f859
commit f191e93711
1 changed file with 17 additions and 10 deletions

@@ -6,15 +6,22 @@ var BufferWriter = require('../lib/bufferwriter');
 //To use, pipe in a blk*****.dat file. e.g.:
 //cat blk00000.dat | node blockreader.js
-var bw = new BufferWriter();
+var head = null;
+var body = null;
-process.stdin.on('data', function(buf) {
-  bw.write(buf);
-});
-process.stdin.on('end', function(buf) {
-  var blocksbuf = bw.concat();
-  var br = new BufferReader(blocksbuf);
-  while (!br.eof())
-    console.log(JSON.stringify(Block().fromBufferReader(br).toJSON(), null, 2));
+process.stdin.on('readable', function() {
+  if (!head) {
+    head = process.stdin.read(8);
+    if (!head)
+      return;
+  }
+  var body = process.stdin.read(head.slice(4).readUInt32LE(0));
+  if (!body)
+    return;
+  var blockbuf = BufferWriter().write(head).write(body).concat();
+  var block = Block().fromBuffer(blockbuf);
+  console.log(block.toJSON());
+  head = null;
+  body = null;
+  process.stdin.unshift(process.stdin.read());
 });
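
For reference, here is a minimal standalone sketch of the same streaming idea, without the library's Block class. It assumes the usual blk*.dat record framing that the commit relies on: a 4-byte network magic followed by a 4-byte little-endian payload length (head.readUInt32LE(4) reads the same value as head.slice(4).readUInt32LE(0) above). It loops instead of using the unshift trick and only prints sizes; the file name blockstream.js is illustrative, not part of the repo.

// Minimal streaming reader: parse blk*.dat framing from stdin as data arrives.
// Illustrative usage: cat blk00000.dat | node blockstream.js
var head = null;

process.stdin.on('readable', function() {
  // Consume whole records for as long as enough bytes are buffered.
  while (true) {
    if (!head) {
      head = process.stdin.read(8); // 4-byte magic + 4-byte little-endian length
      if (!head)
        return; // header not fully buffered yet; wait for the next 'readable'
    }
    var len = head.readUInt32LE(4); // payload length follows the magic bytes
    var payload = process.stdin.read(len);
    if (!payload)
      return; // block body not fully buffered yet; keep head and retry later
    console.log('magic ' + head.slice(0, 4).toString('hex') + ', block of ' + len + ' bytes');
    head = null;
  }
});

Because the payload length is read from the header before the body is pulled off the stream, memory stays bounded to roughly one block at a time, which is the point of the change in this commit.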