From f191e93711393d83e2e65b5ee2d5e60cc2586e59 Mon Sep 17 00:00:00 2001
From: "Ryan X. Charles"
Date: Fri, 19 Sep 2014 16:40:29 -0700
Subject: [PATCH] make block reading more efficient

...by using streams. This way we don't load all the blocks before parsing
them. We parse them as we go.
---
 examples/blockreader.js | 27 +++++++++++++++++----------
 1 file changed, 17 insertions(+), 10 deletions(-)

diff --git a/examples/blockreader.js b/examples/blockreader.js
index 33d4c5eea..4c3db6c24 100644
--- a/examples/blockreader.js
+++ b/examples/blockreader.js
@@ -6,15 +6,22 @@ var BufferWriter = require('../lib/bufferwriter');
 //To use, pipe in a blk*****.dat file. e.g.:
 //cat blk00000.dat | node blockreader.js
 
-var bw = new BufferWriter();
+var head = null;
+var body = null;
 
-process.stdin.on('data', function(buf) {
-  bw.write(buf);
-});
-
-process.stdin.on('end', function(buf) {
-  var blocksbuf = bw.concat();
-  var br = new BufferReader(blocksbuf);
-  while (!br.eof())
-    console.log(JSON.stringify(Block().fromBufferReader(br).toJSON(), null, 2));
+process.stdin.on('readable', function() {
+  if (!head) {
+    head = process.stdin.read(8);
+    if (!head)
+      return;
+  }
+  var body = process.stdin.read(head.slice(4).readUInt32LE(0));
+  if (!body)
+    return;
+  var blockbuf = BufferWriter().write(head).write(body).concat();
+  var block = Block().fromBuffer(blockbuf);
+  console.log(block.toJSON());
+  head = null;
+  body = null;
+  process.stdin.unshift(process.stdin.read());
 });
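
A note for readers without the bitcore classes at hand: below is a minimal
standalone sketch of the same 'readable'/read(n) pattern the patch uses. It
assumes the blk*.dat record framing the new code relies on (a 4-byte network
magic, then a 4-byte little-endian payload length, then the raw block bytes);
the file name and variable names here are illustrative, not part of the patch.
Looping until read() returns null re-arms the stream without the
unshift(read()) step used above.

var head = null;

process.stdin.on('readable', function() {
  // Sketch only: assumes well-formed input, like the patch above.
  // Drain every complete record currently buffered; a null read
  // means not enough bytes yet, so wait for the next 'readable'.
  while (true) {
    if (!head) {
      head = process.stdin.read(8); // 4-byte magic + 4-byte length
      if (!head)
        return;
    }
    var len = head.readUInt32LE(4); // same as head.slice(4).readUInt32LE(0)
    var body = process.stdin.read(len);
    if (!body)
      return;
    console.log('magic ' + head.toString('hex', 0, 4) +
      ', block of ' + len + ' bytes');
    head = null; // ready for the next record's header
  }
});

Usage is the same as in the example (sketch.js is a hypothetical file name):

cat blk00000.dat | node sketch.js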