Browse Source

make block reading more efficient

...by using streams. This way we don't load all the blocks before parsing them.
We parse them as we go.
patch-2
Ryan X. Charles 10 years ago
parent
commit
f191e93711
1 changed file with 27 additions:
    examples/blockreader.js (+27)

@@ -6,15 +6,22 @@ var BufferWriter = require('../lib/bufferwriter');
//To use, pipe in a blk*****.dat file. e.g.:
//cat blk00000.dat | node blockreader.js
//
//Reads blocks as a stream instead of buffering the whole file: each record in
//a blk*.dat file is an 8-byte header (4-byte network magic followed by a
//4-byte little-endian payload length) and then the serialized block itself.
//NOTE(review): assumes Block and BufferWriter are required above — confirm.
var head = null;

process.stdin.on('readable', function() {
  if (!head) {
    head = process.stdin.read(8);
    if (!head)
      return; //header not fully buffered yet; wait for next 'readable'
  }
  //bytes 4..7 of the header are the block payload length (little-endian)
  var body = process.stdin.read(head.slice(4).readUInt32LE(0));
  if (!body)
    return; //block body not fully buffered yet; keep head and retry later
  var blockbuf = BufferWriter().write(head).write(body).concat();
  var block = Block().fromBuffer(blockbuf);
  console.log(block.toJSON());
  head = null;
  //push any leftover buffered bytes back so another 'readable' fires for the
  //next block; guard against read() returning null (unshift(null) is invalid)
  var rest = process.stdin.read();
  if (rest)
    process.stdin.unshift(rest);
});

Loading…
Cancel
Save