// Scraped from a commit by Matias Alejo Garcia (3 changed files, 443 additions).
// File 1 of 2: HeaderDB — in-memory block-header database (classtool module).
|||
require('classtool'); |
|||
|
|||
function ClassSpec(b) { |
|||
var assert = require('assert'); |
|||
var fs = require('fs'); |
|||
var Block = require('bitcore/Block').class(); |
|||
var Deserialize = require('bitcore/Deserialize'); |
|||
var Parser = require('bitcore/util/BinaryParser').class(); |
|||
var coinUtil = require('bitcore/util/util'); |
|||
|
|||
// In-memory database of block headers, indexed by hash and by chain height.
function HeaderDB(opts) {
  // Network parameters; provides the genesis block used for validation.
  this.network = opts.network;
  // File descriptor slot used by read/write helpers; unset until needed.
  this.fd = null;
  // Primary index: block hash -> block object.
  this.blocks = {};
  // Secondary index: chain height -> hash of the best-chain block there.
  this.byHeight = [];
  // Tip of the best (most-work) chain seen so far.
  this.bestBlock = null;
  // Running count of stored headers (kept in sync by add()).
  this.cached_size = 0;
}
|||
|
|||
// Number of headers stored. Also refreshes the cached_size counter
// from the authoritative by-hash index.
HeaderDB.prototype.size = function() {
  var count = Object.keys(this.blocks).length;
  this.cached_size = count;
  return count;
};
|||
|
|||
// Build a block locator (list of hashes: dense near the tip, then
// exponentially sparser) starting from `block`, or from the current
// best block when no argument is given.
// see https://en.bitcoin.it/wiki/Protocol_specification#getblocks
HeaderDB.prototype.locator = function(block) {
  var tip = block || this.bestBlock;

  var loc = [];
  var step = 1;
  var start = 0;
  for (var height = tip.height; height > 0; height -= step, ++start) {
    // First 10 entries are consecutive; after that the stride doubles.
    if (start >= 10)
      step *= 2;
    loc.push(this.byHeight[height]);
  }

  // Height 0 must always be our network's genesis block.
  assert.equal(this.byHeight[0].toString(),
               this.network.genesisBlock.hash.toString());
  loc.push(this.byHeight[0]);
  console.log("Requesting more headers. Current height: " + tip.height);
  return loc;
};
|||
|
|||
// Add a parsed block header to the database, linking it to its parent
// and updating the best-chain bookkeeping.
//
// Returns a reorg summary { oldBest, conn, disconn }: the previous tip
// plus the number of blocks connected/disconnected when the best chain
// changed (all untouched when the new block is not the best chain).
//
// Throws on: duplicate hash, a first block that is not the network's
// genesis block, or an orphan (parent hash not in the db).
HeaderDB.prototype.add = function(block) {
  var hash = block.calcHash();
  block.hash = hash;
  // Proof-of-work contribution of this header, decoded from the
  // compact "bits" field.
  var curWork = Deserialize.intFromCompact(block.bits);

  if (hash in this.blocks) {
    var old = this.blocks[hash];
    throw new Error("duplicate block (was at height " + old.height + ")");
  }

  var bestChain = false;

  // Reorg accounting returned to the caller.
  var reorg = {
    oldBest: null,
    conn: 0,
    disconn: 0,
  };

  if (this.cached_size == 0) {
    // Empty db: the first block added must be the genesis block.
    if (this.network.genesisBlock.hash.toString() !=
      hash.toString())
      throw new Error("Invalid genesis block");

    block.height = 0;
    block.work = curWork;
    bestChain = true;
    this.cached_size++;
  } else {
    // Non-genesis blocks must connect to a known parent.
    var prevBlock = this.blocks[block.prev_hash];
    if (!prevBlock)
      throw new Error("orphan block; prev not found");

    block.height = prevBlock.height + 1;
    // Cumulative chain work decides the best chain (not raw height).
    block.work = prevBlock.work + curWork;
    this.cached_size++;

    if (block.work > this.bestBlock.work)
      bestChain = true;
  }

  // add to by-hash index
  this.blocks[hash] = block;

  if (bestChain) {
    var oldBest = this.bestBlock;
    var newBest = block;

    reorg.oldBest = oldBest;

    // likely case: new best chain has greater height
    if (!oldBest) {
      // No previous tip (genesis case): count every link back to start.
      while (newBest) {
        newBest = this.blocks[newBest.prev_hash];
        reorg.conn++;
      }
    } else {
      // Walk the new chain back until it is level with the old tip.
      while (newBest &&
             (newBest.height > oldBest.height)) {
        newBest = this.blocks[newBest.prev_hash];
        reorg.conn++;
      }
    }

    // unlikely: old best chain has greater height
    while (oldBest && newBest &&
           (oldBest.height > newBest.height)) {
      oldBest = this.blocks[oldBest.prev_hash];
      reorg.disconn++;
    }

    // height matches, but still walking parallel
    // (step both chains back until they meet at the fork point)
    while (oldBest && newBest && (oldBest != newBest)) {
      newBest = this.blocks[newBest.prev_hash];
      reorg.conn++;

      oldBest = this.blocks[oldBest.prev_hash];
      reorg.disconn++;
    }

    // Number of by-height entries to rewrite: the longer of the
    // connected / disconnected legs of the fork.
    var shuf = (reorg.conn > reorg.disconn) ?
        reorg.conn : reorg.disconn;

    // reorg analyzed, updated best-chain pointer
    this.bestBlock = block;

    // update by-height index
    // Walk back `shuf` blocks from the new tip, collecting
    // [height, block] pairs to splice into byHeight.
    var ptr = block;
    var updated = [];
    for (var idx = block.height;
         idx > (block.height - shuf); idx--) {
      if (idx < 0)
        break;
      var update = [ idx, ptr ];
      updated.push(update);
      ptr = this.blocks[ptr.prev_hash];
    }

    // Apply lowest height first so byHeight grows contiguously.
    updated.reverse();

    for (var i = 0; i < updated.length; i++) {
      var update = updated[i];
      var idx = update[0];
      var ptr = update[1];

      if (idx < this.byHeight.length)
        this.byHeight[idx] = ptr.hash;
      else
        this.byHeight.push(ptr.hash);
    }
  }
  return reorg;
};
|||
|
|||
// Parse one 80-byte serialized header from `buf` and add it to the db.
HeaderDB.prototype.addBuf = function(buf) {
  var parser = new Parser(buf);
  var header = new Block();
  header.parse(parser, true); // true => header-only parse — TODO confirm flag semantics
  this.add(header);
};
|||
|
|||
|
|||
// Load headers from `filename`, a flat file of consecutive 80-byte
// serialized block headers, adding each to the database in order.
// Throws if the file size is not a multiple of 80, or if any header
// fails to connect (duplicate / orphan / bad genesis).
HeaderDB.prototype.readFile = function(filename) {
  var fd = fs.openSync(filename, 'r');
  try {
    var stats = fs.fstatSync(fd);
    if (stats.size % 80 != 0)
      throw new Error("Corrupted header db");

    while (1) {
      // Fresh buffer per record: parsed blocks may retain slices of it,
      // so it cannot safely be reused — TODO confirm Block.parse copies.
      var buf = new Buffer(80);
      var bread = fs.readSync(fd, buf, 0, 80, null);
      if (bread < 80)
        break; // short read == EOF

      this.addBuf(buf);

      // Progress note every 1000 headers.
      if (!(this.cached_size % 1000)) {
        console.log("\tblock..." + this.cached_size);
      }
    }
  } finally {
    // FIX: close the descriptor even when addBuf()/add() throws
    // (e.g. duplicate or orphan header); previously the fd leaked.
    fs.closeSync(fd);
  }
};
|||
|
|||
// Persist the best chain to `filename` as consecutive 80-byte raw
// headers, ordered genesis first. Side-chain blocks are not written.
HeaderDB.prototype.writeFile = function(filename) {
  // Walk back from the tip collecting raw headers...
  var block = this.bestBlock;
  var data = [];
  while (block) {
    var s = block.getHeader();
    data.push(s);
    block = this.blocks[block.prev_hash];
  }

  // ...then reverse so the file runs genesis -> tip.
  data.reverse();

  var fd = fs.openSync(filename, 'w');
  try {
    data.forEach(function(datum) {
      fs.writeSync(fd, datum, 0, 80, null);
    });
  } finally {
    // FIX: close even if a write fails, so the descriptor is not leaked.
    fs.closeSync(fd);
  }
};
|||
|
|||
return HeaderDB; |
|||
}; |
|||
module.defineClass(ClassSpec); |
|||
|
// File 2 of 2: header-sync script (p2p sync + local blk*.dat file scan).
|||
var fs = require('fs'); |
|||
var HeaderDB = require('./HeaderDB').class(); |
|||
var Block = require('bitcore/Block').class(); |
|||
var CoinConst = require('bitcore/const'); |
|||
var coinUtil = require('bitcore/util/util'); |
|||
var networks = require('bitcore/networks'); |
|||
var Parser = require('bitcore/util/BinaryParser').class(); |
|||
|
|||
var peerdb_fn = 'peerdb.json'; |
|||
|
|||
var peerdb = undefined; |
|||
var hdrdb = undefined; |
|||
var network = networks.testnet; |
|||
var config = { |
|||
network : network.name |
|||
}; |
|||
var PeerManager = require('bitcore/PeerManager').createClass({ |
|||
config : config |
|||
}); |
|||
var Peer = require('bitcore/Peer').class(); |
|||
|
|||
var syncState = 'init'; |
|||
|
|||
// Load the peer list from peerdb_fn; on any failure (missing file,
// bad JSON) fall back to a single localhost testnet peer and persist it.
function peerdb_load() {
  try {
    var raw = fs.readFileSync(peerdb_fn);
    peerdb = JSON.parse(raw);
  } catch (err) {
    // Best-effort recovery: seed a default db rather than aborting.
    console.warn('Unable to read peer db', peerdb_fn, 'creating new one.');
    peerdb = [{
      ipv4: '127.0.0.1',
      port: 18333
    }];
    fs.writeFileSync(peerdb_fn, JSON.stringify(peerdb));
  }
}
|||
|
|||
// (Re)create the in-memory header database for the active network.
function hdrdb_load() {
  hdrdb = new HeaderDB({ network: network });
}
|||
|
|||
// Ask the peer on `info.conn` for headers extending our best chain.
function get_more_headers(info) {
  var locator = hdrdb.locator();
  // NULL_HASH stop-hash => "send as many headers as you have".
  info.conn.sendGetHeaders(locator, coinUtil.NULL_HASH);
}
|||
|
|||
// Add a single received header to the header db. Errors from
// hdrdb.add() (duplicate, orphan, bad genesis) are non-fatal during
// sync: log them and continue rather than dropping the connection.
function add_header(info, block) {
  var hashStr = coinUtil.formatHashFull(block.calcHash());

  try {
    hdrdb.add(block);
  } catch (e) {
    // FIX: the failure was previously swallowed silently (and hashStr
    // computed but unused); surface the reason for debugging.
    console.log('add_header: skipping ' + hashStr + ': ' + e);
    return;
  }
}
|||
|
|||
// Handle a 'headers' message: record every header in the batch, then
// request the next batch (exactly one request per batch received).
function handle_headers(info) {
  console.log("handle headers");
  info.message.headers.forEach(function(hdr) {
    add_header(info, hdr);
  });

  // NOTE(review): persisting the db after each batch was disabled here
  // (hdrdb.writeFile) — confirm whether on-disk sync is still wanted.
  get_more_headers(info);
}
|||
|
|||
// Handle a 'block' message. The very first block (the genesis block we
// requested at verack) flips the state machine into header-sync mode.
function handle_block(info) {
  console.log("handle block");
  add_header(info, info.message.block);

  if (syncState === 'init') {
    syncState = 'headers';
    get_more_headers(info);
  }
}
|||
|
|||
// On handshake completion, request the genesis block to prime the
// header database and kick off the sync.
function handle_verack(info) {
  console.log('p2psync: Asking for the genesis block');
  var invs = [{
    type: CoinConst.MSG.BLOCK,
    hash: network.genesisBlock.hash
  }];
  // Asks for the genesis block
  info.conn.sendGetData(invs);
}
|||
|
|||
// Log the current connection count whenever the peer manager connects.
function handle_connected(data) {
  var count = data.pm.peers.length;
  var suffix = (count != 1) ? 's' : '';
  console.log('p2psync: Connected to ' + count + ' peer' + suffix);
}
|||
|
|||
// Connect to every known peer and wire up the protocol handlers that
// drive the header sync, then start the peer manager.
function p2psync() {
  var peerman = new PeerManager();

  peerdb.forEach(function(entry) {
    peerman.addPeer(new Peer(entry.ipv4, entry.port));
  });

  peerman.on('connection', function(conn) {
    conn.on('verack', handle_verack);
    conn.on('block', handle_block);
    conn.on('headers', handle_headers);
  });
  peerman.on('connect', handle_connected);

  peerman.start();
}
|||
|
|||
// Parse one raw block from `buf` and feed its header into the db.
// Failures (duplicate/orphan) are logged and skipped, not fatal.
function filesync_block_buf(blkdir, fn, buf) {
  var block = new Block();
  block.parse(new Parser(buf), true);

  var hashStr = coinUtil.formatHashFull(block.calcHash());

  try {
    hdrdb.add(block);
  } catch (e) {
    var failHeight = hdrdb.size();
    console.log('HeaderDB failed adding block #' + failHeight + ', ' + hashStr);
    console.log('	Reason: ' + e);
    return;
  }

  // Progress note every 1000 blocks successfully added.
  var height = hdrdb.size() - 1;
  if ((height % 1000) == 0)
    console.log('HeaderDB added block #' + height + ', ' + hashStr);
}
|||
|
|||
// Synchronously scan an opened blk*.dat file: repeatedly read the
// 8-byte record header (4-byte network magic + 4-byte LE block size),
// then the raw block, handing each block to filesync_block_buf().
// Stops on EOF/short read or a magic mismatch; throws on open errors,
// oversized blocks, or truncated block data. Writes the header db to
// disk when the scan of this file finishes.
function filesync_open_cb(err, fd, blkdir, fn) {
  if (err)
    throw err;

  var hdrbuf = new Buffer(4 * 2);
  try {
    while (1) {
      // read 2x 32-bit header
      var bread = fs.readSync(fd, hdrbuf, 0, 4 * 2, null);
      if (bread < (4 * 2)) {
        console.log('Short read/EOF, ending scan of ' + fn);
        break;
      }

      // check magic matches
      var magic = hdrbuf.slice(0, 4);
      if (magic.toString() != network.magic.toString()) {
        console.log('Invalid network magic, ending scan of ' + fn);
        break;
      }

      // block size (sanity cap — presumably the era's 1 MB max block)
      var blkSize = hdrbuf.readUInt32LE(4);
      if (blkSize > (1 * 1024 * 1024))
        throw new Error('Invalid block size ' + blkSize);

      // read raw block data
      var blkBuf = new Buffer(blkSize);
      bread = fs.readSync(fd, blkBuf, 0, blkSize, null);
      if (bread != blkSize)
        throw new Error('Failed to read block');

      // process block
      filesync_block_buf(blkdir, fn, blkBuf);
    }
  } finally {
    // FIX: close the descriptor even when the scan aborts with an
    // error; previously the fd leaked on any throw above.
    fs.closeSync(fd);
  }

  // FIX: `hdrdb_fn` is never declared in this file, so this line used
  // to throw a ReferenceError after every scan. Fall back to a default
  // filename when the global is absent.
  var outFn = (typeof hdrdb_fn !== 'undefined') ? hdrdb_fn : 'hdrdb.dat';
  hdrdb.writeFile(outFn);
  console.log('Wrote header db');
}
|||
|
|||
// Open one block file asynchronously and hand it off to the scanner.
function filesync_block_file(blkdir, fn) {
  console.log('Scanning ' + fn + ' for block data.');
  fs.open(blkdir + '/' + fn, 'r', function(err, fd) {
    filesync_open_cb(err, fd, blkdir, fn);
  });
}
|||
|
|||
// Start a scan of every blkNNNN.dat file in `blkdir`, in sorted
// filename order; other directory entries are ignored.
function cmd_filesync_rd(err, files, blkdir) {
  if (err)
    throw err;

  // Hoisted out of the loop; no /g flag, so test() is stateless.
  var blockFileRe = /^blk\d+\.dat$/;
  var scanned = 0;
  files.sort().forEach(function(fn) {
    if (blockFileRe.test(fn)) {
      filesync_block_file(blkdir, fn);
      scanned++;
    }
  });

  console.log('Scanned ' + scanned + ' of ' + files.length + ' files in '
      + blkdir);
}
|||
|
|||
// Entry point: load the peer and header databases, then start syncing.
function main() {
  peerdb_load();
  hdrdb_load();

  p2psync();
}
|||
|
|||
main(); |