p2p block and transaction importing

This commit is contained in:
Manuel Araoz 2014-01-10 16:02:33 -03:00
parent ec0c78d914
commit df397936ea
14 changed files with 369 additions and 440 deletions

View File

@ -34,7 +34,7 @@
"inject",
"expect"
],
"indent": 2, // Specify indentation spacing
"indent": false, // Specify indentation spacing
"devel": true, // Allow development statements e.g. `console.log();`.
"noempty": true // Prohibit use of empty blocks.
}

View File

@ -13,10 +13,10 @@ var mongoose = require('mongoose'),
* Find block by hash ...
*/
exports.block = function(req, res, next, hash) {
Block.fromHash(hash, function(err, block) {
Block.fromHashWithInfo(hash, function(err, block) {
if (err) return next(err);
if (!block) return next(new Error('Failed to load block ' + hash));
req.block = block;
req.block = block.info;
next();
});
};

View File

@ -15,10 +15,10 @@ var Transaction = require('../models/Transaction');
* Find block by hash ...
*/
exports.transaction = function(req, res, next, txid) {
Transaction.fromID(txid, function(err, tx) {
Transaction.fromIdWithInfo(txid, function(err, tx) {
if (err) return next(err);
if (!tx) return next(new Error('Failed to load TX ' + txid));
req.transaction = tx;
req.transaction = tx.info;
next();
});
};

View File

@ -5,7 +5,6 @@
*/
var mongoose = require('mongoose'),
Schema = mongoose.Schema,
async = require('async'),
RpcClient = require('bitcore/RpcClient').class(),
config = require('../../config/config')
;
@ -23,6 +22,7 @@ var BlockSchema = new Schema({
index: true,
unique: true,
},
time: Number,
});

View File

@ -5,7 +5,10 @@
*/
var mongoose = require('mongoose'),
Schema = mongoose.Schema,
async = require('async');
async = require('async'),
RpcClient = require('bitcore/RpcClient').class(),
config = require('../../config/config');
/**
*/
@ -31,14 +34,14 @@ TransactionSchema.statics.load = function(id, cb) {
};
TransactionSchema.statics.fromID = function(txid, cb) {
TransactionSchema.statics.fromId = function(txid, cb) {
this.findOne({
txid: txid,
}).exec(cb);
};
TransactionSchema.statics.fromIDWithInfo = function(txid, cb) {
this.fromHash(hash, function(err, tx) {
TransactionSchema.statics.fromIdWithInfo = function(txid, cb) {
this.fromId(txid, function(err, tx) {
if (err) return cb(err);
tx.getInfo(function(err) { return cb(err,tx); } );
@ -46,31 +49,20 @@ TransactionSchema.statics.fromIDWithInfo = function(txid, cb) {
};
TransactionSchema.statics.createFromArray = function(txs, next) {
var that = this;
if (!txs) return next();
// console.log('exploding ', txs);
async.forEach( txs,
function(tx, callback) {
// console.log('processing TX %s', tx);
that.create({ txid: tx }, function(err) {
if (err && ! err.toString().match(/E11000/)) {
return callback();
}
if (err) {
return callback(err);
}
return callback();
});
},
function(err) {
if (err) return next(err);
return next();
return next(err);
}
);
};
@ -82,7 +74,7 @@ TransactionSchema.methods.getInfo = function (next) {
var that = this;
var rpc = new RpcClient(config.bitcoind);
rpc.getRawTransaction(this.txid, function(err, txInfo) {
rpc.getRawTransaction(this.txid, 1, function(err, txInfo) {
if (err) return next(err);
that.info = txInfo.result;

View File

@ -10,6 +10,6 @@ module.exports = {
pass: 'real_mystery',
protocol: 'http',
host: process.env.BITCOIND_HOST || '127.0.0.1',
port: process.env.BITCOIND_PORT || '8332',
port: process.env.BITCOIND_PORT || '18332',
}
}

View File

@ -1,8 +1,6 @@
rpcuser=mystery
rpcpassword=real_mystery
server=1
rpcport=8332
testnet=3
txindex=1
# Allow connections outside localhost?

View File

@ -1,226 +1,213 @@
require('classtool');
'use strict';
require('classtool');
/* We dont sync any contents from TXs, only their IDs are stored */
var isSyncTxEnabled = 0;
function spec(b) {
var mongoose = require('mongoose');
var util = require('util');
function spec() {
var mongoose = require('mongoose');
var util = require('util');
var RpcClient = require('bitcore/RpcClient').class();
var networks = require('bitcore/networks');
var async = require('async');
var RpcClient = require('bitcore/RpcClient').class();
var networks = require('bitcore/networks');
var async = require('async');
var config = require('../config/config');
var Block = require('../app/models/Block');
var Transaction=require('../app/models/Transaction');
var config = require('../config/config');
var Block = require('../app/models/Block');
var Transaction = require('../app/models/Transaction');
function Sync(config) {
this.network = config.networkName == 'testnet' ? networks.testnet : networks.livenet;
}
function Sync(config) {
this.network = config.networkName === 'testnet' ? networks.testnet: networks.livenet;
}
var progress_bar = function(string, current, total) {
console.log( util.format("\t%s %d/%d [%d%%]",
string, current, total, parseInt(100 * current/total))
);
}
// Print a one-line progress indicator, e.g. "\theight 500/1000 [50%]".
var progress_bar = function(string, current, total) {
  var pct = parseInt(100 * current / total);
  console.log(util.format('\t%s %d/%d [%d%%]', string, current, total, pct));
};
Sync.prototype.getNextBlock = function (blockHash,cb) {
var that = this;
Sync.prototype.getNextBlock = function(blockHash, cb) {
var that = this;
if (!blockHash) {
return cb();
}
this.rpc.getBlock(blockHash, function(err, blockInfo) {
if (err) return cb(err);
if (blockInfo.result.height % 1000 === 0) {
var h = blockInfo.result.height,
d = blockInfo.result.confirmations;
progress_bar('height', h, h + d);
}
if ( !blockHash ) {
return cb();
}
this.rpc.getBlock(blockHash, function(err, blockInfo) {
if (err) return cb(err);
if ( ! ( blockInfo.result.height % 1000) ) {
var h = blockInfo.result.height,
d = blockInfo.result.confirmations;
progress_bar('height', h, h + d);
}
Block.create( blockInfo.result, function(err, inBlock) {
// E11000 => already exists
if (err && ! err.toString().match(/E11000/)) {
return cb(err);
}
if (inBlock) {
Transaction.createFromArray( blockInfo.result.tx,function (err) {
return that.getNextBlock(blockInfo.result.nextblockhash, cb);
});
}
else
return that.getNextBlock(blockInfo.result.nextblockhash, cb);
});
});
}
Sync.prototype.syncBlocks = function (reindex, cb) {
var that = this;
var genesisHash = this.network.genesisBlock.hash.reverse().toString('hex');
console.log("Syncing Blocks...");
if (reindex)
return this.getNextBlock(genesisHash, cb);
Block.findOne({}, {}, { sort: { 'confirmations' : 1 } }, function(err, block) {
if (err) return cb(err);
var nextHash =
block && block.hash
? block.hash
: genesisHash
;
console.log('\tStarting at hash: ' + nextHash);
return that.getNextBlock(nextHash, cb);
});
}
// This is not currently used. Transactions are represented by txid only
// in mongodb
Sync.prototype.syncTXs = function (reindex, cb) {
var that = this;
console.log("Syncing TXs...");
if (reindex) {
// TODO?
}
Transaction.find({blockhash: null}, function(err, txs) {
if (err) return cb(err);
var read = 0;
var pull = 0;
var write = 0;
var total = txs.length;
console.log("\tneed to pull %d txs", total);
if (!total) return cb();
async.each(txs,
function(tx, next){
if (! tx.txid) {
console.log("NO TXID skipping...", tx);
return next();
}
if ( ! ( read++ % 1000) )
progress_bar('read', read, total);
that.rpc.getRawTransaction(tx.txid, 1, function(err, txInfo) {
if ( ! ( pull++ % 1000) )
progress_bar('\tpull', pull, total);
if (!err && txInfo) {
Transaction.update({txid: tx.txid}, txInfo.result, function(err) {
if (err) return next(err);
if ( ! ( write++ % 1000) )
progress_bar('\t\twrite', write, total);
return next();
});
that.storeBlock(blockInfo.result, function(err) {
if (!err) {
var txs = blockInfo.result.tx;
that.storeTxs(txs, function(err) {
if (!err) {
return that.getNextBlock(blockInfo.result.nextblockhash, cb);
}
else return next();
});
},
function(err){
if (err) return cb(err);
return cb(err);
}
);
});
}
});
});
};
Sync.prototype.start = function (opts, next) {
// Insert one block document into mongo. A Mongo E11000 duplicate-key
// error (block already stored) is deliberately swallowed so re-syncing
// the same chain segment is idempotent; any other error is propagated.
Sync.prototype.storeBlock = function(block, cb) {
Block.create(block, function(err, inBlock) {
// E11000 => already exists
if (err && ! err.toString().match(/E11000/)) {
return cb(err);
}
cb();
});
};
// Persist an array of transaction ids. Delegates to
// Transaction.createFromArray, which also tolerates duplicates.
Sync.prototype.storeTxs = function(txs, cb) {
Transaction.createFromArray(txs, cb);
};
mongoose.connect(config.db);
var db = mongoose.connection;
this.rpc = new RpcClient(config.bitcoind);
var that = this;
Sync.prototype.syncBlocks = function(reindex, cb) {
var that = this;
var genesisHash = this.network.genesisBlock.hash.reverse().toString('hex');
console.log('Syncing Blocks... '+reindex);
if (reindex) {
return this.getNextBlock(genesisHash, cb);
}
db.on('error', console.error.bind(console, 'connection error:'));
Block.findOne({},
{},
{
sort: {
'time': - 1
}
},
function(err, block) {
if (err) return cb(err);
db.once('open', function (){
var nextHash = block && block.hash ? block.hash: genesisHash;
async.series([
function(cb){
if (opts.destroy) {
console.log("Deleting Blocks...");
return db.collections['blocks'].drop(cb);
}
return cb();
},
function(cb){
if (opts.destroy) {
console.log("Deleting TXs...");
return db.collections['transactions'].drop(cb);
}
return cb();
},
function(cb) {
console.log('\tStarting at hash: ' + nextHash);
return that.getNextBlock(nextHash, cb);
});
};
if (! opts.skip_blocks) {
that.syncBlocks(opts.reindex, function(err) {
if (err) {
return cb(err);
// This is not currently used. Transactions are represented by txid only
// in mongodb
Sync.prototype.syncTXs = function(reindex, cb) {
}
console.log("\tBlocks done.");
var that = this;
return cb();
});
}
else {
return cb();
}
},
function(cb) {
if ( isSyncTxEnabled && ! opts.skip_txs) {
that.syncTXs(opts.reindex, function(err) {
if (err) {
return cb(err);
console.log('Syncing TXs...');
if (reindex) {
// TODO?
}
}
return cb();
});
}
else {
return cb();
}
},
function(cb) {
db.close();
return cb();
},
],
function(err) {
if (err) {
db.close();
return next(err);
Transaction.find({
blockhash: null
},
function(err, txs) {
if (err) return cb(err);
var read = 0;
var pull = 0;
var write = 0;
var total = txs.length;
console.log('\tneed to pull %d txs', total);
if (!total) return cb();
async.each(txs, function(tx, next) {
if (!tx.txid) {
console.log('NO TXID skipping...', tx);
return next();
}
if (read++ % 1000 === 0) progress_bar('read', read, total);
that.rpc.getRawTransaction(tx.txid, 1, function(err, txInfo) {
if (pull++ % 1000 === 0) progress_bar('\tpull', pull, total);
if (!err && txInfo) {
Transaction.update({
txid: tx.txid
},
txInfo.result, function(err) {
if (err) return next(err);
if (write++ % 1000 === 0) progress_bar('\t\twrite', write, total);
return next();
});
}
else return next();
});
},
function(err) {
if (err) return cb(err);
return cb(err);
});
});
};
// Open the mongoose connection and the bitcoind RPC client, and log
// connection errors. `opts` is currently unread here — presumably kept
// for interface symmetry with import_history; TODO confirm.
Sync.prototype.init = function(opts) {
mongoose.connect(config.db);
this.db = mongoose.connection;
this.rpc = new RpcClient(config.bitcoind);
this.db.on('error', console.error.bind(console, 'connection error:'));
};
Sync.prototype.import_history = function(opts, next) {
var that = this;
this.db.once('open', function() {
async.series([
function(cb) {
if (opts.destroy) {
console.log('Deleting Blocks...');
that.db.collections.blocks.drop(cb);
} else {
cb();
}
return next();
});
});
}
return Sync;
};
},
function(cb) {
if (opts.destroy) {
console.log('Deleting TXs...');
that.db.collections.transactions.drop(cb);
} else {
cb();
}
},
function(cb) {
if (!opts.skip_blocks) {
that.syncBlocks(opts.reindex, cb);
} else {
cb();
}
},
function(cb) {
if (isSyncTxEnabled && ! opts.skip_txs) {
that.syncTXs(opts.reindex, cb);
}
else {
return cb();
}
}], function(err) {
return next(err);
});
});
};
// Close the mongoose connection opened by init().
Sync.prototype.close = function() {
console.log("closing connection");
this.db.close();
};
return Sync;
}
module.defineClass(spec);

276
p2p.js
View File

@ -1,5 +1,8 @@
'use strict';
process.env.NODE_ENV = process.env.NODE_ENV || 'development';
var fs = require('fs');
var HeaderDB = require('./HeaderDB').class();
var Block = require('bitcore/Block').class();
@ -7,6 +10,8 @@ var CoinConst = require('bitcore/const');
var coinUtil = require('bitcore/util/util');
var networks = require('bitcore/networks');
var Parser = require('bitcore/util/BinaryParser').class();
var async = require('async');
var Sync = require('./lib/Sync').class();
var peerdb_fn = 'peerdb.json';
@ -14,222 +19,155 @@ var peerdb = undefined;
var hdrdb = undefined;
var network = networks.testnet;
var config = {
network : network.name
network: network.name
};
var PeerManager = require('bitcore/PeerManager').createClass({
config : config
config: config
});
var Peer = require('bitcore/Peer').class();
/**
 * Load the peer database from peerdb_fn. If the file is missing or
 * unparsable, seed a default list with a single localhost testnet peer
 * and write it back to disk.
 * Diff artifact removed: the scrape interleaved the old and new
 * try/catch bodies; only one (post-commit) body is kept.
 */
function peerdb_load() {
  try {
    peerdb = JSON.parse(fs.readFileSync(peerdb_fn));
  } catch(d) {
    console.warn('Unable to read peer db', peerdb_fn, 'creating new one.');
    peerdb = [{
      ipv4: '127.0.0.1',
      port: 18333
    },
    ];
    fs.writeFileSync(peerdb_fn, JSON.stringify(peerdb));
  }
}
// (Re)initialize the in-memory header DB for the configured network.
// Diff artifact removed: both the old one-line form and the new
// multi-line form were present; one definition is kept.
function hdrdb_load() {
  hdrdb = new HeaderDB({
    network: network
  });
}
// Ask the peer for the next batch of headers after our best known
// locator. Diff artifact removed: the body appeared twice (old/new
// interleave); a single copy is kept so the request is sent once.
function get_more_headers(info) {
  var conn = info.conn;
  var loc = hdrdb.locator();
  conn.sendGetHeaders(loc, coinUtil.NULL_HASH);
}
// Add a block header to the header DB. HeaderDB.add throws on
// duplicate/unconnectable headers; those are deliberately ignored.
// Diff artifact removed: the hash computation and try/catch appeared
// twice; one copy is kept. (hashStr is computed but unused — kept as
// in the post-commit code.)
function add_header(info, block) {
  var hashStr = coinUtil.formatHashFull(block.calcHash());
  try {
    hdrdb.add(block);
  } catch(e) {
    return;
  }
}
// 'headers' message handler: record each received header, then request
// the next batch. Diff artifact removed: the body appeared twice;
// a single copy is kept so each header is added only once.
function handle_headers(info) {
  console.log('handle headers');
  var headers = info.message.headers;

  headers.forEach(function(hdr) {
    add_header(info, hdr);
  });

  // We persist the header DB after each batch
  //hdrdb.writeFile(hdrdb_fn);

  // Only one request per batch of headers we receive.
  get_more_headers(info);
}
// 'verack' message handler: handshake complete. The genesis-block
// getdata request is currently disabled (left commented out, as in the
// post-commit code). Diff artifact removed: inv/invs were declared twice.
function handle_verack(info) {
  var inv = {
    type: CoinConst.MSG.BLOCK,
    hash: network.genesisBlock.hash,
  };
  var invs = [inv];
  // Asks for the genesis block
  // console.log('p2psync: Asking for the genesis block');
  // info.conn.sendGetData(invs);
}
// 'inv' message handler: log each advertised object and request all of
// them from the peer. Diff artifact removed: old ('Received inv',
// 'requesting getData') and new lines were interleaved; the post-commit
// body is kept so sendGetData runs once.
function handle_inv(info) {
  console.log('handle inv');
  // TODO: should limit the invs to objects we haven't seen yet
  var invs = info.message.invs;
  invs.forEach(function(inv) {
    console.log('Handle inv for a ' + CoinConst.MSG.to_str(inv.type));
  });
  info.conn.sendGetData(invs);
}
// Shared Sync instance used by the p2p handlers below.
// Diff artifact removed: the scrape nested a second `function
// handle_tx` declaration inside the first; reconstructed to the
// post-commit shape (module-level sync setup + one handler).
var sync = new Sync({
  networkName: networks.testnet
});
sync.init();

// 'tx' message handler: store the transaction id (contents are not
// synced — only ids, per the Sync model).
function handle_tx(info) {
  var tx = info.message.tx.getStandardizedObject();
  console.log('Handle tx: ' + tx.hash);
  sync.storeTxs([tx.hash], function(err) {
    if (err) {
      console.log('error in handle TX: ' + err);
    }
  });
}
// 'block' message handler: store the block (hash + local receive time),
// then the ids of its transactions. Diff artifact removed: old lines
// ('handle block' log, add_header call) were interleaved with the new
// body; the post-commit body is kept.
function handle_block(info) {
  var block = info.message.block;
  var now = Math.round(new Date().getTime() / 1000);

  var blockHash = coinUtil.formatHashFull(block.calcHash());
  console.log('Handle block: ' + blockHash);
  sync.storeBlock({
    'hash': blockHash,
    'time': now
  },
  function(err) {
    if (err) {
      console.log('error in handle Block: ' + err);
    } else {
      var hashes = block.txs.map(function(tx) {
        return coinUtil.formatHashFull(tx.hash);
      });
      sync.storeTxs(hashes, function(){});
    }
  });
}
// 'connect' event handler: report how many peers we are connected to.
// Diff artifact removed: the body appeared twice; the post-commit copy
// is kept.
function handle_connected(data) {
  var peerman = data.pm;
  var peers_n = peerman.peers.length;
  console.log('p2psync: Connected to ' + peers_n + ' peer' + (peers_n !== 1 ? 's': ''));
}
// Connect to every peer in peerdb and wire up the p2p message handlers.
// Diff artifact removed: the forEach and the 'connection' wiring each
// appeared twice; a single copy is kept so each peer is added once and
// handlers are registered once.
function p2psync() {
  var peerman = new PeerManager();

  peerdb.forEach(function(datum) {
    var peer = new Peer(datum.ipv4, datum.port);
    peerman.addPeer(peer);
  });

  peerman.on('connection', function(conn) {
    conn.on('verack', handle_verack);
    conn.on('block', handle_block);
    conn.on('headers', handle_headers);
    conn.on('inv', handle_inv);
    conn.on('tx', handle_tx);
  });
  peerman.on('connect', handle_connected);

  peerman.start();
}
// Parse one raw block out of `buf` (read from a blk*.dat file) and add
// its header to the header DB; logs and skips blocks HeaderDB rejects.
// Logs a progress line every 1000 blocks added.
// (Removed by this commit — shown here as deleted diff lines.)
function filesync_block_buf(blkdir, fn, buf) {
var parser = new Parser(buf);
var block = new Block();
block.parse(parser, true);
var hashStr = coinUtil.formatHashFull(block.calcHash());
try {
hdrdb.add(block);
} catch (e) {
var height = hdrdb.size();
console.log('HeaderDB failed adding block #' + height + ', ' + hashStr);
console.log('  Reason: ' + e);
return;
}
var height = hdrdb.size() - 1;
if ((height % 1000) == 0)
console.log('HeaderDB added block #' + height + ', ' + hashStr);
}
// Scan an opened blk*.dat file descriptor synchronously: repeatedly read
// the 8-byte record header (4-byte network magic + 4-byte little-endian
// block size), validate it, read the raw block, and hand it to
// filesync_block_buf. Stops on short read/EOF or magic mismatch; throws
// on an oversized (>1 MiB) or truncated block. Persists the header DB
// when the scan ends. (Removed by this commit — deleted diff lines.)
function filesync_open_cb(err, fd, blkdir, fn) {
if (err)
throw err;
var hdrbuf = new Buffer(4 * 2);
while (1) {
// read 2x 32-bit header
var bread = fs.readSync(fd, hdrbuf, 0, 4 * 2, null);
if (bread < (4 * 2)) {
console.log('Short read/EOF, ending scan of ' + fn);
break;
}
// check magic matches
var magic = hdrbuf.slice(0, 4);
if (magic.toString() != network.magic.toString()) {
console.log('Invalid network magic, ending scan of ' + fn);
break;
}
// block size
var blkSize = hdrbuf.readUInt32LE(4);
if (blkSize > (1 * 1024 * 1024))
throw new Error('Invalid block size ' + blkSize);
// read raw block data
var blkBuf = new Buffer(blkSize);
bread = fs.readSync(fd, blkBuf, 0, blkSize, null);
if (bread != blkSize)
throw new Error('Failed to read block');
// process block
filesync_block_buf(blkdir, fn, blkBuf);
}
fs.closeSync(fd);
hdrdb.writeFile(hdrdb_fn);
console.log('Wrote header db');
}
// Open one blk*.dat file for reading and delegate the scan to
// filesync_open_cb. (Removed by this commit — deleted diff lines.)
function filesync_block_file(blkdir, fn) {
console.log('Scanning ' + fn + ' for block data.');
var pathname = blkdir + '/' + fn;
fs.open(pathname, 'r', function(err, fd) {
filesync_open_cb(err, fd, blkdir, fn);
});
}
// readdir callback: scan every blk*.dat file in `blkdir` (sorted) and
// report how many were scanned. (Removed by this commit — deleted diff
// lines.)
function cmd_filesync_rd(err, files, blkdir) {
if (err)
throw err;
files = files.sort();
var scanned = 0;
files.forEach(function(fn) {
var re = /^blk\d+\.dat$/;
if (fn.match(re)) {
filesync_block_file(blkdir, fn);
scanned++;
}
});
console.log('Scanned ' + scanned + ' of ' + files.length + ' files in '
+ blkdir);
// NOTE(review): `peerman` is not in scope here — this line looks like
// diff residue spliced in from p2psync's tail; verify against the
// original file before relying on this function.
peerman.start();
}
// Entry point: load the peer and header databases, then start the p2p
// sync loop. Diff artifact removed: every call appeared twice (old/new
// interleave); each step must run exactly once.
function main() {
  peerdb_load();
  hdrdb_load();

  p2psync();
}
main();

View File

@ -11,7 +11,12 @@
{
"name": "Matias Alejo Garcia",
"email": "ematiu@gmail.com"
},
{
"name": "Manuel Araoz",
"email": "manuelaraoz@gmail.com"
}
],
"bugs": {
"url": "https://github.com/bitpay/mystery/issues"

View File

@ -1,4 +1,5 @@
--require should
-R spec
--ui bdd
--recursive

View File

@ -11,31 +11,37 @@ var
config = require('../../config/config'),
Block = require('../../app/models/Block');
mongoose.connect(config.db);
var db = mongoose.connection;
mongoose.connection.on('error', function(err) { console.log(err); });
describe('getInfo', function(){
describe('Block getInfo', function(){
var block_hash = TESTING_BLOCK;
before(function(done) {
mongoose.connect(config.db);
done();
});
after(function(done) {
mongoose.connection.close();
done();
});
db.on('error', console.error.bind(console, 'connection error:'));
it('should poll block\'s info from mongoose', function(done) {
var block2 = Block.fromHashWithInfo(TESTING_BLOCK, function(err, b2) {
if (err) done(err);
db.once('open', function (){
assert.equal(b2.hash, TESTING_BLOCK);
done();
});
});
var block2 = Block.fromHashWithInfo(block_hash, function(err, b2) {
if (err) done(err);
console.log("Block obj:");
console.log(b2);
console.log("Block.info:");
console.log(b2.info);
db.close();
done();
it('should poll block\'s info from bitcoind', function(done) {
var block2 = Block.fromHashWithInfo(TESTING_BLOCK, function(err, b2) {
if (err) done(err);
assert.equal(b2.info.hash, TESTING_BLOCK);
assert.equal(b2.info.chainwork, '00000000000000000000000000000000000000000000000000446af21d50acd3');
done();
});
});
});

View File

@ -1,9 +0,0 @@
var assert = require("assert")
describe('Array', function(){
describe('#indexOf()', function(){
it('should return -1 when the value is not present', function(){
assert.equal(-1, [1,2,3].indexOf(5));
assert.equal(-1, [1,2,3].indexOf(0));
})
})
})

View File

@ -1,34 +1,45 @@
#!/usr/bin/env node
#!/usr/bin/env node
'use strict';
process.env.NODE_ENV = process.env.NODE_ENV || 'development';
require('buffertools').extend();
var SYNC_VERSION = '0.1';
var program = require('commander');
var Sync = require('../lib/Sync').class();
var SYNC_VERSION = '0.1';
var program = require('commander');
var Sync = require('../lib/Sync').class();
var async = require('async');
program
.version(SYNC_VERSION)
.option('-N --network [livenet]', 'Set bitcoin network [testnet]', 'testnet')
.option('-R --reindex', 'Force reindexing', '0')
.option('-D --destroy', 'Remove current DB', '0')
.option('--skip_blocks', 'Sync blocks')
.option('--skip_txs', 'Sync transactions')
.parse(process.argv);
program.version(SYNC_VERSION).option('-N --network [livenet]', 'Set bitcoin network [testnet]', 'testnet').option('-R --reindex', 'Force reindexing', '0').option('-D --destroy', 'Remove current DB', '0').option('--skip_blocks', 'Sync blocks').option('--skip_txs', 'Sync transactions').parse(process.argv);
var sync = new Sync({ networkName: program.network });
var sync = new Sync({
networkName: program.network
});
if (program.remove) {
}
sync.start( program, function(err){
if (err) {
console.log("CRITICAL ERROR: ", err);
}
else {
console.log('Done!');
}
});
async.series([
function(cb) {
sync.init(program);
cb();
},
function(cb) {
sync.import_history(program, function(err) {
if (err) {
console.log('CRITICAL ERROR: ', err);
}
else {
console.log('Done!');
}
cb();
});
},
function(cb) {
sync.close();
cb();
}]);