
adjust format of logs generated by nodejs apps

Branch: use-env-var-docker
Author: kenshin-samourai, 5 years ago
Commit: 3f359767da
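
Every log message is now prefixed with the name of the emitting subsystem (API, Auth, Tracker, PushTx, Orchestrator, Importer, Db Wrapper, Bitcoind RPC, Indexer RPC, HttpServer), and lib/logger.js adds a level label to each entry. A hypothetical before/after pair for one of the rewritten calls (timestamps illustrative):

// before this commit
Logger.info(`Completed GET /fees`)       // 2020-05-24T10:00:00.123Z Completed GET /fees
// after this commit
Logger.info(`API : Completed GET /fees`) // 2020-05-24T10:00:00Z INFO API : Completed GET /fees
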
accounts/api-helper.js | 2
accounts/fees-rest-api.js | 2
accounts/headers-rest-api.js | 4
accounts/index-cluster.js | 6
accounts/index.js | 4
accounts/multiaddr-rest-api.js | 4
accounts/notifications-server.js | 6
accounts/notifications-service.js | 34
accounts/status-rest-api.js | 2
accounts/support-rest-api.js | 26
accounts/transactions-rest-api.js | 8
accounts/unspent-rest-api.js | 4
accounts/xpub-rest-api.js | 22
lib/auth/authentication-manager.js | 2
lib/auth/localapikey-strategy-configurator.js | 6
lib/bitcoin/addresses-helper.js | 2
lib/bitcoin/hd-accounts-helper.js | 6
lib/bitcoin/hd-accounts-service.js | 10
lib/bitcoind-rpc/fees.js | 2
lib/bitcoind-rpc/latest-block.js | 4
lib/bitcoind-rpc/rpc-client.js | 2
lib/bitcoind-rpc/transactions.js | 6
lib/db/mysql-db-wrapper.js | 24
lib/http-server/http-server.js | 6
lib/indexer-rpc/rpc-client.js | 2
lib/logger.js | 9
lib/remote-importer/bitcoind-wrapper.js | 4
lib/remote-importer/esplora-wrapper.js | 2
lib/remote-importer/local-indexer-wrapper.js | 4
lib/remote-importer/oxt-wrapper.js | 4
lib/remote-importer/remote-importer.js | 28
lib/remote-importer/sources-mainnet.js | 6
lib/remote-importer/sources-testnet.js | 6
lib/remote-importer/sources.js | 4
pushtx/index-orchestrator.js | 4
pushtx/index.js | 4
pushtx/orchestrator.js | 22
pushtx/pushtx-processor.js | 6
pushtx/pushtx-rest-api.js | 8
pushtx/status.js | 2
pushtx/transactions-scheduler.js | 4
tracker/block.js | 12
tracker/blockchain-processor.js | 44
tracker/index.js | 4
tracker/mempool-processor.js | 30
tracker/transaction.js | 10
46 files changed

2
accounts/api-helper.js

@ -137,7 +137,7 @@ class ApiHelper {
HttpServer.sendError(res, errors.body.INVDATA)
Logger.error(
params,
`ApiHelper.validateEntitiesParams() : Invalid arguments`
`API : ApiHelper.validateEntitiesParams() : Invalid arguments`
)
}
}

2
accounts/fees-rest-api.js

@ -46,7 +46,7 @@ class FeesRestApi {
} catch (e) {
HttpServer.sendError(res, e)
} finally {
debugApi && Logger.info(`Completed GET /fees`)
debugApi && Logger.info(`API : Completed GET /fees`)
}
}

4
accounts/headers-rest-api.js

@ -49,7 +49,7 @@ class HeadersRestApi {
} catch(e) {
HttpServer.sendError(res, e)
} finally {
debugApi && Logger.info(`Completed GET /header/${req.params.hash}`)
debugApi && Logger.info(`API : Completed GET /header/${req.params.hash}`)
}
}
@ -66,7 +66,7 @@ class HeadersRestApi {
HttpServer.sendError(res, errors.body.INVDATA)
Logger.error(
req.params.hash,
'HeadersRestApi.validateArgsGetHeader() : Invalid hash'
'API : HeadersRestApi.validateArgsGetHeader() : Invalid hash'
)
} else {
next()

6
accounts/index-cluster.js

@ -20,15 +20,15 @@ if (cluster.isMaster) {
})
cluster.on('listening', function(worker) {
Logger.info(`Cluster ${worker.process.pid} connected`)
Logger.info(`API : Cluster ${worker.process.pid} connected`)
})
cluster.on('disconnect', function(worker) {
Logger.info(`Cluster ${worker.process.pid} disconnected`)
Logger.info(`API : Cluster ${worker.process.pid} disconnected`)
})
cluster.on('exit', function(worker) {
Logger.info(`Cluster ${worker.process.pid} is dead`)
Logger.info(`API : Cluster ${worker.process.pid} is dead`)
// Ensuring a new cluster will start if an old one dies
cluster.fork()
})

4
accounts/index.js

@ -28,8 +28,8 @@
/**
* Samourai REST API
*/
Logger.info('Process ID: ' + process.pid)
Logger.info('Preparing the REST API')
Logger.info('API : Process ID: ' + process.pid)
Logger.info('API : Preparing the REST API')
// Wait for Bitcoind RPC API
// being ready to process requests

4
accounts/multiaddr-rest-api.js

@ -85,7 +85,7 @@ class MultiaddrRestApi {
${req.query.bip49 ? req.query.bip49 : ''} \
${req.query.bip84 ? req.query.bip84 : ''}`
Logger.info(`Completed GET /multiaddr ${strParams}`)
Logger.info(`API : Completed GET /multiaddr ${strParams}`)
}
}
}
@ -126,7 +126,7 @@ class MultiaddrRestApi {
${req.body.bip49 ? req.body.bip49 : ''} \
${req.body.bip84 ? req.body.bip84 : ''}`
Logger.info(`Completed POST /multiaddr ${strParams}`)
Logger.info(`API : Completed POST /multiaddr ${strParams}`)
}
}
}

6
accounts/notifications-server.js

@ -61,7 +61,7 @@ class NotificationsServer {
const header = JSON.parse(message.toString())
this.notifService.notifyBlock(header)
} catch(e) {
Logger.error(e, 'NotificationServer._initTrackerSocket() : Error in block message')
Logger.error(e, 'API : NotificationServer._initTrackerSocket() : Error in block message')
}
break
case 'transaction':
@ -69,11 +69,11 @@ class NotificationsServer {
const tx = JSON.parse(message.toString())
this.notifService.notifyTransaction(tx)
} catch(e) {
Logger.error(e, 'NotificationServer._initTrackerSocket() : Error in transaction message')
Logger.error(e, 'API : NotificationServer._initTrackerSocket() : Error in transaction message')
}
break
default:
Logger.info(`Unknown ZMQ message topic: "${topic}"`)
Logger.info(`API : Unknown ZMQ message topic: "${topic}"`)
}
})
}

34
accounts/notifications-service.js

@ -59,7 +59,7 @@ class NotificationsService {
_initWSServer(server) {
this.ws = new WebSocket.server({httpServer: server})
Logger.info('Created WebSocket server')
Logger.info('API : Created WebSocket server')
this.ws.on('request', req => {
try {
@ -67,14 +67,14 @@ class NotificationsService {
conn.id = status.sessions++
conn.subs = []
debug && Logger.info(`Client ${conn.id} connected`)
debug && Logger.info(`API : Client ${conn.id} connected`)
conn.on('close', () => {
this._closeWSConnection(conn, false)
})
conn.on('error', err => {
Logger.error(err, `NotificationsService : Error on connection ${conn.id}`)
Logger.error(err, `API : NotificationsService : Error on connection ${conn.id}`)
if (conn.connected)
this._closeWSConnection(conn, true)
})
@ -91,7 +91,7 @@ class NotificationsService {
status.maxConn = Math.max(status.maxConn, Object.keys(this.conn).length)
} catch(e) {
Logger.error(e, `NotificationsService._initWSServer() : Error during request accept`)
Logger.error(e, `API : NotificationsService._initWSServer() : Error during request accept`)
}
})
}
@ -120,10 +120,10 @@ class NotificationsService {
if (forcedClose && conn.connected)
conn.drop(1008, 'Get out of here!')
debug && Logger.info(`Client ${conn.id} disconnected`)
debug && Logger.info(`API : Client ${conn.id} disconnected`)
} catch(e) {
Logger.error(e, 'NotificationsService._closeWSConnection()')
Logger.error(e, 'API : NotificationsService._closeWSConnection()')
}
}
@ -134,7 +134,7 @@ class NotificationsService {
*/
_filterWSMessage(msg) {
if (this.cacheSubs.has(msg)) {
debug && Logger.info('Duplicate subscriptions detected')
debug && Logger.info('API : Duplicate subscriptions detected')
return false
} else {
this.cacheSubs.set(msg, true)
@ -150,7 +150,7 @@ class NotificationsService {
*/
_handleWSMessage(msg, conn) {
try {
debug && Logger.info(`Received from client ${conn.id}: ${msg}`)
debug && Logger.info(`API : Received from client ${conn.id}: ${msg}`)
const data = JSON.parse(msg)
@ -183,7 +183,7 @@ class NotificationsService {
break
}
} catch(e) {
Logger.error(e, 'NotificationsService._handleWSMessage() : WebSocket message error')
Logger.error(e, 'API : NotificationsService._handleWSMessage() : WebSocket message error')
}
}
@ -223,7 +223,7 @@ class NotificationsService {
this.subs[topic].push(conn.id)
debug && Logger.info(`Client ${conn.id} subscribed to ${topic}`)
debug && Logger.info(`API : Client ${conn.id} subscribed to ${topic}`)
}
/**
@ -267,7 +267,7 @@ class NotificationsService {
try {
this.conn[cid].sendUTF(msg)
} catch(e) {
Logger.error(e, `NotificationsService.dispatch() : Error sending dispatch for ${topic} to client ${cid}`)
Logger.error(e, `API : NotificationsService.dispatch() : Error sending dispatch for ${topic} to client ${cid}`)
}
}
}
@ -284,7 +284,7 @@ class NotificationsService {
}
this.dispatch('block', JSON.stringify(data))
} catch(e) {
Logger.error(e, `NotificationsService.notifyBlock()`)
Logger.error(e, `API : NotificationsService.notifyBlock()`)
}
}
@ -440,14 +440,14 @@ class NotificationsService {
try {
this.conn[cid].sendUTF(JSON.stringify(data))
debug && Logger.error(`Sent ctx ${ctx.hash} to client ${cid}`)
debug && Logger.error(`API : Sent ctx ${ctx.hash} to client ${cid}`)
} catch(e) {
Logger.error(e, `NotificationsService.notifyTransaction() : Trouble sending ctx to client ${cid}`)
Logger.error(e, `API : NotificationsService.notifyTransaction() : Trouble sending ctx to client ${cid}`)
}
}
} catch(e) {
Logger.error(e, `NotificationsService.notifyTransaction()`)
Logger.error(e, `API : NotificationsService.notifyTransaction()`)
}
}
@ -464,9 +464,9 @@ class NotificationsService {
try {
this.conn[cid].sendUTF(JSON.stringify(data))
debug && Logger.error(`Sent authentication error to client ${cid}`)
debug && Logger.error(`API : Sent authentication error to client ${cid}`)
} catch(e) {
Logger.error(e, `NotificationsService.notifyAuthError() : Trouble sending authentication error to client ${cid}`)
Logger.error(e, `API : NotificationsService.notifyAuthError() : Trouble sending authentication error to client ${cid}`)
}
}

2
accounts/status-rest-api.js

@ -47,7 +47,7 @@ class StatusRestApi {
} catch(e) {
HttpServer.sendError(res, e)
} finally {
debugApi && Logger.info(`Completed GET /status`)
debugApi && Logger.info(`API : Completed GET /status`)
}
}

26
accounts/support-rest-api.js

@ -108,7 +108,7 @@ class SupportRestApi {
HttpServer.sendError(res, errors.generic.GEN)
} finally {
debugApi && Logger.info(`Completed GET /support/address/${req.params.addr}/info`)
debugApi && Logger.info(`API : Completed GET /support/address/${req.params.addr}/info`)
}
}
@ -175,7 +175,7 @@ class SupportRestApi {
HttpServer.sendError(res, errors.generic.GEN)
} finally {
debugApi && Logger.info(`Completed GET /support/address/${req.params.addr}/rescan`)
debugApi && Logger.info(`API : Completed GET /support/address/${req.params.addr}/rescan`)
}
}
@ -212,7 +212,7 @@ class SupportRestApi {
HttpServer.sendError(res, errors.generic.GEN)
} finally {
debugApi && Logger.info(`Completed GET /support/xpub/${req.params.xpub}/info`)
debugApi && Logger.info(`API : Completed GET /support/xpub/${req.params.xpub}/info`)
}
}
@ -270,7 +270,7 @@ class SupportRestApi {
HttpServer.sendRawData(res, JSON.stringify(ret, null, 2))
} else {
ret.status = 'Rescan Error'
Logger.error(e, 'SupportRestApi.getXpubRescan() : Support rescan error')
Logger.error(e, 'API : SupportRestApi.getXpubRescan() : Support rescan error')
HttpServer.sendError(res, JSON.stringify(ret, null, 2))
}
}
@ -279,7 +279,7 @@ class SupportRestApi {
HttpServer.sendError(res, errors.generic.GEN)
} finally {
debugApi && Logger.info(`Completed GET /support/xpub/${req.params.xpub}/rescan`)
debugApi && Logger.info(`API : Completed GET /support/xpub/${req.params.xpub}/rescan`)
}
}
@ -300,10 +300,10 @@ class SupportRestApi {
const ret = {
status: 'error'
}
Logger.error(e, 'SupportRestApi.getPairing() : Support pairing error')
Logger.error(e, 'API : SupportRestApi.getPairing() : Support pairing error')
HttpServer.sendError(res, JSON.stringify(ret, null, 2))
} finally {
debugApi && Logger.info(`Completed GET /pairing`)
debugApi && Logger.info(`API : Completed GET /pairing`)
}
}
@ -318,7 +318,7 @@ class SupportRestApi {
url = fs.readFileSync('/var/lib/tor/hsv3explorer/hostname', 'utf8')
url = url.replace('\n', '')
} catch(e) {
Logger.error(e, 'SupportRestApi.getPairing() : Cannot read explorer onion address')
Logger.error(e, 'API : SupportRestApi.getPairing() : Cannot read explorer onion address')
}
}
const ret = {
@ -333,10 +333,10 @@ class SupportRestApi {
const ret = {
status: 'error'
}
Logger.error(e, 'SupportRestApi.getPairingExplorer() : Support pairing error')
Logger.error(e, 'API : SupportRestApi.getPairingExplorer() : Support pairing error')
HttpServer.sendError(res, JSON.stringify(ret, null, 2))
} finally {
debugApi && Logger.info(`Completed GET /pairing/explorer`)
debugApi && Logger.info(`API : Completed GET /pairing/explorer`)
}
}
@ -351,7 +351,7 @@ class SupportRestApi {
if (!isValidXpub) {
HttpServer.sendError(res, errors.body.INVDATA)
Logger.error(null, `SupportRestApi.validateArgsGetXpubInfo() : Invalid xpub ${req.params.xpub}`)
Logger.error(null, `API : SupportRestApi.validateArgsGetXpubInfo() : Invalid xpub ${req.params.xpub}`)
} else {
next()
}
@ -369,7 +369,7 @@ class SupportRestApi {
if (!(isValidXpub && isValidGap)) {
HttpServer.sendError(res, errors.body.INVDATA)
Logger.error(null, 'SupportRestApi.validateArgsGetXpubRescan() : Invalid arguments')
Logger.error(null, 'API : SupportRestApi.validateArgsGetXpubRescan() : Invalid arguments')
} else {
next()
}
@ -386,7 +386,7 @@ class SupportRestApi {
if (!isValidAddress) {
HttpServer.sendError(res, errors.body.INVDATA)
Logger.error(null, `SupportRestApi.validateAddress() : Invalid address ${req.params.addr}`)
Logger.error(null, `API : SupportRestApi.validateAddress() : Invalid address ${req.params.addr}`)
} else {
next()
}

8
accounts/transactions-rest-api.js

@ -63,7 +63,7 @@ class TransactionsRestApi {
HttpServer.sendError(res, e)
} finally {
const strParams = `${req.query.fees ? req.query.fees : ''}`
debugApi && Logger.info(`Completed GET /tx/${req.params.txid} ${strParams}`)
debugApi && Logger.info(`API : Completed GET /tx/${req.params.txid} ${strParams}`)
}
}
@ -97,7 +97,7 @@ class TransactionsRestApi {
${req.query.page ? req.query.page : ''} \
${req.query.count ? req.query.count : ''}`
debugApi && Logger.info(`Completed GET /txs ${strParams}`)
debugApi && Logger.info(`API : Completed GET /txs ${strParams}`)
}
}
@ -118,7 +118,7 @@ class TransactionsRestApi {
HttpServer.sendError(res, errors.body.INVDATA)
Logger.error(
req.params,
'HeadersRestApi.validateArgsGetTransaction() : Invalid arguments'
'API : HeadersRestApi.validateArgsGetTransaction() : Invalid arguments'
)
Logger.error(req.query, '')
} else {
@ -145,7 +145,7 @@ class TransactionsRestApi {
HttpServer.sendError(res, errors.body.INVDATA)
Logger.error(
req.query,
'HeadersRestApi.validateArgsGetTransactions() : Invalid arguments'
'API : HeadersRestApi.validateArgsGetTransactions() : Invalid arguments'
)
} else {
next()

4
accounts/unspent-rest-api.js

@ -85,7 +85,7 @@ class UnspentRestApi {
${req.query.bip49 ? req.query.bip49 : ''} \
${req.query.bip84 ? req.query.bip84 : ''}`
Logger.info(`Completed GET /unspent ${strParams}`)
Logger.info(`API : Completed GET /unspent ${strParams}`)
}
}
}
@ -126,7 +126,7 @@ class UnspentRestApi {
${req.body.bip49 ? req.body.bip49 : ''} \
${req.body.bip84 ? req.body.bip84 : ''}`
Logger.info(`Completed POST /unspent ${strParams}`)
Logger.info(`API : Completed POST /unspent ${strParams}`)
}
}
}

22
accounts/xpub-rest-api.js

@ -151,7 +151,7 @@ class XPubRestApi {
return HttpServer.sendError(res, errors.generic.GEN)
} finally {
debugApi && Logger.info(`Completed POST /xpub ${req.body.xpub}`)
debugApi && Logger.info(`API : Completed POST /xpub ${req.body.xpub}`)
}
}
@ -194,11 +194,11 @@ class XPubRestApi {
HttpServer.sendOkData(res, ret)
} catch(e) {
Logger.error(e, 'XpubRestApi.getXpub()')
Logger.error(e, 'API : XpubRestApi.getXpub()')
HttpServer.sendError(res, e)
} finally {
debugApi && Logger.info(`Completed GET /xpub/${req.params.xpub}`)
debugApi && Logger.info(`API : Completed GET /xpub/${req.params.xpub}`)
}
}
@ -253,7 +253,7 @@ class XPubRestApi {
}
} finally {
debugApi && Logger.info(`Completed POST /xpub/${req.params.xpub}/lock`)
debugApi && Logger.info(`API : Completed POST /xpub/${req.params.xpub}/lock`)
}
}
@ -303,7 +303,7 @@ class XPubRestApi {
HttpServer.sendError(res, errors.generic.GEN)
} finally {
debugApi && Logger.info(`Completed DELETE /xpub/${req.params.xpub}`)
debugApi && Logger.info(`API : Completed DELETE /xpub/${req.params.xpub}`)
}
}
@ -327,8 +327,8 @@ class XPubRestApi {
xpub = hdaHelper.xlatXPUB(origXpub)
scheme = isYpub ? hdaHelper.BIP49 : hdaHelper.BIP84
if (trace) {
Logger.info('Converted: ' + origXpub)
Logger.info('Resulting xpub: ' + xpub)
Logger.info('API : Converted: ' + origXpub)
Logger.info('API : Resulting xpub: ' + xpub)
}
}
@ -371,7 +371,7 @@ class XPubRestApi {
HttpServer.sendError(res, errors.body.INVDATA)
Logger.error(
req.body,
'XpubRestApi.validateArgsPostXpub() : Invalid arguments'
'API : XpubRestApi.validateArgsPostXpub() : Invalid arguments'
)
} else {
next()
@ -391,7 +391,7 @@ class XPubRestApi {
HttpServer.sendError(res, errors.body.INVDATA)
Logger.error(
req.params.xpub,
'XpubRestApi.validateArgsGetXpub() : Invalid arguments'
'API : XpubRestApi.validateArgsGetXpub() : Invalid arguments'
)
} else {
next()
@ -414,7 +414,7 @@ class XPubRestApi {
HttpServer.sendError(res, errors.body.INVDATA)
Logger.error(
req.params,
'XpubRestApi.validateArgsPostLockXpub() : Invalid arguments'
'API : XpubRestApi.validateArgsPostLockXpub() : Invalid arguments'
)
Logger.error(req.body, '')
} else {
@ -437,7 +437,7 @@ class XPubRestApi {
HttpServer.sendError(res, errors.body.INVDATA)
Logger.error(
req.params,
'XpubRestApi.validateArgsDeleteXpub() : Invalid arguments'
'API : XpubRestApi.validateArgsDeleteXpub() : Invalid arguments'
)
Logger.error(req.body, '')
} else {

2
lib/auth/authentication-manager.js

@ -40,7 +40,7 @@ class AuthenticationManager {
if (Configurator) {
this.activeStrategy = new Configurator()
this.activeStrategy.configure()
Logger.info(`Authentication strategy ${this.activeStrategyName} successfully configured`)
Logger.info(`Auth : Authentication strategy ${this.activeStrategyName} successfully configured`)
}
} catch(e) {

6
lib/auth/localapikey-strategy-configurator.js

@ -43,14 +43,14 @@ class LocalApiKeyStrategyConfigurator {
if (apiKey == _adminKey) {
// Check if received key is a valid api key
Logger.info('Successful authentication with an admin key')
Logger.info('Auth : Successful authentication with an admin key')
return done(null, {'profile': authorzMgr.TOKEN_PROFILE_ADMIN})
} else if (_apiKeys.indexOf(apiKey) >= 0) {
// Check if received key is a valid api key
Logger.info('Successful authentication with an api key')
Logger.info('Auth : Successful authentication with an api key')
return done(null, {'profile': authorzMgr.TOKEN_PROFILE_API})
} else {
Logger.error(null, `Authentication failure (apikey=${apiKey})`)
Logger.error(null, `Auth : Authentication failure (apikey=${apiKey})`)
return done('Invalid API key', false)
}
}

2
lib/bitcoin/addresses-helper.js

@ -101,7 +101,7 @@ class AddressesHelper {
try {
return bitcoin.address.fromBech32(str).data.toString('hex')
} catch(e) {
Logger.error(e, 'AddressesHelper.getScriptHashFromBech32()')
Logger.error(e, 'AddressesHelper : getScriptHashFromBech32()')
return null
}
}

6
lib/bitcoin/hd-accounts-helper.js

@ -123,7 +123,7 @@ class HDAccountsHelper {
&& ver != this.MAGIC_ZPUB
&& ver != this.MAGIC_VPUB
) {
//Logger.error(null, 'HdAccountsHelper.xlatXPUB() : Incorrect format')
//Logger.error(null, 'HdAccountsHelper : xlatXPUB() : Incorrect format')
return ''
}
@ -379,12 +379,12 @@ class HDAccountsHelper {
if (msg.status = 'ok') {
resolve(msg.addresses)
} else {
Logger.error(null, 'A problem was met during parallel addresses derivation')
Logger.error(null, 'HdAccountsHelper : A problem was met during parallel addresses derivation')
reject()
}
} catch(e) {
Logger.error(e, 'A problem was met during parallel addresses derivation')
Logger.error(e, 'HdAccountsHelper : A problem was met during parallel addresses derivation')
reject(e)
}
})

10
lib/bitcoin/hd-accounts-service.js

@ -40,7 +40,7 @@ class HDAccountsService {
const isInvalidXpub = (e == errors.xpub.INVALID || e == errors.xpub.PRIVKEY)
const isLockedXpub = (e == errors.xpub.LOCKED)
const err = (isInvalidXpub || isLockedXpub) ? e : errors.xpub.CREATE
Logger.error(e, 'HdAccountsService.createHdAccount()' + err)
Logger.error(e, 'HdAccountsService : createHdAccount()' + err)
return Promise.reject(err)
}
}
@ -139,7 +139,7 @@ class HDAccountsService {
else if (scheme == hdaHelper.BIP84)
segwit = ' SegWit (BIP84)'
Logger.info(`Created HD Account: ${xpub}${segwit}`)
Logger.info(`HdAccountsService : Created HD Account: ${xpub}${segwit}`)
const externalPrm = hdaHelper.deriveAddresses(xpub, 0, _.range(gap.external), scheme)
const internalPrm = hdaHelper.deriveAddresses(xpub, 1, _.range(gap.internal), scheme)
@ -196,15 +196,15 @@ class HDAccountsService {
// check for a derivation scheme mismatch
if (info.type != scheme) {
if (info.locked && !forceOverride) {
Logger.info(`Attempted override on locked account: ${xpub}`)
Logger.info(`HdAccountsService : Attempted override on locked account: ${xpub}`)
return Promise.reject(errors.xpub.LOCKED)
} else {
Logger.info(`Derivation scheme override: ${xpub}`)
Logger.info(`HdAccountsService : Derivation scheme override: ${xpub}`)
return db.deleteHDAccount(xpub)
}
}
} catch(e) {
Logger.error(e, 'HDAccountsService.derivationOverrideCheck()')
Logger.error(e, 'HDAccountsService : derivationOverrideCheck()')
return Promise.reject(e)
}
}

2
lib/bitcoind-rpc/fees.js

@ -58,7 +58,7 @@ class Fees {
const level = await this.rpcClient.cmd('estimatesmartfee', tgt, this.feeType)
this.fees[tgt] = Math.round(level.feerate * 1e5)
} catch(e) {
Logger.error(e, 'Fees.refresh()')
Logger.error(e, 'Bitcoind RPC : Fees.refresh()')
delete this.fees[tgt]
}
})

4
lib/bitcoind-rpc/latest-block.js

@ -43,7 +43,7 @@ class LatestBlock {
this.onBlockHash(msg.toString('hex'))
break
default:
Logger.info(topic.toString())
Logger.info(`Bitcoind RPC : ${topic.toString()}`)
}
})
}
@ -61,7 +61,7 @@ class LatestBlock {
this.time = header.mediantime
this.diff = header.difficulty
Logger.info(`Block ${this.height} ${this.hash}`)
Logger.info(`Bitcoind RPC : Block ${this.height} ${this.hash}`)
}
}

2
lib/bitcoind-rpc/rpc-client.js

@ -78,7 +78,7 @@ class RpcClient {
await client.getblockchaininfo()
} catch(e) {
client = null
Logger.info('Bitcoind RPC API is still unreachable. New attempt in 20s.')
Logger.info('Bitcoind RPC : API is still unreachable. New attempt in 20s.')
return util.delay(20000).then(() => {
return RpcClient.waitForBitcoindRpcApi()
})

6
lib/bitcoind-rpc/transactions.js

@ -65,7 +65,7 @@ class Transactions {
return await util.seriesCall(txs, async tx => {
if (tx.result == null) {
Logger.info(` got null for ${txids[tx.id]}`)
Logger.info(`Bitcoind RPC : got null for ${txids[tx.id]}`)
return null
} else {
return this._prepareTxResult(tx.result, fees)
@ -73,7 +73,7 @@ class Transactions {
})
} catch(e) {
Logger.error(e, 'Transaction.getTransactions()')
Logger.error(e, 'Bitcoind RPC : Transaction.getTransactions()')
return Promise.reject(errors.generic.GEN)
}
}
@ -99,7 +99,7 @@ class Transactions {
this.txCache.set(txid, ret)
return ret
} catch(e) {
Logger.error(e, 'Transaction.getTransaction()')
Logger.error(e, 'Bitcoind RPC : Transaction.getTransaction()')
return Promise.reject(errors.generic.GEN)
}
}

24
lib/db/mysql-db-wrapper.js

@ -312,21 +312,21 @@ class MySqlDbWrapper {
handleConnect() {
try {
this.pool = mysql.createPool(this.dbConfig)
Logger.info(`Created a database pool of ${this.dbConfig.connectionLimit} connections`)
Logger.info(`Db Wrapper : Created a database pool of ${this.dbConfig.connectionLimit} connections`)
if (debug) {
this.pool.on('acquire', function (conn) {
Logger.info(`Connection ${conn.threadId} acquired`)
Logger.info(`Db Wrapper : Connection ${conn.threadId} acquired`)
})
this.pool.on('enqueue', function (conn) {
Logger.info('Waiting for a new connection slot')
Logger.info('Db Wrapper : Waiting for a new connection slot')
})
this.pool.on('release', function (conn) {
Logger.info(`Connection ${conn.threadId} released`)
Logger.info(`Db Wrapper : Connection ${conn.threadId} released`)
})
}
} catch(e) {
Logger.error(err, 'MySqlDbWrapper.handleConnect() : Problem met while trying to initialize a new pool')
Logger.error(err, 'Db Wrapper : handleConnect() : Problem met while trying to initialize a new pool')
throw e
}
}
@ -348,7 +348,7 @@ class MySqlDbWrapper {
// Destroy previous pool
this.pool.end(err => {
if (err) {
Logger.error(err, 'MySqlDbWrapper.handleReconnect() : Problem met while terminating the pool')
Logger.error(err, 'Db Wrapper : handleReconnect() : Problem met while terminating the pool')
this.timerReconnect = setTimeout(this.handleReconnect.bind(this), 2000)
} else {
this.handleConnect()
@ -362,14 +362,14 @@ class MySqlDbWrapper {
* Ping the mysql server
*/
ping() {
debug && Logger.info(`MySqlDbWrapper.ping() : ${this.pool._freeConnections.length} free connections`)
debug && Logger.info(`Db Wrapper : ping() : ${this.pool._freeConnections.length} free connections`)
// Iterate over all free connections
// which might have been disconnected by the mysql server
for (let c of this.pool._freeConnections) {
c.query('SELECT 1', (err, res, fields) => {
if (debug && err) {
Logger.error(err, `MySqlDbWrapper.ping() : Ping Error`)
Logger.error(err, `Db Wrapper : ping() : Ping Error`)
}
})
}
@ -379,7 +379,7 @@ class MySqlDbWrapper {
* Send a query
*/
async _query(query, retries) {
queryDebug && Logger.info(query)
queryDebug && Logger.info(`Db Wrapper : ${query}`)
if (retries == null)
retries = 5
@ -406,7 +406,7 @@ class MySqlDbWrapper {
reject(err)
}
} else {
queryDebug && Logger.info(result)
queryDebug && Logger.info(`Db Wrapper : ${result}`)
resolve(result)
}
})
@ -421,8 +421,8 @@ class MySqlDbWrapper {
* Log a query error
*/
queryError(err, query) {
Logger.error(err, 'MySqlDbWrapper.query() : Query Error')
Logger.error(query)
Logger.error(err, 'Db Wrapper : query() : Query Error')
Logger.error(null, `Db Wrapper : ${query}`)
}
/**

6
lib/http-server/http-server.js

@ -50,14 +50,14 @@ class HttpServer {
// Error handler, should be final middleware
this.app.use(function(err, req, res, next) {
if (res.headersSent) return next(err)
Logger.error(err.stack, 'HttpServer.start()')
Logger.error(err.stack, 'HttpServer : start()')
const ret = {status: 'Server error'}
HttpServer.sendError(res, ret, 500)
})
// Start a http server
this.server = this.app.listen(this.port, this.host, () => {
Logger.info(`HTTP server listening on ${this.host}:${this.port}`)
Logger.info(`HttpServer : Listening on ${this.host}:${this.port}`)
})
this.server.timeout = 600 * 1000
@ -184,7 +184,7 @@ class HttpServer {
* @param {function} next - next middleware
*/
static requestLogger(req, res, next) {
Logger.info(`${req.method} ${req.url}`)
Logger.info(`HttpServer : ${req.method} ${req.url}`)
next()
}

2
lib/indexer-rpc/rpc-client.js

@ -82,7 +82,7 @@ class RpcClient {
await client.sendRequest('server.version', 'dojo', ['1.0', '1.4'])
} catch(e) {
client = null
Logger.info('Indexer RPC API is still unreachable. New attempt in 20s.')
Logger.info('Indexer RPC : API is still unreachable. New attempt in 20s.')
return util.delay(20000).then(() => {
return RpcClient.waitForIndexerRpcApi()
})

9
lib/logger.js

@ -18,7 +18,7 @@ class Logger {
* @param {boolean} json - true if msg is a json object, false otherwise
*/
static info(msg, json) {
const logEntry = Logger._formatLog(msg, json)
const logEntry = Logger._formatLog('INFO', msg, json)
console.log(logEntry)
}
@ -28,7 +28,7 @@ class Logger {
* @param {string} msg - message associated to the error
*/
static error(e, msg) {
const logEntry = Logger._formatLog(msg)
const logEntry = Logger._formatLog('ERROR', msg)
console.error(logEntry)
//const errorEntry = Logger._formatLog(e)
@ -40,10 +40,11 @@ class Logger {
/**
* Format log entry
* @param {string} level - log level label
* @param {string/object} msg
* @param {boolean} json - true if msg is a json object, false otherwise
*/
static _formatLog(msg, json) {
static _formatLog(level, msg, json) {
json = json || false
const data = json ? JSON.stringify(msg, null, 2) : msg
@ -59,7 +60,7 @@ class Logger {
const s = util.pad10(D.getUTCSeconds())
const ms = util.pad100(D.getUTCMilliseconds())
const parts = [y, '-', m, '-', d, 'T', h, ':', mn, ':', s, '.', ms,'Z ', data]
const parts = [y, '-', m, '-', d, 'T', h, ':', mn, ':', s, 'Z ', level, ' ', data]
return parts.join('')
}
}
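
Taken together, these hunks change the entry format from "timestamp-with-milliseconds message" to "timestamp LEVEL message". A minimal sketch of the reworked formatter, with the zero-padding helpers (util.pad10/util.pad100 in the real module) inlined as an assumption:

class Logger {
    static info(msg, json) {
        console.log(Logger._formatLog('INFO', msg, json))
    }

    static error(e, msg) {
        console.error(Logger._formatLog('ERROR', msg))
    }

    static _formatLog(level, msg, json) {
        // Stringify json payloads, pass strings through untouched
        const data = (json || false) ? JSON.stringify(msg, null, 2) : msg
        const D = new Date()
        const pad = n => String(n).padStart(2, '0') // stand-in for util.pad10/util.pad100
        const ts = D.getUTCFullYear()
            + '-' + pad(D.getUTCMonth() + 1)
            + '-' + pad(D.getUTCDate())
            + 'T' + pad(D.getUTCHours())
            + ':' + pad(D.getUTCMinutes())
            + ':' + pad(D.getUTCSeconds())
            + 'Z'
        // Level label inserted after the timestamp; milliseconds dropped
        return `${ts} ${level} ${data}`
    }
}

// Logger.info('API : Completed GET /fees')
// -> 2020-05-24T10:00:00Z INFO API : Completed GET /fees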

4
lib/remote-importer/bitcoind-wrapper.js

@ -70,7 +70,7 @@ class BitcoindWrapper extends Wrapper {
}
if (filterAddr && ret.ntx > keys.addrFilterThreshold) {
Logger.info(` import of ${address} rejected (too many transactions - ${ret.ntx})`)
Logger.info(`Importer : Import of ${address} rejected (too many transactions - ${ret.ntx})`)
return {
address: address,
ntx: 0,
@ -116,7 +116,7 @@ class BitcoindWrapper extends Wrapper {
for (let i in aRet) {
if (filterAddr && aRet[i].ntx > keys.addrFilterThreshold) {
Logger.info(` import of ${aRet[i].address} rejected (too many transactions - ${aRet[i].ntx})`)
Logger.info(`Importer : Import of ${aRet[i].address} rejected (too many transactions - ${aRet[i].ntx})`)
aRet.splice(i, 1)
}
}

2
lib/remote-importer/esplora-wrapper.js

@ -93,7 +93,7 @@ class EsploraWrapper extends Wrapper {
return ret
} else if (filterAddr && ret.ntx > keys.addrFilterThreshold) {
// we have too many transactions
Logger.info(` import of ${ret.address} rejected (too many transactions - ${ret.ntx})`)
Logger.info(`Importer : Import of ${ret.address} rejected (too many transactions - ${ret.ntx})`)
ret.txids = []
ret.ntx = 0
return ret

4
lib/remote-importer/local-indexer-wrapper.js

@ -70,7 +70,7 @@ class LocalIndexerWrapper extends Wrapper {
}
if (filterAddr && ret.ntx > keys.addrFilterThreshold) {
Logger.info(` import of ${address} rejected (too many transactions - ${ret.ntx})`)
Logger.info(`Importer : Import of ${address} rejected (too many transactions - ${ret.ntx})`)
return {
address: address,
ntx: 0,
@ -123,7 +123,7 @@ class LocalIndexerWrapper extends Wrapper {
for (let i in aRet) {
if (filterAddr && aRet[i].ntx > keys.addrFilterThreshold) {
Logger.info(` import of ${aRet[i].address} rejected (too many transactions - ${aRet[i].ntx})`)
Logger.info(`Importer : Import of ${aRet[i].address} rejected (too many transactions - ${aRet[i].ntx})`)
aRet.splice(i, 1)
}
}

4
lib/remote-importer/oxt-wrapper.js

@ -64,7 +64,7 @@ class OxtWrapper extends Wrapper {
// Check if we should filter this address
if (filterAddr && ret.ntx > keys.addrFilterThreshold) {
Logger.info(` import of ${ret.address} rejected (too many transactions - ${ret.ntx})`)
Logger.info(`Importer : Import of ${ret.address} rejected (too many transactions - ${ret.ntx})`)
return ret
}
@ -98,7 +98,7 @@ class OxtWrapper extends Wrapper {
// Check if we should filter this address
if (filterAddr && retAddr.ntx > keys.addrFilterThreshold) {
Logger.info(` import of ${retAddr.address} rejected (too many transactions - ${retAddr.ntx})`)
Logger.info(`Importer : Import of ${retAddr.address} rejected (too many transactions - ${retAddr.ntx})`)
} else {
retAddr.txids = r.txids
}

28
lib/remote-importer/remote-importer.js

@ -133,14 +133,14 @@ class RemoteImporter {
return Promise.reject(errors.xpub.INVALID)
if (this.importing[xpub]) {
Logger.info(` Import overlap for ${xpub}`)
Logger.info(`Importer : Import overlap for ${xpub}`)
return Promise.reject(errors.xpub.OVERLAP)
}
this.importing[xpub] = true
const ts = hdaHelper.typeString(type)
Logger.info(`Importing ${xpub} ${ts}`)
Logger.info(`Importer : Importing ${xpub} ${ts}`)
const t0 = Date.now()
const chains = [0,1]
@ -181,9 +181,9 @@ class RemoteImporter {
await this._importTransactions(aAddresses, txns)
} catch(e) {
Logger.error(e, `RemoteImporter.importHDAccount() : xpub ${xpub}`)
Logger.error(e, `Importer : RemoteImporter.importHDAccount() : xpub ${xpub}`)
} finally {
Logger.info(` xpub import done in ${((Date.now() - t0)/1000).toFixed(1)}s`)
Logger.info(`Importer : xpub import done in ${((Date.now() - t0)/1000).toFixed(1)}s`)
delete this.importing[xpub]
return true
}
@ -233,7 +233,7 @@ class RemoteImporter {
// Update derived index
d = u + G
Logger.info(` derived M/${c}/${A.join(',')}`)
Logger.info(`Importer : derived M/${c}/${A.join(',')}`)
const addrMap = {}
for (let a of ret.addresses)
@ -261,7 +261,7 @@ class RemoteImporter {
}
}
Logger.info(` Got ${scanTx.length} transactions`)
Logger.info(`Importer : Got ${scanTx.length} transactions`)
// Retrieve the transactions by batches of 200 transactions
const txsChunks = util.splitList(scanTx, 200)
@ -276,7 +276,7 @@ class RemoteImporter {
}
}
} catch(e) {
Logger.error(e, `RemoteImporter.xpubScan() : getTransactions error`)
Logger.error(e, `Importer : RemoteImporter.xpubScan() : getTransactions error`)
}
if (gotTransactions) {
@ -290,7 +290,7 @@ class RemoteImporter {
}
} catch(e) {
Logger.error(e, `RemoteImporter.xpubScan() : xpub ${xpub} ${c} ${d} ${u} ${G}`)
Logger.error(e, `Importer : RemoteImporter.xpubScan() : xpub ${xpub} ${c} ${d} ${u} ${G}`)
} finally {
// Push everything up the rabbit hole
return ret
@ -313,14 +313,14 @@ class RemoteImporter {
addresses.push(address)
this.importing[address] = true
} else {
Logger.info(`Note: Import overlap for ${address}. Skipping`)
Logger.info(`Importer : Import overlap for ${address}. Skipping`)
}
}
if (addresses.length == 0)
return true
Logger.info(`Importing ${addresses.join(',')}`)
Logger.info(`Importer : Importing ${addresses.join(',')}`)
try {
const scanTx = []
@ -338,7 +338,7 @@ class RemoteImporter {
}
}
Logger.info(` Got ${scanTx.length} transactions`)
Logger.info(`Importer : Got ${scanTx.length} transactions`)
// Retrieve the transactions by batches of 100 transactions
const txsChunks = util.splitList(scanTx, 100)
@ -354,7 +354,7 @@ class RemoteImporter {
await this._importTransactions(addresses, txns)
} catch(e) {
Logger.error(e, `RemoteImporter.importAddresses() : ${candidates.join(',')}`)
Logger.error(e, `Importer : RemoteImporter.importAddresses() : ${candidates.join(',')}`)
} finally {
const dt = Date.now() - t0
@ -362,7 +362,7 @@ class RemoteImporter {
const N = addresses.length
if (N > 0)
Logger.info(` Imported ${N} addresses in ${ts}s (${(dt/N).toFixed(0)} ms/addr)`)
Logger.info(`Importer : Imported ${N} addresses in ${ts}s (${(dt/N).toFixed(0)} ms/addr)`)
for (let address of addresses)
delete this.importing[address]
@ -451,7 +451,7 @@ class RemoteImporter {
await db.addInputs(inputs)
} catch(e) {
Logger.error(e, `RemoteImporter.addTransactions() :`)
Logger.error(e, `Importer : RemoteImporter.addTransactions() :`)
}
}

6
lib/remote-importer/sources-mainnet.js

@ -34,16 +34,16 @@ class SourcesMainnet extends Sources {
// If local bitcoind option is activated
// we'll use the local node as our unique source
this.source = new BitcoindWrapper()
Logger.info('Activated Bitcoind as the data source for imports')
Logger.info('Importer : Activated Bitcoind as the data source for imports')
} else if (keys.indexer.active == 'local_indexer') {
// If local indexer option is activated
// we'll use the local indexer as our unique source
this.source = new LocalIndexerWrapper()
Logger.info('Activated local indexer as the data source for imports')
Logger.info('Importer : Activated local indexer as the data source for imports')
} else {
// Otherwise, we'll use the rest api provided by OXT
this.source = new OxtWrapper(keys.indexer.oxt)
Logger.info('Activated OXT API as the data source for imports')
Logger.info('Importer : Activated OXT API as the data source for imports')
}
}

6
lib/remote-importer/sources-testnet.js

@ -35,16 +35,16 @@ class SourcesTestnet extends Sources {
// If local bitcoind option is activated
// we'll use the local node as our unique source
this.source = new BitcoindWrapper()
Logger.info('Activated Bitcoind as the data source for imports')
Logger.info('Importer : Activated Bitcoind as the data source for imports')
} else if (keys.indexer.active == 'local_indexer') {
// If local indexer option is activated
// we'll use the local indexer as our unique source
this.source = new LocalIndexerWrapper()
Logger.info('Activated local indexer as the data source for imports')
Logger.info('Importer : Activated local indexer as the data source for imports')
} else {
// Otherwise, we'll use the rest api provided by Esplora
this.source = new EsploraWrapper(keys.indexer.esplora)
Logger.info('Activated Esplora API as the data source for imports')
Logger.info('Importer : Activated Esplora API as the data source for imports')
}
}

4
lib/remote-importer/sources.js

@ -48,7 +48,7 @@ class Sources {
ret.txids = result.txids
} catch(e) {
Logger.error(null, `Sources.getAddress() : ${address} from ${this.source.base}`)
Logger.error(null, `Importer : Sources.getAddress() : ${address} from ${this.source.base}`)
} finally {
return ret
}
@ -75,7 +75,7 @@ class Sources {
}
} catch(e) {
Logger.error(null, `Sources.getAddresses() : Error while requesting ${addresses} from ${this.source.base}`)
Logger.error(null, `Importer : Sources.getAddresses() : Error while requesting ${addresses} from ${this.source.base}`)
} finally {
return ret
}

4
pushtx/index-orchestrator.js

@ -18,8 +18,8 @@
/**
* PushTx Orchestrator
*/
Logger.info('Process ID: ' + process.pid)
Logger.info('Preparing the pushTx Orchestrator')
Logger.info('Orchestrator : Process ID: ' + process.pid)
Logger.info('Orchestrator : Preparing the pushTx Orchestrator')
// Wait for Bitcoind RPC API
// being ready to process requests

4
pushtx/index.js

@ -19,8 +19,8 @@
/**
* PushTx API
*/
Logger.info('Process ID: ' + process.pid)
Logger.info('Preparing the pushTx API')
Logger.info('PushTx : Process ID: ' + process.pid)
Logger.info('PushTx : Preparing the pushTx API')
// Wait for Bitcoind RPC API
// being ready to process requests

22
pushtx/orchestrator.js

@ -59,11 +59,11 @@ class Orchestrator {
this.onBlockHash(message)
break
default:
Logger.info(topic.toString())
Logger.info(`Orchestrator : ${topic.toString()}`)
}
})
Logger.info('Listening for blocks')
Logger.info('Orchestrator : Listening for blocks')
}
/**
@ -80,7 +80,7 @@ class Orchestrator {
const header = await this.rpcClient.getblockheader(blockHash, true)
const height = header.height
Logger.info(`Block ${height} ${blockHash}`)
Logger.info(`Orchestrator : Block ${height} ${blockHash}`)
let nbTxsPushed
let rpcConnOk = true
@ -102,7 +102,7 @@ class Orchestrator {
try {
parentTx = await this.rpcClient.getrawtransaction(tx.schParentTxid, true)
} catch(e) {
Logger.error(e, 'Transaction.getTransaction()')
Logger.error(e, 'Orchestrator : Transaction.getTransaction()')
}
}
@ -110,14 +110,14 @@ class Orchestrator {
// Push the transaction
try {
await pushTxProcessor.pushTx(tx.schRaw)
Logger.info(`Pushed scheduled transaction ${tx.schTxid}`)
Logger.info(`Orchestrator : Pushed scheduled transaction ${tx.schTxid}`)
} catch(e) {
const msg = 'A problem was met while trying to push a scheduled transaction'
Logger.error(e, `Orchestrator.onBlockHash() : ${msg}`)
Logger.error(e, `Orchestrator : Orchestrator.onBlockHash() : ${msg}`)
// Check if it's an issue with the connection to the RPC API
// (=> immediately stop the loop)
if (RpcClient.isConnectionError(e)) {
Logger.info('Connection issue')
Logger.info('Orchestrator : Connection issue')
rpcConnOk = false
break
}
@ -130,7 +130,7 @@ class Orchestrator {
await this.updateTriggers(tx.schID, shift)
} catch(e) {
const msg = 'A problem was met while shifting scheduled transactions'
Logger.error(e, `Orchestrator.onBlockHash() : ${msg}`)
Logger.error(e, `Orchestrator : Orchestrator.onBlockHash() : ${msg}`)
}
}
@ -141,14 +141,14 @@ class Orchestrator {
nbTxsPushed++
} catch(e) {
const msg = 'A problem was met while trying to delete a scheduled transaction'
Logger.error(e, `Orchestrator.onBlockHash() : ${msg}`)
Logger.error(e, `Orchestrator : Orchestrator.onBlockHash() : ${msg}`)
}
}
}
} while (rpcConnOk && nbTxsPushed > 0)
} catch(e) {
Logger.error(e, 'Orchestrator.onBlockHash() : Error')
Logger.error(e, 'Orchestrator : Orchestrator.onBlockHash() : Error')
} finally {
// Release the semaphor
await this._onBlockHashSemaphor.release()
@ -173,7 +173,7 @@ class Orchestrator {
await db.updateTriggerScheduledTransaction(tx.schID, newTrigger)
// Update the triggers of next transactions in the chain
await this.updateTriggers(tx.schID, shift)
Logger.info(`Rescheduled tx ${tx.schTxid} (trigger=${newTrigger})`)
Logger.info(`Orchestrator : Rescheduled tx ${tx.schTxid} (trigger=${newTrigger})`)
}
}

6
pushtx/pushtx-processor.js

@ -51,7 +51,7 @@ class PushTxProcessor {
const tx = bitcoin.Transaction.fromHex(rawtx)
for (let output of tx.outs)
value += output.value
Logger.info('Push for ' + (value / 1e8).toFixed(8) + ' BTC')
Logger.info('PushTx : Push for ' + (value / 1e8).toFixed(8) + ' BTC')
} catch(e) {
throw errors.tx.PARSE
}
@ -60,14 +60,14 @@ class PushTxProcessor {
// Attempt to send via RPC to the bitcoind instance
try {
const txid = await this.rpcClient.sendrawtransaction(rawtx)
Logger.info('Pushed!')
Logger.info('PushTx : Pushed!')
// Update the stats
status.updateStats(value)
// Notify the tracker
this.notifSock.send(['pushtx', rawtx])
return txid
} catch(err) {
Logger.info('Push failed')
Logger.info('PushTx : Push failed')
throw err
}
}

8
pushtx/pushtx-rest-api.js

@ -197,21 +197,21 @@ class PushTxRestApi {
} catch(e) {}
if (msg.code && msg.message) {
Logger.error(null, 'Error ' + msg.code + ': ' + msg.message)
Logger.error(null, 'PushTx : Error ' + msg.code + ': ' + msg.message)
ret = {
message: msg.message,
code: msg.code
}
} else {
Logger.error(err.message, 'ERROR')
Logger.error(err.message, 'PushTx : ')
ret = err.message
}
} else {
Logger.error(err, 'ERROR')
Logger.error(err, 'PushTx : ')
ret = err
}
} catch (e) {
Logger.error(e, 'ERROR')
Logger.error(e, 'PushTx : ')
ret = e
} finally {
HttpServer.sendError(res, ret)

2
pushtx/status.js

@ -78,7 +78,7 @@ class Status {
await this._refreshNetworkInfo()
await this._refreshBlockchainInfo()
} catch (e) {
Logger.error(e, 'Status.getCurrent() : Error')
Logger.error(e, 'PushTx : Status.getCurrent() : Error')
} finally {
return this.status
}

4
pushtx/transactions-scheduler.js

@ -69,7 +69,7 @@ class TransactionsScheduler {
// Check that nlocktimes are matching
if (!(tx.locktime && tx.locktime == entry.nlocktime)) {
const msg = `TransactionsScheduler.schedule() : nLockTime mismatch : ${tx.locktime} - ${entry.nlocktime}`
Logger.error(null, msg)
Logger.error(null, `PushTx : ${msg}`)
throw errors.pushtx.NLOCK_MISMATCH
}
// Check that order of hop and nlocktime values are consistent
@ -113,7 +113,7 @@ class TransactionsScheduler {
}
parentId = await db.addScheduledTransaction(objTx)
Logger.info(`Registered scheduled tx ${objTx.txid} (trigger=${objTx.trigger})`)
Logger.info(`PushTx : Registered scheduled tx ${objTx.txid} (trigger=${objTx.trigger})`)
parentTxid = tx.getId()
parentNlocktime = entry.nlocktime
}

12
tracker/block.js

@ -33,7 +33,7 @@ class Block extends TransactionsBundle {
* @returns {Promise - object[]} returns an array of transactions to be broadcast
*/
async checkBlock() {
Logger.info('Beginning to process new block.')
Logger.info('Tracker : Beginning to process new block.')
let block
const txsForBroadcast = []
@ -42,7 +42,7 @@ class Block extends TransactionsBundle {
block = bitcoin.Block.fromHex(this.hex)
this.transactions = block.transactions
} catch (e) {
Logger.error(e, 'Block.checkBlock()')
Logger.error(e, 'Tracker : Block.checkBlock()')
Logger.error(null, this.header)
return Promise.reject(e)
}
@ -74,7 +74,7 @@ class Block extends TransactionsBundle {
blockParent: prevID
})
Logger.info(` Added block ${this.header.height} (id=${blockId})`)
Logger.info(`Tracker : Added block ${this.header.height} (id=${blockId})`)
// Confirms the transactions
const txids = this.transactions.map(t => t.getId())
@ -85,7 +85,7 @@ class Block extends TransactionsBundle {
// Logs and result returned
const dt = ((Date.now()-t0)/1000).toFixed(1)
const per = ((Date.now()-t0)/ntx).toFixed(0)
Logger.info(` Finished block ${this.header.height}, ${dt}s, ${ntx} tx, ${per}ms/tx`)
Logger.info(`Tracker : Finished block ${this.header.height}, ${dt}s, ${ntx} tx, ${per}ms/tx`)
return txsForBroadcast
}
@ -96,7 +96,7 @@ class Block extends TransactionsBundle {
* @returns {Promise}
*/
async checkBlockHeader(prevBlockID) {
Logger.info('Beginning to process new block header.')
Logger.info('Tracker : Beginning to process new block header.')
// Insert the block header into the database
const blockId = await db.addBlock({
@ -106,7 +106,7 @@ class Block extends TransactionsBundle {
blockParent: prevBlockID
})
Logger.info(` Added block header ${this.header.height} (id=${blockId})`)
Logger.info(`Tracker : Added block header ${this.header.height} (id=${blockId})`)
return blockId
}

44
tracker/blockchain-processor.js

@ -78,7 +78,7 @@ class BlockchainProcessor extends AbstractProcessor {
*/
async catchupIBDMode() {
try {
Logger.info('Tracker Startup (IBD mode)')
Logger.info('Tracker : Tracker Startup (IBD mode)')
const info = await this.client.getblockchaininfo()
const daemonNbBlocks = info.blocks
@ -91,7 +91,7 @@ class BlockchainProcessor extends AbstractProcessor {
// If no header or block loaded by bitcoind => try later
if (daemonNbHeaders == 0 || daemonNbBlocks == 0) {
Logger.info('New attempt scheduled in 30s (waiting for block headers)')
Logger.info('Tracker : New attempt scheduled in 30s (waiting for block headers)')
return util.delay(30000).then(() => {
return this.catchupIBDMode()
})
@ -101,7 +101,7 @@ class BlockchainProcessor extends AbstractProcessor {
// If blocks need to be downloaded by bitcoind => try later
if (daemonNbBlocks - 1 <= dbMaxHeight) {
Logger.info('New attempt scheduled in 10s (waiting for blocks)')
Logger.info('Tracker : New attempt scheduled in 10s (waiting for blocks)')
return util.delay(10000).then(() => {
return this.catchupIBDMode()
})
@ -110,7 +110,7 @@ class BlockchainProcessor extends AbstractProcessor {
} else {
const blockRange = _.range(dbMaxHeight + 1, daemonNbBlocks + 1)
Logger.info(`Sync ${blockRange.length} blocks`)
Logger.info(`Tracker : Sync ${blockRange.length} blocks`)
await util.seriesCall(blockRange, async height => {
try {
@ -118,13 +118,13 @@ class BlockchainProcessor extends AbstractProcessor {
const header = await this.client.getblockheader(blockHash, true)
prevBlockId = await this.processBlockHeader(header, prevBlockId)
} catch(e) {
Logger.error(e, 'BlockchainProcessor.catchupIBDMode()')
Logger.error(e, 'Tracker : BlockchainProcessor.catchupIBDMode()')
process.exit()
}
}, 'Tracker syncing', true)
// Schedule a new iteration (in case more blocks need to be loaded)
Logger.info('Start a new iteration')
Logger.info('Tracker : Start a new iteration')
return this.catchupIBDMode()
}
@ -134,7 +134,7 @@ class BlockchainProcessor extends AbstractProcessor {
}
} catch(e) {
Logger.error(e, 'BlockchainProcessor.catchupIBDMode()')
Logger.error(e, 'Tracker : BlockchainProcessor.catchupIBDMode()')
throw e
}
}
@ -149,7 +149,7 @@ class BlockchainProcessor extends AbstractProcessor {
*/
async catchupNormalMode() {
try {
Logger.info('Tracker Startup (normal mode)')
Logger.info('Tracker : Tracker Startup (normal mode)')
const info = await this.client.getblockchaininfo()
const daemonNbBlocks = info.blocks
@ -162,7 +162,7 @@ class BlockchainProcessor extends AbstractProcessor {
// Compute blocks range to be processed
const blockRange = _.range(highest.blockHeight, daemonNbBlocks + 1)
Logger.info(`Sync ${blockRange.length} blocks`)
Logger.info(`Tracker : Sync ${blockRange.length} blocks`)
// Process the blocks
return util.seriesCall(blockRange, async height => {
@ -171,13 +171,13 @@ class BlockchainProcessor extends AbstractProcessor {
const header = await this.client.getblockheader(hash)
return this.processBlock(header)
} catch(e) {
Logger.error(e, 'BlockchainProcessor.catchupNormalMode()')
Logger.error(e, 'Tracker : BlockchainProcessor.catchupNormalMode()')
process.exit()
}
}, 'Tracker syncing', true)
} catch(e) {
Logger.error(e, 'BlockchainProcessor.catchupNormalMode()')
Logger.error(e, 'Tracker : BlockchainProcessor.catchupNormalMode()')
}
}
@ -196,11 +196,11 @@ class BlockchainProcessor extends AbstractProcessor {
this.onBlockHash(message)
break
default:
Logger.info(topic.toString())
Logger.info(`Tracker : ${topic.toString()}`)
}
})
Logger.info('Listening for blocks')
Logger.info('Tracker : Listening for blocks')
}
/**
@ -241,11 +241,11 @@ class BlockchainProcessor extends AbstractProcessor {
try {
const header = await this.client.getblockheader(blockHash, true)
Logger.info(`Block #${header.height} ${blockHash}`)
Logger.info(`Tracker : Block #${header.height} ${blockHash}`)
// Grab all headers between this block and last known
headers = await this.chainBacktrace([header])
} catch(err) {
Logger.error(err, `BlockchainProcessor.onBlockHash() : error in getblockheader(${blockHash})`)
Logger.error(err, `Tracker : BlockchainProcessor.onBlockHash() : error in getblockheader(${blockHash})`)
}
if(headers == null)
@ -267,7 +267,7 @@ class BlockchainProcessor extends AbstractProcessor {
})
} catch(e) {
Logger.error(e, 'BlockchainProcessor.onBlockHash()')
Logger.error(e, 'Tracker : BlockchainProcessor.onBlockHash()')
} finally {
// Release the semaphor
await this._onBlockHashSemaphor.release()
@ -285,7 +285,7 @@ class BlockchainProcessor extends AbstractProcessor {
const deepest = headers[headers.length - 1]
if (headers.length > 1)
Logger.info(`chainBacktrace @ height ${deepest.height}, ${headers.length} blocks`)
Logger.info(`Tracker : chainBacktrace @ height ${deepest.height}, ${headers.length} blocks`)
// Look for previous block in the database
const block = await db.getBlockByHash(deepest.previousblockhash)
@ -313,7 +313,7 @@ class BlockchainProcessor extends AbstractProcessor {
if (txs.length > 0) {
// Cancel confirmation of transactions included in reorg'd blocks
Logger.info(`Backtrace: unconfirm ${txs.length} transactions in reorg`)
Logger.info(`Tracker : Backtrace: unconfirm ${txs.length} transactions in reorg`)
const txids = txs.map(t => t.txnTxid)
await db.unconfirmTransactions(txids)
}
@ -345,12 +345,12 @@ class BlockchainProcessor extends AbstractProcessor {
// Process the blocks
return util.seriesCall(blockRange, async height => {
try {
Logger.info(`Rescanning block ${height}`)
Logger.info(`Tracker : Rescanning block ${height}`)
const hash = await this.client.getblockhash(height)
const header = await this.client.getblockheader(hash)
return this.processBlock(header)
} catch(e) {
Logger.error(e, 'BlockchainProcessor.rescan()')
Logger.error(e, 'Tracker : BlockchainProcessor.rescan()')
throw e
}
}, 'Tracker rescan', true)
@ -379,7 +379,7 @@ class BlockchainProcessor extends AbstractProcessor {
} catch(e) {
// The show must go on.
// TODO: further notification that this block did not check out
Logger.error(e, 'BlockchainProcessor.processBlock()')
Logger.error(e, 'Tracker : BlockchainProcessor.processBlock()')
}
}
@ -394,7 +394,7 @@ class BlockchainProcessor extends AbstractProcessor {
const block = new Block(null, header)
return block.checkBlockHeader(prevBlockID)
} catch(e) {
Logger.error(e, 'BlockchainProcessor.processBlockHeader()')
Logger.error(e, 'Tracker : BlockchainProcessor.processBlockHeader()')
throw e
}
}

4
tracker/index.js

@ -16,8 +16,8 @@
const TrackerRestApi = require('./tracker-rest-api')
Logger.info('Process ID: ' + process.pid)
Logger.info('Preparing the tracker')
Logger.info('Tracker : Process ID: ' + process.pid)
Logger.info('Tracker : Preparing the tracker')
// Wait for Bitcoind RPC API
// being ready to process requests

30
tracker/mempool-processor.js

@ -95,11 +95,11 @@ class MempoolProcessor extends AbstractProcessor {
this.onPushTx(message)
break
default:
Logger.info(topic.toString())
Logger.info(`Tracker : ${topic.toString()}`)
}
})
Logger.info('Listening for pushTx')
Logger.info('Tracker : Listening for pushTx')
// Socket listening to pushTx Orchestrator
this.orchestratorSock = zmq.socket('sub')
@ -112,11 +112,11 @@ class MempoolProcessor extends AbstractProcessor {
this.onPushTx(message)
break
default:
Logger.info(topic.toString())
Logger.info(`Tracker : ${topic.toString()}`)
}
})
Logger.info('Listening for pushTx orchestrator')
Logger.info('Tracker : Listening for pushTx orchestrator')
// Socket listening to bitcoind Txs messages
this.txSock = zmq.socket('sub')
@ -129,11 +129,11 @@ class MempoolProcessor extends AbstractProcessor {
this.onTx(message)
break
default:
Logger.info(topic.toString())
Logger.info(`Tracker : ${topic.toString()}`)
}
})
Logger.info('Listening for mempool transactions')
Logger.info('Tracker : Listening for mempool transactions')
}
/**
@ -145,7 +145,7 @@ class MempoolProcessor extends AbstractProcessor {
await this._refreshActiveStatus()
const activeLbl = this.isActive ? 'active' : 'inactive'
Logger.info(`Processing ${activeLbl} Mempool (${this.mempoolBuffer.size()} transactions)`)
Logger.info(`Tracker : Processing ${activeLbl} Mempool (${this.mempoolBuffer.size()} transactions)`)
let currentMempool = new TransactionsBundle(this.mempoolBuffer.toArray())
this.mempoolBuffer.clear()
@ -171,7 +171,7 @@ class MempoolProcessor extends AbstractProcessor {
let tx = bitcoin.Transaction.fromBuffer(buf)
this.mempoolBuffer.addTransaction(tx)
} catch (e) {
Logger.error(e, 'MempoolProcessor.onTx()')
Logger.error(e, 'Tracker : MempoolProcessor.onTx()')
return Promise.reject(e)
}
}
@ -190,7 +190,7 @@ class MempoolProcessor extends AbstractProcessor {
let pushedTx = bitcoin.Transaction.fromHex(buf.toString())
const txid = pushedTx.getId()
Logger.info(`Processing tx for pushtx ${txid}`)
Logger.info(`Tracker : Processing tx for pushtx ${txid}`)
if (!TransactionsBundle.cache.has(txid)) {
// Process the transaction
@ -201,7 +201,7 @@ class MempoolProcessor extends AbstractProcessor {
this.notifyTx(txCheck.tx)
}
} catch (e) {
Logger.error(e, 'MempoolProcessor.onPushTx()')
Logger.error(e, 'Tracker : MempoolProcessor.onPushTx()')
return Promise.reject(e)
}
}
@ -213,7 +213,7 @@ class MempoolProcessor extends AbstractProcessor {
async checkUnconfirmed() {
const t0 = Date.now()
Logger.info('Processing unconfirmed transactions')
Logger.info('Tracker : Processing unconfirmed transactions')
const unconfirmedTxs = await db.getUnconfirmedTransactions()
@ -226,7 +226,7 @@ class MempoolProcessor extends AbstractProcessor {
// Transaction is confirmed
const block = await db.getBlockByHash(rtx.blockhash)
if (block && block.blockID) {
Logger.info(`Marking TXID ${tx.txnTxid} confirmed`)
Logger.info(`Tracker : Marking TXID ${tx.txnTxid} confirmed`)
return db.confirmTransactions([tx.txnTxid], block.blockID)
}
},
@ -238,7 +238,7 @@ class MempoolProcessor extends AbstractProcessor {
}
)
} catch(e) {
Logger.error(e, 'MempoolProcessor.checkUnconfirmed()')
Logger.error(e, 'Tracker : MempoolProcessor.checkUnconfirmed()')
}
})
}
@ -247,7 +247,7 @@ class MempoolProcessor extends AbstractProcessor {
const ntx = unconfirmedTxs.length
const dt = ((Date.now() - t0) / 1000).toFixed(1)
const per = (ntx == 0) ? 0 : ((Date.now() - t0) / ntx).toFixed(0)
Logger.info(` Finished processing unconfirmed transactions ${dt}s, ${ntx} tx, ${per}ms/tx`)
Logger.info(`Tracker : Finished processing unconfirmed transactions ${dt}s, ${ntx} tx, ${per}ms/tx`)
}
/**
@ -273,7 +273,7 @@ class MempoolProcessor extends AbstractProcessor {
* Log mempool statistics
*/
displayMempoolStats() {
Logger.info(`Mempool Size: ${this.mempoolBuffer.size()}`)
Logger.info(`Tracker : Mempool Size: ${this.mempoolBuffer.size()}`)
}
}

10
tracker/transaction.js

@ -63,7 +63,7 @@ class Transaction {
}
} catch(e) {
Logger.error(e, 'Transaction.checkTransaction()')
Logger.error(e, 'Tracker : Transaction.checkTransaction()')
return Promise.reject(e)
}
}
@ -122,7 +122,7 @@ class Transaction {
// Detect potential double spends
if (r.spendingTxnID !== null && r.spendingTxnID != this.storedTxnID) {
Logger.info(`DOUBLE SPEND of ${r.txnTxid}-${r.outIndex} by ${this.txid}!`)
Logger.info(`Tracker : DOUBLE SPEND of ${r.txnTxid}-${r.outIndex} by ${this.txid}!`)
// Delete the existing transaction that has been double-spent:
// since the deepest block keeps its transactions, this will
// eventually work itself out, and the wallet will not show
@ -366,7 +366,7 @@ class Transaction {
const derived = await hdaHelper.deriveAddresses(xpub, chain, indices, hdType)
Array.prototype.push.apply(newAddresses, derived)
Logger.info(`Derived hdID(${hdAccount.hdID}) M/${chain}/${indices.join(',')}`)
Logger.info(`Tracker : Derived hdID(${hdAccount.hdID}) M/${chain}/${indices.join(',')}`)
// Update view of derived address indices
derivedIndices[chain] = chainMaxUsedIndex + gapLimit[chain]
@ -374,7 +374,7 @@ class Transaction {
// Check derived addresses for use in this transaction
for (let d of derived) {
if (indexedOutputs[d.address]) {
Logger.info(`Derived address already in outputs: M/${d.chain}/${d.index}`)
Logger.info(`Tracker : Derived address already in outputs: M/${d.chain}/${d.index}`)
// This transaction spends to an address
// beyond the original derived gap limit!
chainMaxUsedIndex = d.index
@ -405,7 +405,7 @@ class Transaction {
locktime: this.tx.locktime,
})
Logger.info(`Storing transaction ${this.txid}`)
Logger.info(`Tracker : Storing transaction ${this.txid}`)
}
}
