
Merge pull request #154 from Samourai-Wallet/feat_mydojo_logs

manage all logs with docker log system
kenshin samourai, 5 years ago, committed by GitHub
commit f94b0f4dd8
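In short: this commit removes the per-module log files that each container used to write under /data/logs (via forever, nginx, bitcoind and whirlpool-cli) and routes everything to the containers' stdout/stderr, where Docker's json-file logging driver captures and rotates it (max-size 20m, max-file 10 per container). `dojo.sh logs` becomes a thin wrapper around `docker-compose logs`. A minimal sketch of the resulting workflow (the `node` service and `nodejs` container names are taken from the diffs below):

```
# Through the wrapper script (last 100 lines of the NodeJS modules)
./dojo.sh logs node -n 100

# Or directly through Docker (follow the rotated json-file logs)
docker logs --tail=50 --follow nodejs
```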
Changed files (changed lines per file in parentheses):

  1. accounts/api-helper.js (2)
  2. accounts/fees-rest-api.js (2)
  3. accounts/headers-rest-api.js (4)
  4. accounts/index-cluster.js (6)
  5. accounts/index.js (4)
  6. accounts/multiaddr-rest-api.js (4)
  7. accounts/notifications-server.js (6)
  8. accounts/notifications-service.js (34)
  9. accounts/status-rest-api.js (2)
 10. accounts/support-rest-api.js (26)
 11. accounts/transactions-rest-api.js (8)
 12. accounts/unspent-rest-api.js (4)
 13. accounts/xpub-rest-api.js (22)
 14. doc/DOCKER_mac_setup.MD (4)
 15. doc/DOCKER_setup.md (18)
 16. docker/my-dojo/.env (6)
 17. docker/my-dojo/bitcoin/restart.sh (1)
 18. docker/my-dojo/docker-compose.yaml (25)
 19. docker/my-dojo/dojo.sh (85)
 20. docker/my-dojo/explorer/Dockerfile (8)
 21. docker/my-dojo/explorer/restart.sh (2)
 22. docker/my-dojo/nginx/Dockerfile (6)
 23. docker/my-dojo/nginx/nginx.conf (5)
 24. docker/my-dojo/node/Dockerfile (6)
 25. docker/my-dojo/node/restart.sh (14)
 26. docker/my-dojo/overrides/bitcoind.install.yaml (5)
 27. docker/my-dojo/overrides/explorer.install.yaml (10)
 28. docker/my-dojo/overrides/indexer.install.yaml (5)
 29. docker/my-dojo/overrides/whirlpool.install.yaml (5)
 30. docker/my-dojo/whirlpool/restart.sh (1)
 31. lib/auth/authentication-manager.js (2)
 32. lib/auth/localapikey-strategy-configurator.js (6)
 33. lib/bitcoin/addresses-helper.js (2)
 34. lib/bitcoin/hd-accounts-helper.js (6)
 35. lib/bitcoin/hd-accounts-service.js (10)
 36. lib/bitcoind-rpc/fees.js (2)
 37. lib/bitcoind-rpc/latest-block.js (4)
 38. lib/bitcoind-rpc/rpc-client.js (2)
 39. lib/bitcoind-rpc/transactions.js (6)
 40. lib/db/mysql-db-wrapper.js (24)
 41. lib/http-server/http-server.js (6)
 42. lib/indexer-rpc/rpc-client.js (2)
 43. lib/logger.js (9)
 44. lib/remote-importer/bitcoind-wrapper.js (4)
 45. lib/remote-importer/esplora-wrapper.js (2)
 46. lib/remote-importer/local-indexer-wrapper.js (4)
 47. lib/remote-importer/oxt-wrapper.js (4)
 48. lib/remote-importer/remote-importer.js (28)
 49. lib/remote-importer/sources-mainnet.js (6)
 50. lib/remote-importer/sources-testnet.js (6)
 51. lib/remote-importer/sources.js (4)
 52. pushtx/index-orchestrator.js (4)
 53. pushtx/index.js (4)
 54. pushtx/orchestrator.js (22)
 55. pushtx/pushtx-processor.js (6)
 56. pushtx/pushtx-rest-api.js (8)
 57. pushtx/status.js (2)
 58. pushtx/transactions-scheduler.js (4)
 59. tracker/block.js (12)
 60. tracker/blockchain-processor.js (44)
 61. tracker/index.js (4)
 62. tracker/mempool-processor.js (30)
 63. tracker/transaction.js (10)

accounts/api-helper.js (2)

@@ -137,7 +137,7 @@ class ApiHelper {
 HttpServer.sendError(res, errors.body.INVDATA)
 Logger.error(
 params,
-`ApiHelper.validateEntitiesParams() : Invalid arguments`
+`API : ApiHelper.validateEntitiesParams() : Invalid arguments`
 )
 }
 }

accounts/fees-rest-api.js (2)

@@ -46,7 +46,7 @@ class FeesRestApi {
 } catch (e) {
 HttpServer.sendError(res, e)
 } finally {
-debugApi && Logger.info(`Completed GET /fees`)
+debugApi && Logger.info(`API : Completed GET /fees`)
 }
 }

accounts/headers-rest-api.js (4)

@@ -49,7 +49,7 @@ class HeadersRestApi {
 } catch(e) {
 HttpServer.sendError(res, e)
 } finally {
-debugApi && Logger.info(`Completed GET /header/${req.params.hash}`)
+debugApi && Logger.info(`API : Completed GET /header/${req.params.hash}`)
 }
 }
@@ -66,7 +66,7 @@ class HeadersRestApi {
 HttpServer.sendError(res, errors.body.INVDATA)
 Logger.error(
 req.params.hash,
-'HeadersRestApi.validateArgsGetHeader() : Invalid hash'
+'API : HeadersRestApi.validateArgsGetHeader() : Invalid hash'
 )
 } else {
 next()

accounts/index-cluster.js (6)

@@ -20,15 +20,15 @@ if (cluster.isMaster) {
 })
 cluster.on('listening', function(worker) {
-Logger.info(`Cluster ${worker.process.pid} connected`)
+Logger.info(`API : Cluster ${worker.process.pid} connected`)
 })
 cluster.on('disconnect', function(worker) {
-Logger.info(`Cluster ${worker.process.pid} disconnected`)
+Logger.info(`API : Cluster ${worker.process.pid} disconnected`)
 })
 cluster.on('exit', function(worker) {
-Logger.info(`Cluster ${worker.process.pid} is dead`)
+Logger.info(`API : Cluster ${worker.process.pid} is dead`)
 // Ensuring a new cluster will start if an old one dies
 cluster.fork()
 })

accounts/index.js (4)

@@ -28,8 +28,8 @@
 /**
 * Samourai REST API
 */
-Logger.info('Process ID: ' + process.pid)
-Logger.info('Preparing the REST API')
+Logger.info('API : Process ID: ' + process.pid)
+Logger.info('API : Preparing the REST API')
 // Wait for Bitcoind RPC API
 // being ready to process requests

accounts/multiaddr-rest-api.js (4)

@@ -85,7 +85,7 @@ class MultiaddrRestApi {
 ${req.query.bip49 ? req.query.bip49 : ''} \
 ${req.query.bip84 ? req.query.bip84 : ''}`
-Logger.info(`Completed GET /multiaddr ${strParams}`)
+Logger.info(`API : Completed GET /multiaddr ${strParams}`)
 }
 }
 }
@@ -126,7 +126,7 @@ class MultiaddrRestApi {
 ${req.body.bip49 ? req.body.bip49 : ''} \
 ${req.body.bip84 ? req.body.bip84 : ''}`
-Logger.info(`Completed POST /multiaddr ${strParams}`)
+Logger.info(`API : Completed POST /multiaddr ${strParams}`)
 }
 }
 }

accounts/notifications-server.js (6)

@@ -61,7 +61,7 @@ class NotificationsServer {
 const header = JSON.parse(message.toString())
 this.notifService.notifyBlock(header)
 } catch(e) {
-Logger.error(e, 'NotificationServer._initTrackerSocket() : Error in block message')
+Logger.error(e, 'API : NotificationServer._initTrackerSocket() : Error in block message')
 }
 break
 case 'transaction':
@@ -69,11 +69,11 @@ class NotificationsServer {
 const tx = JSON.parse(message.toString())
 this.notifService.notifyTransaction(tx)
 } catch(e) {
-Logger.error(e, 'NotificationServer._initTrackerSocket() : Error in transaction message')
+Logger.error(e, 'API : NotificationServer._initTrackerSocket() : Error in transaction message')
 }
 break
 default:
-Logger.info(`Unknown ZMQ message topic: "${topic}"`)
+Logger.info(`API : Unknown ZMQ message topic: "${topic}"`)
 }
 })
 }

accounts/notifications-service.js (34)

@@ -59,7 +59,7 @@ class NotificationsService {
 _initWSServer(server) {
 this.ws = new WebSocket.server({httpServer: server})
-Logger.info('Created WebSocket server')
+Logger.info('API : Created WebSocket server')
 this.ws.on('request', req => {
 try {
@@ -67,14 +67,14 @@ class NotificationsService {
 conn.id = status.sessions++
 conn.subs = []
-debug && Logger.info(`Client ${conn.id} connected`)
+debug && Logger.info(`API : Client ${conn.id} connected`)
 conn.on('close', () => {
 this._closeWSConnection(conn, false)
 })
 conn.on('error', err => {
-Logger.error(err, `NotificationsService : Error on connection ${conn.id}`)
+Logger.error(err, `API : NotificationsService : Error on connection ${conn.id}`)
 if (conn.connected)
 this._closeWSConnection(conn, true)
 })
@@ -91,7 +91,7 @@ class NotificationsService {
 status.maxConn = Math.max(status.maxConn, Object.keys(this.conn).length)
 } catch(e) {
-Logger.error(e, `NotificationsService._initWSServer() : Error during request accept`)
+Logger.error(e, `API : NotificationsService._initWSServer() : Error during request accept`)
 }
 })
 }
@@ -120,10 +120,10 @@ class NotificationsService {
 if (forcedClose && conn.connected)
 conn.drop(1008, 'Get out of here!')
-debug && Logger.info(`Client ${conn.id} disconnected`)
+debug && Logger.info(`API : Client ${conn.id} disconnected`)
 } catch(e) {
-Logger.error(e, 'NotificationsService._closeWSConnection()')
+Logger.error(e, 'API : NotificationsService._closeWSConnection()')
 }
 }
@@ -134,7 +134,7 @@ class NotificationsService {
 */
 _filterWSMessage(msg) {
 if (this.cacheSubs.has(msg)) {
-debug && Logger.info('Duplicate subscriptions detected')
+debug && Logger.info('API : Duplicate subscriptions detected')
 return false
 } else {
 this.cacheSubs.set(msg, true)
@@ -150,7 +150,7 @@ class NotificationsService {
 */
 _handleWSMessage(msg, conn) {
 try {
-debug && Logger.info(`Received from client ${conn.id}: ${msg}`)
+debug && Logger.info(`API : Received from client ${conn.id}: ${msg}`)
 const data = JSON.parse(msg)
@@ -183,7 +183,7 @@ class NotificationsService {
 break
 }
 } catch(e) {
-Logger.error(e, 'NotificationsService._handleWSMessage() : WebSocket message error')
+Logger.error(e, 'API : NotificationsService._handleWSMessage() : WebSocket message error')
 }
 }
@@ -223,7 +223,7 @@ class NotificationsService {
 this.subs[topic].push(conn.id)
-debug && Logger.info(`Client ${conn.id} subscribed to ${topic}`)
+debug && Logger.info(`API : Client ${conn.id} subscribed to ${topic}`)
 }
 /**
@@ -267,7 +267,7 @@ class NotificationsService {
 try {
 this.conn[cid].sendUTF(msg)
 } catch(e) {
-Logger.error(e, `NotificationsService.dispatch() : Error sending dispatch for ${topic} to client ${cid}`)
+Logger.error(e, `API : NotificationsService.dispatch() : Error sending dispatch for ${topic} to client ${cid}`)
 }
 }
 }
@@ -284,7 +284,7 @@ class NotificationsService {
 }
 this.dispatch('block', JSON.stringify(data))
 } catch(e) {
-Logger.error(e, `NotificationsService.notifyBlock()`)
+Logger.error(e, `API : NotificationsService.notifyBlock()`)
 }
 }
@@ -440,14 +440,14 @@ class NotificationsService {
 try {
 this.conn[cid].sendUTF(JSON.stringify(data))
-debug && Logger.error(`Sent ctx ${ctx.hash} to client ${cid}`)
+debug && Logger.error(`API : Sent ctx ${ctx.hash} to client ${cid}`)
 } catch(e) {
-Logger.error(e, `NotificationsService.notifyTransaction() : Trouble sending ctx to client ${cid}`)
+Logger.error(e, `API : NotificationsService.notifyTransaction() : Trouble sending ctx to client ${cid}`)
 }
 }
 } catch(e) {
-Logger.error(e, `NotificationsService.notifyTransaction()`)
+Logger.error(e, `API : NotificationsService.notifyTransaction()`)
 }
 }
@@ -464,9 +464,9 @@ class NotificationsService {
 try {
 this.conn[cid].sendUTF(JSON.stringify(data))
-debug && Logger.error(`Sent authentication error to client ${cid}`)
+debug && Logger.error(`API : Sent authentication error to client ${cid}`)
 } catch(e) {
-Logger.error(e, `NotificationsService.notifyAuthError() : Trouble sending authentication error to client ${cid}`)
+Logger.error(e, `API : NotificationsService.notifyAuthError() : Trouble sending authentication error to client ${cid}`)
 }
 }

accounts/status-rest-api.js (2)

@@ -47,7 +47,7 @@ class StatusRestApi {
 } catch(e) {
 HttpServer.sendError(res, e)
 } finally {
-debugApi && Logger.info(`Completed GET /status`)
+debugApi && Logger.info(`API : Completed GET /status`)
 }
 }

accounts/support-rest-api.js (26)

@@ -108,7 +108,7 @@ class SupportRestApi {
 HttpServer.sendError(res, errors.generic.GEN)
 } finally {
-debugApi && Logger.info(`Completed GET /support/address/${req.params.addr}/info`)
+debugApi && Logger.info(`API : Completed GET /support/address/${req.params.addr}/info`)
 }
 }
@@ -175,7 +175,7 @@ class SupportRestApi {
 HttpServer.sendError(res, errors.generic.GEN)
 } finally {
-debugApi && Logger.info(`Completed GET /support/address/${req.params.addr}/rescan`)
+debugApi && Logger.info(`API : Completed GET /support/address/${req.params.addr}/rescan`)
 }
 }
@@ -212,7 +212,7 @@ class SupportRestApi {
 HttpServer.sendError(res, errors.generic.GEN)
 } finally {
-debugApi && Logger.info(`Completed GET /support/xpub/${req.params.xpub}/info`)
+debugApi && Logger.info(`API : Completed GET /support/xpub/${req.params.xpub}/info`)
 }
 }
@@ -270,7 +270,7 @@ class SupportRestApi {
 HttpServer.sendRawData(res, JSON.stringify(ret, null, 2))
 } else {
 ret.status = 'Rescan Error'
-Logger.error(e, 'SupportRestApi.getXpubRescan() : Support rescan error')
+Logger.error(e, 'API : SupportRestApi.getXpubRescan() : Support rescan error')
 HttpServer.sendError(res, JSON.stringify(ret, null, 2))
 }
 }
@@ -279,7 +279,7 @@ class SupportRestApi {
 HttpServer.sendError(res, errors.generic.GEN)
 } finally {
-debugApi && Logger.info(`Completed GET /support/xpub/${req.params.xpub}/rescan`)
+debugApi && Logger.info(`API : Completed GET /support/xpub/${req.params.xpub}/rescan`)
 }
 }
@@ -300,10 +300,10 @@ class SupportRestApi {
 const ret = {
 status: 'error'
 }
-Logger.error(e, 'SupportRestApi.getPairing() : Support pairing error')
+Logger.error(e, 'API : SupportRestApi.getPairing() : Support pairing error')
 HttpServer.sendError(res, JSON.stringify(ret, null, 2))
 } finally {
-debugApi && Logger.info(`Completed GET /pairing`)
+debugApi && Logger.info(`API : Completed GET /pairing`)
 }
 }
@@ -318,7 +318,7 @@ class SupportRestApi {
 url = fs.readFileSync('/var/lib/tor/hsv3explorer/hostname', 'utf8')
 url = url.replace('\n', '')
 } catch(e) {
-Logger.error(e, 'SupportRestApi.getPairing() : Cannot read explorer onion address')
+Logger.error(e, 'API : SupportRestApi.getPairing() : Cannot read explorer onion address')
 }
 }
 const ret = {
@@ -333,10 +333,10 @@ class SupportRestApi {
 const ret = {
 status: 'error'
 }
-Logger.error(e, 'SupportRestApi.getPairingExplorer() : Support pairing error')
+Logger.error(e, 'API : SupportRestApi.getPairingExplorer() : Support pairing error')
 HttpServer.sendError(res, JSON.stringify(ret, null, 2))
 } finally {
-debugApi && Logger.info(`Completed GET /pairing/explorer`)
+debugApi && Logger.info(`API : Completed GET /pairing/explorer`)
 }
 }
@@ -351,7 +351,7 @@ class SupportRestApi {
 if (!isValidXpub) {
 HttpServer.sendError(res, errors.body.INVDATA)
-Logger.error(null, `SupportRestApi.validateArgsGetXpubInfo() : Invalid xpub ${req.params.xpub}`)
+Logger.error(null, `API : SupportRestApi.validateArgsGetXpubInfo() : Invalid xpub ${req.params.xpub}`)
 } else {
 next()
 }
@@ -369,7 +369,7 @@ class SupportRestApi {
 if (!(isValidXpub && isValidGap)) {
 HttpServer.sendError(res, errors.body.INVDATA)
-Logger.error(null, 'SupportRestApi.validateArgsGetXpubRescan() : Invalid arguments')
+Logger.error(null, 'API : SupportRestApi.validateArgsGetXpubRescan() : Invalid arguments')
 } else {
 next()
 }
@@ -386,7 +386,7 @@ class SupportRestApi {
 if (!isValidAddress) {
 HttpServer.sendError(res, errors.body.INVDATA)
-Logger.error(null, `SupportRestApi.validateAddress() : Invalid address ${req.params.addr}`)
+Logger.error(null, `API : SupportRestApi.validateAddress() : Invalid address ${req.params.addr}`)
 } else {
 next()
 }

accounts/transactions-rest-api.js (8)

@@ -63,7 +63,7 @@ class TransactionsRestApi {
 HttpServer.sendError(res, e)
 } finally {
 const strParams = `${req.query.fees ? req.query.fees : ''}`
-debugApi && Logger.info(`Completed GET /tx/${req.params.txid} ${strParams}`)
+debugApi && Logger.info(`API : Completed GET /tx/${req.params.txid} ${strParams}`)
 }
 }
@@ -97,7 +97,7 @@ class TransactionsRestApi {
 ${req.query.page ? req.query.page : ''} \
 ${req.query.count ? req.query.count : ''}`
-debugApi && Logger.info(`Completed GET /txs ${strParams}`)
+debugApi && Logger.info(`API : Completed GET /txs ${strParams}`)
 }
 }
@@ -118,7 +118,7 @@ class TransactionsRestApi {
 HttpServer.sendError(res, errors.body.INVDATA)
 Logger.error(
 req.params,
-'HeadersRestApi.validateArgsGetTransaction() : Invalid arguments'
+'API : HeadersRestApi.validateArgsGetTransaction() : Invalid arguments'
 )
 Logger.error(req.query, '')
 } else {
@@ -145,7 +145,7 @@ class TransactionsRestApi {
 HttpServer.sendError(res, errors.body.INVDATA)
 Logger.error(
 req.query,
-'HeadersRestApi.validateArgsGetTransactions() : Invalid arguments'
+'API : HeadersRestApi.validateArgsGetTransactions() : Invalid arguments'
 )
 } else {
 next()

accounts/unspent-rest-api.js (4)

@@ -85,7 +85,7 @@ class UnspentRestApi {
 ${req.query.bip49 ? req.query.bip49 : ''} \
 ${req.query.bip84 ? req.query.bip84 : ''}`
-Logger.info(`Completed GET /unspent ${strParams}`)
+Logger.info(`API : Completed GET /unspent ${strParams}`)
 }
 }
 }
@@ -126,7 +126,7 @@ class UnspentRestApi {
 ${req.body.bip49 ? req.body.bip49 : ''} \
 ${req.body.bip84 ? req.body.bip84 : ''}`
-Logger.info(`Completed POST /unspent ${strParams}`)
+Logger.info(`API : Completed POST /unspent ${strParams}`)
 }
 }
 }

accounts/xpub-rest-api.js (22)

@@ -151,7 +151,7 @@ class XPubRestApi {
 return HttpServer.sendError(res, errors.generic.GEN)
 } finally {
-debugApi && Logger.info(`Completed POST /xpub ${req.body.xpub}`)
+debugApi && Logger.info(`API : Completed POST /xpub ${req.body.xpub}`)
 }
 }
@@ -194,11 +194,11 @@ class XPubRestApi {
 HttpServer.sendOkData(res, ret)
 } catch(e) {
-Logger.error(e, 'XpubRestApi.getXpub()')
+Logger.error(e, 'API : XpubRestApi.getXpub()')
 HttpServer.sendError(res, e)
 } finally {
-debugApi && Logger.info(`Completed GET /xpub/${req.params.xpub}`)
+debugApi && Logger.info(`API : Completed GET /xpub/${req.params.xpub}`)
 }
 }
@@ -253,7 +253,7 @@ class XPubRestApi {
 }
 } finally {
-debugApi && Logger.info(`Completed POST /xpub/${req.params.xpub}/lock`)
+debugApi && Logger.info(`API : Completed POST /xpub/${req.params.xpub}/lock`)
 }
 }
@@ -303,7 +303,7 @@ class XPubRestApi {
 HttpServer.sendError(res, errors.generic.GEN)
 } finally {
-debugApi && Logger.info(`Completed DELETE /xpub/${req.params.xpub}`)
+debugApi && Logger.info(`API : Completed DELETE /xpub/${req.params.xpub}`)
 }
 }
@@ -327,8 +327,8 @@ class XPubRestApi {
 xpub = hdaHelper.xlatXPUB(origXpub)
 scheme = isYpub ? hdaHelper.BIP49 : hdaHelper.BIP84
 if (trace) {
-Logger.info('Converted: ' + origXpub)
-Logger.info('Resulting xpub: ' + xpub)
+Logger.info('API : Converted: ' + origXpub)
+Logger.info('API : Resulting xpub: ' + xpub)
 }
 }
@@ -371,7 +371,7 @@ class XPubRestApi {
 HttpServer.sendError(res, errors.body.INVDATA)
 Logger.error(
 req.body,
-'XpubRestApi.validateArgsPostXpub() : Invalid arguments'
+'API : XpubRestApi.validateArgsPostXpub() : Invalid arguments'
 )
 } else {
 next()
@@ -391,7 +391,7 @@ class XPubRestApi {
 HttpServer.sendError(res, errors.body.INVDATA)
 Logger.error(
 req.params.xpub,
-'XpubRestApi.validateArgsGetXpub() : Invalid arguments'
+'API : XpubRestApi.validateArgsGetXpub() : Invalid arguments'
 )
 } else {
 next()
@@ -414,7 +414,7 @@ class XPubRestApi {
 HttpServer.sendError(res, errors.body.INVDATA)
 Logger.error(
 req.params,
-'XpubRestApi.validateArgsPostLockXpub() : Invalid arguments'
+'API : XpubRestApi.validateArgsPostLockXpub() : Invalid arguments'
 )
 Logger.error(req.body, '')
 } else {
@@ -437,7 +437,7 @@ class XPubRestApi {
 HttpServer.sendError(res, errors.body.INVDATA)
 Logger.error(
 req.params,
-'XpubRestApi.validateArgsDeleteXpub() : Invalid arguments'
+'API : XpubRestApi.validateArgsDeleteXpub() : Invalid arguments'
 )
 Logger.error(req.body, '')
 } else {

doc/DOCKER_mac_setup.MD (4)

@@ -111,9 +111,9 @@ __"Download the most recent release of Dojo from Github"__ until you reach __"La
 Once you Reach Step __"Launch the Installation of Your Dojo with"__ from above you will need to read and follow the instructions from [here](https://github.com/Samourai-Wallet/samourai-dojo/blob/develop/doc/DOCKER_advanced_setups.md)
 Once adjustments are made to your external bitcoind bitcoin.conf __(location dependent on what device you have bitcoind)__ and docker-bitcoind.conf.tpl __(dojo_dir > docker > my-dojo > conf)__ you can proceed with Install and revert back to original instructions [here](https://github.com/Samourai-Wallet/samourai-dojo/blob/develop/doc/DOCKER_setup.md) at section __"Launch the Installation of Your Dojo with"__
-_Note: For tracking progress, open terminal, change directory to my-dojo and run /dojo.sh logs tracker
+_Note: For tracking progress, open terminal, change directory to my-dojo and run /dojo.sh logs nodejs
 __Some possible optimization tips:__
-If you notice that progress has stopped. Click the whale icon and select Restart. Restart Logs Tracker from step above to verify progress has resumed.
+If you notice that progress has stopped. Click the whale icon and select Restart. Restart logs nodejs from step above to verify progress has resumed.
 This may optimize speed: open __Activity Monitor__, check the PID (Process ID) of your docker process. Open Terminal and type:

doc/DOCKER_setup.md (18)

@@ -182,7 +182,7 @@ Docker and Docker Compose are going to build the images and containers of your D
 * Monitor the progress made for the initialization of the database with this command displaying the logs of the tracker
 ```
-./dojo.sh logs tracker
+./dojo.sh logs nodejs
 ```
 Exit the logs with CTRL+C when the syncing of the database has completed.
@@ -245,24 +245,22 @@ Available commands:
 install Install your Dojo.
-logs [module] [options] Display the logs of your Dojo. Use CTRL+C to stop the logs.
+logs [module] [options] Display the logs of your dojo.
+ By default, the command displays the live logs. Use CTRL+C to stop the logs.
+ Use the -n option to display past logs.
 Available modules:
-dojo.sh logs : display the logs of all containers
+dojo.sh logs : display the logs of all the Docker containers
 dojo.sh logs bitcoind : display the logs of bitcoind
 dojo.sh logs db : display the logs of the MySQL database
 dojo.sh logs tor : display the logs of tor
+dojo.sh logs nginx : display the logs of nginx
 dojo.sh logs indexer : display the logs of the internal indexer
-dojo.sh logs api : display the logs of the REST API (nodejs)
-dojo.sh logs tracker : display the logs of the Tracker (nodejs)
-dojo.sh logs pushtx : display the logs of the pushTx API (nodejs)
-dojo.sh logs pushtx-orchest : display the logs of the Orchestrator (nodejs)
+dojo.sh logs nodejs : display the logs of NodeJS modules (API, Tracker, PushTx API, Orchestrator)
 dojo.sh logs explorer : display the logs of the Explorer
 dojo.sh logs whirlpool : display the logs of the Whirlpool client
-Available options (for api, tracker, pushtx, pushtx-orchest and explorer modules):
--d [VALUE] : select the type of log to be displayed.
-VALUE can be output (default) or error.
+Available options:
 -n [VALUE] : display the last VALUE lines
 onion Display the Tor onion addresses allowing to access the API, maintenance tool and block explorer of your Dojo.
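An illustrative session with the reworked command (paths and option names as documented above):

```
cd <dojo_dir>/docker/my-dojo
./dojo.sh logs                  # follow the live logs of all containers
./dojo.sh logs bitcoind -n 200  # print the last 200 lines of the bitcoind logs
```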

docker/my-dojo/.env (6)

@@ -11,13 +11,13 @@
 COMPOSE_CONVERT_WINDOWS_PATHS=1
 DOJO_VERSION_TAG=1.6.0
-DOJO_DB_VERSION_TAG=1.1.1
-DOJO_BITCOIND_VERSION_TAG=1.5.0
+DOJO_DB_VERSION_TAG=1.2.0
+DOJO_BITCOIND_VERSION_TAG=1.6.0
 DOJO_NODEJS_VERSION_TAG=1.6.0
 DOJO_NGINX_VERSION_TAG=1.5.0
 DOJO_TOR_VERSION_TAG=1.4.0
 DOJO_EXPLORER_VERSION_TAG=1.3.0
-DOJO_INDEXER_VERSION_TAG=1.0.0
+DOJO_INDEXER_VERSION_TAG=1.1.0
 DOJO_WHIRLPOOL_VERSION_TAG=1.0.0

docker/my-dojo/bitcoin/restart.sh (1)

@@ -6,6 +6,7 @@ echo "## Start bitcoind #############################"
 bitcoind_options=(
   -bind=172.28.1.5
   -datadir=/home/bitcoin/.bitcoin
+  -printtoconsole=1
   -dbcache=$BITCOIND_DB_CACHE
   -disablewallet=1
   -dns=$BITCOIND_DNS

docker/my-dojo/docker-compose.yaml (25)

@@ -15,6 +15,11 @@ services:
       - "3306"
     volumes:
       - data-mysql:/var/lib/mysql
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "20m"
+        max-file: "10"
     networks:
       dojonet:
         ipv4_address: 172.28.1.1
@@ -40,8 +45,12 @@ services:
       - "8081"
       - "8082"
     volumes:
-      - data-nodejs:/data
       - data-tor:/var/lib/tor
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "20m"
+        max-file: "10"
     depends_on:
       - db
     networks:
@@ -61,8 +70,11 @@ services:
     expose:
       - "80"
      - "9080"
-    volumes:
-      - data-nginx:/data
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "20m"
+        max-file: "10"
     depends_on:
       - node
     networks:
@@ -87,6 +99,11 @@ services:
     command: /restart.sh
     volumes:
       - data-tor:/var/lib/tor
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "20m"
+        max-file: "10"
     networks:
       dmznet:
         ipv4_address: 172.29.1.4
@@ -115,6 +132,4 @@ networks:
 volumes:
   data-mysql:
-  data-nodejs:
-  data-nginx:
   data-tor:
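Every service now carries the same rotation policy, so a container's logs are capped at roughly 200 MiB (10 files of 20 MiB). One way to check the policy actually applied to a running container (standard Docker CLI; `nodejs` is the container name used elsewhere in this PR):

```
docker inspect --format '{{json .HostConfig.LogConfig}}' nodejs
# {"Type":"json-file","Config":{"max-file":"10","max-size":"20m"}}
```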

docker/my-dojo/dojo.sh (85)

@@ -198,7 +198,7 @@ install() {
 docker_up --remove-orphans
 # Display the logs
 if [ $noLog -eq 1 ]; then
-logs
+logs "" 0
 fi
 fi
 }
@@ -326,7 +326,7 @@ upgrade() {
 update_dojo_db
 # Display the logs
 if [ $noLog -eq 1 ]; then
-logs
+logs "" 0
 fi
 fi
 }
@@ -381,78 +381,55 @@ whirlpool() {
 }
 # Display logs
-logs_node() {
-if [ $3 -eq 0 ]; then
-docker exec -ti nodejs tail -f /data/logs/$1-$2.log
-else
-docker exec -ti nodejs tail -n $3 /data/logs/$1-$2.log
-fi
-}
-logs_explorer() {
-if [ $3 -eq 0 ]; then
-docker exec -ti explorer tail -f /data/logs/$1-$2.log
-else
-docker exec -ti explorer tail -n $3 /data/logs/$1-$2.log
-fi
-}
-logs_whirlpool() {
-if [ $3 -eq 0 ]; then
-docker exec -ti whirlpool tail -f /home/whirlpool/.whirlpool-cli/whirlpool-output.log
+display_logs() {
+yamlFiles=$(select_yaml_files)
+if [ $2 -eq 0 ]; then
+docker-compose $yamlFiles logs --tail=50 --follow $1
 else
-docker exec -ti whirlpool tail -n $3 /home/whirlpool/.whirlpool-cli/whirlpool-output.log
+docker-compose $yamlFiles logs --tail=$2 $1
 fi
 }
 logs() {
 source_file "$DIR/conf/docker-bitcoind.conf"
 source_file "$DIR/conf/docker-indexer.conf"
+source_file "$DIR/conf/docker-explorer.conf"
 source_file "$DIR/conf/docker-whirlpool.conf"
 source_file "$DIR/conf/docker-common.conf"
 case $1 in
-db )
-docker-compose logs --tail=50 --follow db
+db | tor | nginx | node )
+display_logs $1 $2
 ;;
 bitcoind )
 if [ "$BITCOIND_INSTALL" == "on" ]; then
-if [ "$COMMON_BTC_NETWORK" == "testnet" ]; then
-bitcoindDataDir="/home/bitcoin/.bitcoin/testnet3"
-else
-bitcoindDataDir="/home/bitcoin/.bitcoin"
-fi
-docker exec -ti bitcoind tail -f "$bitcoindDataDir/debug.log"
+display_logs $1 $2
 else
 echo -e "Command not supported for your setup.\nCause: Your Dojo is using an external bitcoind"
 fi
 ;;
 indexer )
 if [ "$INDEXER_INSTALL" == "on" ]; then
-yamlFiles=$(select_yaml_files)
-eval "docker-compose $yamlFiles logs --tail=50 --follow indexer"
+display_logs $1 $2
 else
-echo -e "Command not supported for your setup.\nCause: Your Dojo is not using an internal indexer"
+echo -e "Command not supported for your setup.\nCause: Your Dojo is not running the internal indexer"
 fi
 ;;
-tor )
-docker-compose logs --tail=50 --follow tor
-;;
-api | pushtx | pushtx-orchest | tracker )
-logs_node $1 $2 $3
-;;
 explorer )
-logs_explorer $1 $2 $3
+if [ "$EXPLORER_INSTALL" == "on" ]; then
+display_logs $1 $2
+else
+echo -e "Command not supported for your setup.\nCause: Your Dojo is not running the internal block explorer"
+fi
 ;;
 whirlpool )
 if [ "$WHIRLPOOL_INSTALL" == "on" ]; then
-logs_whirlpool $1 $2 $3
+display_logs $1 $2
 else
 echo -e "Command not supported for your setup.\nCause: Your Dojo is not running a whirlpool client"
 fi
 ;;
 * )
-yamlFiles=$(select_yaml_files)
 services="nginx node tor db"
 if [ "$BITCOIND_INSTALL" == "on" ]; then
 services="$services bitcoind"
@@ -466,7 +443,7 @@ logs() {
 if [ "$WHIRLPOOL_INSTALL" == "on" ]; then
 services="$services whirlpool"
 fi
-eval "docker-compose $yamlFiles logs --tail=0 --follow $services"
+display_logs "$services" $2
 ;;
 esac
 }
@@ -489,24 +466,22 @@ help() {
 echo " Available options:"
 echo " --nolog : do not display the logs after Dojo has been laucnhed."
 echo " "
-echo " logs [module] [options] Display the logs of your dojo. Use CTRL+C to stop the logs."
+echo " logs [module] [options] Display the logs of your dojo."
+echo " By default, the command displays the live logs. Use CTRL+C to stop the logs."
+echo " Use the -n option to display past logs."
 echo " "
 echo " Available modules:"
 echo " dojo.sh logs : display the logs of all the Docker containers"
 echo " dojo.sh logs bitcoind : display the logs of bitcoind"
 echo " dojo.sh logs db : display the logs of the MySQL database"
 echo " dojo.sh logs tor : display the logs of tor"
+echo " dojo.sh logs nginx : display the logs of nginx"
 echo " dojo.sh logs indexer : display the logs of the internal indexer"
-echo " dojo.sh logs api : display the logs of the REST API (nodejs)"
-echo " dojo.sh logs tracker : display the logs of the Tracker (nodejs)"
-echo " dojo.sh logs pushtx : display the logs of the pushTx API (nodejs)"
-echo " dojo.sh logs pushtx-orchest : display the logs of the pushTx Orchestrator (nodejs)"
+echo " dojo.sh logs node : display the logs of NodeJS modules (API, Tracker, PushTx API, Orchestrator)"
 echo " dojo.sh logs explorer : display the logs of the Explorer"
 echo " dojo.sh logs whirlpool : display the logs of the Whirlpool client"
 echo " "
-echo " Available options (only available for api, tracker, pushtx, pushtx-orchest, explorer and whirlpool modules):"
-echo " -d [VALUE] : select the type of log to be displayed."
-echo " VALUE can be output (default) or error."
+echo " Available options:"
 echo " -n [VALUE] : display the last VALUE lines"
 echo " "
 echo " onion Display the Tor onion address allowing your wallet to access your dojo."
@@ -580,15 +555,11 @@ case "$subcommand" in
 ;;
 logs )
 module=$1; shift
-display="output"
 numlines=0
 # Process package options
-while getopts ":d:n:" opt; do
+while getopts ":n:" opt; do
 case ${opt} in
-d )
-display=$OPTARG
-;;
 n )
 numlines=$OPTARG
 ;;
@@ -604,7 +575,7 @@ case "$subcommand" in
 done
 shift $((OPTIND -1))
-logs $module $display $numlines
+logs "$module" $numlines
 ;;
 onion )
 onion
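The four per-module tail helpers (`logs_node`, `logs_explorer`, `logs_whirlpool`, plus the inline bitcoind/db/tor cases) collapse into a single `display_logs` function, and the `-d output|error` option disappears because stdout and stderr now travel together in the Docker log stream. The resulting call chain, traced by hand from the diff above:

```
./dojo.sh logs whirlpool -n 50
#   -> logs "whirlpool" 50
#     -> display_logs whirlpool 50
#       -> docker-compose $yamlFiles logs --tail=50 whirlpool
```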

docker/my-dojo/explorer/Dockerfile (8)

@@ -1,24 +1,18 @@
 FROM node:12-buster
-ENV LOGS_DIR /data/logs
 ENV APP_DIR /home/node/app
 ENV EXPLORER_URL https://github.com/janoside/btc-rpc-explorer/archive
 ENV EXPLORER_VERSION 2.0.0
 # Install netcat
 RUN set -ex && \
   apt-get update && \
   apt-get install -y netcat
-# Create logs and apps directory
-RUN mkdir -p "$LOGS_DIR" && \
-  chown -R node:node "$LOGS_DIR" && \
-  mkdir "$APP_DIR"
 # Download the source code and install it
 RUN set -ex && \
+  mkdir "$APP_DIR" && \
   wget -qO explorer.tar.gz "$EXPLORER_URL/v$EXPLORER_VERSION.tar.gz" && \
   tar -xzvf explorer.tar.gz -C "$APP_DIR/" --strip-components 1 && \
   rm explorer.tar.gz && \

docker/my-dojo/explorer/restart.sh (2)

@@ -42,4 +42,4 @@ if [ "$NODE_ACTIVE_INDEXER" == "local_indexer" ]; then
   fi
 fi
-node ./bin/cli.js "${explorer_options[@]}" > /data/logs/explorer-error.log 2> /data/logs/explorer-output.log
+node ./bin/cli.js "${explorer_options[@]}"

docker/my-dojo/nginx/Dockerfile (6)

@@ -1,11 +1,5 @@
 FROM nginx:1.15.10-alpine
-# Create data directory
-ENV LOGS_DIR /data/logs
-RUN mkdir -p "$LOGS_DIR" && \
-  chown -R nginx:nginx "$LOGS_DIR"
 # Copy configuration files
 COPY ./nginx.conf /etc/nginx/nginx.conf
 COPY ./dojo.conf /etc/nginx/sites-enabled/dojo.conf

docker/my-dojo/nginx/nginx.conf (5)

@@ -2,8 +2,9 @@ user nginx;
 worker_processes auto;
 daemon off;
-# Log critical errors and higher
-error_log /data/logs/error.log crit;
+# Log critical errors and higher to stderr
+# (see https://github.com/nginxinc/docker-nginx/blob/594ce7a8bc26c85af88495ac94d5cd0096b306f7/mainline/alpine/Dockerfile#L104)
+error_log /var/log/nginx/error.log crit;
 pid /var/run/nginx.pid;

docker/my-dojo/node/Dockerfile (6)

@@ -1,9 +1,7 @@
 FROM node:12-buster
-ENV LOGS_DIR /data/logs
 ENV APP_DIR /home/node/app
 # Add node user to tor group
 RUN addgroup --system -gid 1107 tor && \
   usermod -a -G tor node
@@ -11,10 +9,6 @@ RUN addgroup --system -gid 1107 tor && \
 # Install forever
 RUN npm install -g forever
-# Create data directory
-RUN mkdir -p "$LOGS_DIR" && \
-  chown -R node:node "$LOGS_DIR"
 # Create app directory
 RUN mkdir "$APP_DIR" && \
   chown -R node:node "$APP_DIR"

docker/my-dojo/node/restart.sh (14)

@@ -1,13 +1,17 @@
 #!/bin/bash
 cd /home/node/app/accounts
-forever start -a -l /dev/null -o /data/logs/api-output.log -e /data/logs/api-error.log index.js "$COMMON_BTC_NETWORK"
+forever start -a -l /dev/stdout -o /dev/null -e /dev/null index.js "$COMMON_BTC_NETWORK"
 cd /home/node/app/pushtx
-forever start -a -l /dev/null -o /data/logs/pushtx-output.log -e /data/logs/pushtx-error.log index.js "$COMMON_BTC_NETWORK"
+forever start -a -l /dev/stdout -o /dev/null -e /dev/null index.js "$COMMON_BTC_NETWORK"
-forever start -a -l /dev/null -o /data/logs/pushtx-orchest-output.log -e /data/logs/pushtx-orchest-error.log index-orchestrator.js "$COMMON_BTC_NETWORK"
+forever start -a -l /dev/stdout -o /dev/null -e /dev/null index-orchestrator.js "$COMMON_BTC_NETWORK"
 cd /home/node/app/tracker
-forever start -a -l /dev/null -o /data/logs/tracker-output.log -e /data/logs/tracker-error.log index.js "$COMMON_BTC_NETWORK"
+forever start -a -l /dev/stdout -o /dev/null -e /dev/null index.js "$COMMON_BTC_NETWORK"
-forever --fifo logs 0
+# Keep the container up
+while true
+do
+  sleep 1
+done
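Two details worth noting here: `forever start` now appends its combined log stream to /dev/stdout, which resolves to the container's stdout and therefore ends up in the json-file driver; and because `forever start` daemonizes and returns immediately, the script needs its own idle loop to keep PID 1 (and with it the container) alive, a role previously played by the blocking `forever --fifo logs 0`. Once the container is up, the output of all four NodeJS modules is readable through Docker, e.g.:

```
docker logs --tail=20 nodejs
```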

docker/my-dojo/overrides/bitcoind.install.yaml (5)

@@ -20,6 +20,11 @@ services:
     volumes:
       - data-bitcoind:/home/bitcoin/.bitcoin
       - data-tor:/var/lib/tor
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "20m"
+        max-file: "10"
     depends_on:
       - db
       - tor

docker/my-dojo/overrides/explorer.install.yaml (10)

@@ -16,8 +16,11 @@ services:
     command: "/home/node/app/restart.sh"
     expose:
       - "3002"
-    volumes:
-      - data-explorer:/data/logs
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "20m"
+        max-file: "10"
     networks:
       dojonet:
         ipv4_address: 172.28.1.7
@@ -25,6 +28,3 @@ services:
   node:
     depends_on:
       - explorer
-volumes:
-  data-explorer:

docker/my-dojo/overrides/indexer.install.yaml (5)

@@ -17,6 +17,11 @@ services:
       - "50001"
     volumes:
       - data-indexer:/home/indexer
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "20m"
+        max-file: "10"
     depends_on:
       - tor
     networks:

docker/my-dojo/overrides/whirlpool.install.yaml (5)

@@ -16,6 +16,11 @@ services:
       - "8898"
     volumes:
       - data-whirlpool:/home/whirlpool
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "20m"
+        max-file: "10"
     networks:
       whirlnet:
         ipv4_address: 172.30.1.8

docker/my-dojo/whirlpool/restart.sh (1)

@@ -12,7 +12,6 @@ whirlpool_options=(
   --cli.torConfig.coordinator.onion=true
   --cli.torConfig.backend.enabled=false
   --cli.torConfig.backend.onion=false
-  --logging.file="/home/whirlpool/.whirlpool-cli/whirlpool-output.log"
 )
 if [ "$COMMON_BTC_NETWORK" == "testnet" ]; then

lib/auth/authentication-manager.js (2)

@@ -40,7 +40,7 @@ class AuthenticationManager {
 if (Configurator) {
 this.activeStrategy = new Configurator()
 this.activeStrategy.configure()
-Logger.info(`Authentication strategy ${this.activeStrategyName} successfully configured`)
+Logger.info(`Auth : Authentication strategy ${this.activeStrategyName} successfully configured`)
 }
 } catch(e) {

lib/auth/localapikey-strategy-configurator.js (6)

@@ -43,14 +43,14 @@ class LocalApiKeyStrategyConfigurator {
 if (apiKey == _adminKey) {
 // Check if received key is a valid api key
-Logger.info('Successful authentication with an admin key')
+Logger.info('Auth : Successful authentication with an admin key')
 return done(null, {'profile': authorzMgr.TOKEN_PROFILE_ADMIN})
 } else if (_apiKeys.indexOf(apiKey) >= 0) {
 // Check if received key is a valid api key
-Logger.info('Successful authentication with an api key')
+Logger.info('Auth : Successful authentication with an api key')
 return done(null, {'profile': authorzMgr.TOKEN_PROFILE_API})
 } else {
-Logger.error(null, `Authentication failure (apikey=${apiKey})`)
+Logger.error(null, `Auth : Authentication failure (apikey=${apiKey})`)
 return done('Invalid API key', false)
 }
 }
} }

2
lib/bitcoin/addresses-helper.js

@ -101,7 +101,7 @@ class AddressesHelper {
try { try {
return bitcoin.address.fromBech32(str).data.toString('hex') return bitcoin.address.fromBech32(str).data.toString('hex')
} catch(e) { } catch(e) {
Logger.error(e, 'AddressesHelper.getScriptHashFromBech32()') Logger.error(e, 'AddressesHelper : getScriptHashFromBech32()')
return null return null
} }
} }

6
lib/bitcoin/hd-accounts-helper.js

@ -123,7 +123,7 @@ class HDAccountsHelper {
&& ver != this.MAGIC_ZPUB && ver != this.MAGIC_ZPUB
&& ver != this.MAGIC_VPUB && ver != this.MAGIC_VPUB
) { ) {
//Logger.error(null, 'HdAccountsHelper.xlatXPUB() : Incorrect format') //Logger.error(null, 'HdAccountsHelper : xlatXPUB() : Incorrect format')
return '' return ''
} }
@ -379,12 +379,12 @@ class HDAccountsHelper {
if (msg.status = 'ok') { if (msg.status = 'ok') {
resolve(msg.addresses) resolve(msg.addresses)
} else { } else {
Logger.error(null, 'A problem was met during parallel addresses derivation') Logger.error(null, 'HdAccountsHelper : A problem was met during parallel addresses derivation')
reject() reject()
} }
} catch(e) { } catch(e) {
Logger.error(e, 'A problem was met during parallel addresses derivation') Logger.error(e, 'HdAccountsHelper : A problem was met during parallel addresses derivation')
reject(e) reject(e)
} }
}) })

lib/bitcoin/hd-accounts-service.js (10)

@@ -40,7 +40,7 @@ class HDAccountsService {
 const isInvalidXpub = (e == errors.xpub.INVALID || e == errors.xpub.PRIVKEY)
 const isLockedXpub = (e == errors.xpub.LOCKED)
 const err = (isInvalidXpub || isLockedXpub) ? e : errors.xpub.CREATE
-Logger.error(e, 'HdAccountsService.createHdAccount()' + err)
+Logger.error(e, 'HdAccountsService : createHdAccount()' + err)
 return Promise.reject(err)
 }
 }
@@ -139,7 +139,7 @@ class HDAccountsService {
 else if (scheme == hdaHelper.BIP84)
 segwit = ' SegWit (BIP84)'
-Logger.info(`Created HD Account: ${xpub}${segwit}`)
+Logger.info(`HdAccountsService : Created HD Account: ${xpub}${segwit}`)
 const externalPrm = hdaHelper.deriveAddresses(xpub, 0, _.range(gap.external), scheme)
 const internalPrm = hdaHelper.deriveAddresses(xpub, 1, _.range(gap.internal), scheme)
@@ -196,15 +196,15 @@ class HDAccountsService {
 // check for a derivation scheme mismatch
 if (info.type != scheme) {
 if (info.locked && !forceOverride) {
-Logger.info(`Attempted override on locked account: ${xpub}`)
+Logger.info(`HdAccountsService : Attempted override on locked account: ${xpub}`)
 return Promise.reject(errors.xpub.LOCKED)
 } else {
-Logger.info(`Derivation scheme override: ${xpub}`)
+Logger.info(`HdAccountsService : Derivation scheme override: ${xpub}`)
 return db.deleteHDAccount(xpub)
 }
 }
 } catch(e) {
-Logger.error(e, 'HDAccountsService.derivationOverrideCheck()')
+Logger.error(e, 'HDAccountsService : derivationOverrideCheck()')
 return Promise.reject(e)
 }
 }

lib/bitcoind-rpc/fees.js (2)

@@ -58,7 +58,7 @@ class Fees {
 const level = await this.rpcClient.cmd('estimatesmartfee', tgt, this.feeType)
 this.fees[tgt] = Math.round(level.feerate * 1e5)
 } catch(e) {
-Logger.error(e, 'Fees.refresh()')
+Logger.error(e, 'Bitcoind RPC : Fees.refresh()')
 delete this.fees[tgt]
 }
 })

lib/bitcoind-rpc/latest-block.js (4)

@@ -43,7 +43,7 @@ class LatestBlock {
 this.onBlockHash(msg.toString('hex'))
 break
 default:
-Logger.info(topic.toString())
+Logger.info(`Bitcoind RPC : ${topic.toString()}`)
 }
 })
 }
@@ -61,7 +61,7 @@ class LatestBlock {
 this.time = header.mediantime
 this.diff = header.difficulty
-Logger.info(`Block ${this.height} ${this.hash}`)
+Logger.info(`Bitcoind RPC : Block ${this.height} ${this.hash}`)
 }
 }

lib/bitcoind-rpc/rpc-client.js (2)

@@ -78,7 +78,7 @@ class RpcClient {
 await client.getblockchaininfo()
 } catch(e) {
 client = null
-Logger.info('Bitcoind RPC API is still unreachable. New attempt in 20s.')
+Logger.info('Bitcoind RPC : API is still unreachable. New attempt in 20s.')
 return util.delay(20000).then(() => {
 return RpcClient.waitForBitcoindRpcApi()
 })

lib/bitcoind-rpc/transactions.js (6)

@@ -65,7 +65,7 @@ class Transactions {
 return await util.seriesCall(txs, async tx => {
 if (tx.result == null) {
-Logger.info(` got null for ${txids[tx.id]}`)
+Logger.info(`Bitcoind RPC : got null for ${txids[tx.id]}`)
 return null
 } else {
 return this._prepareTxResult(tx.result, fees)
@@ -73,7 +73,7 @@ class Transactions {
 })
 } catch(e) {
-Logger.error(e, 'Transaction.getTransactions()')
+Logger.error(e, 'Bitcoind RPC : Transaction.getTransactions()')
 return Promise.reject(errors.generic.GEN)
 }
 }
@@ -99,7 +99,7 @@ class Transactions {
 this.txCache.set(txid, ret)
 return ret
 } catch(e) {
-Logger.error(e, 'Transaction.getTransaction()')
+Logger.error(e, 'Bitcoind RPC : Transaction.getTransaction()')
 return Promise.reject(errors.generic.GEN)
 }
 }

lib/db/mysql-db-wrapper.js (24)

@@ -312,21 +312,21 @@ class MySqlDbWrapper {
 handleConnect() {
 try {
 this.pool = mysql.createPool(this.dbConfig)
-Logger.info(`Created a database pool of ${this.dbConfig.connectionLimit} connections`)
+Logger.info(`Db Wrapper : Created a database pool of ${this.dbConfig.connectionLimit} connections`)
 if (debug) {
 this.pool.on('acquire', function (conn) {
-Logger.info(`Connection ${conn.threadId} acquired`)
+Logger.info(`Db Wrapper : Connection ${conn.threadId} acquired`)
 })
 this.pool.on('enqueue', function (conn) {
-Logger.info('Waiting for a new connection slot')
+Logger.info('Db Wrapper : Waiting for a new connection slot')
 })
 this.pool.on('release', function (conn) {
-Logger.info(`Connection ${conn.threadId} released`)
+Logger.info(`Db Wrapper : Connection ${conn.threadId} released`)
 })
 }
 } catch(e) {
-Logger.error(err, 'MySqlDbWrapper.handleConnect() : Problem met while trying to initialize a new pool')
+Logger.error(err, 'Db Wrapper : handleConnect() : Problem met while trying to initialize a new pool')
 throw e
 }
 }
@@ -348,7 +348,7 @@ class MySqlDbWrapper {
 // Destroy previous pool
 this.pool.end(err => {
 if (err) {
-Logger.error(err, 'MySqlDbWrapper.handleReconnect() : Problem met while terminating the pool')
+Logger.error(err, 'Db Wrapper : handleReconnect() : Problem met while terminating the pool')
 this.timerReconnect = setTimeout(this.handleReconnect.bind(this), 2000)
 } else {
 this.handleConnect()
@@ -362,14 +362,14 @@ class MySqlDbWrapper {
 * Ping the mysql server
 */
 ping() {
-debug && Logger.info(`MySqlDbWrapper.ping() : ${this.pool._freeConnections.length} free connections`)
+debug && Logger.info(`Db Wrapper : ping() : ${this.pool._freeConnections.length} free connections`)
 // Iterate over all free connections
 // which might have been disconnected by the mysql server
 for (let c of this.pool._freeConnections) {
 c.query('SELECT 1', (err, res, fields) => {
 if (debug && err) {
-Logger.error(err, `MySqlDbWrapper.ping() : Ping Error`)
+Logger.error(err, `Db Wrapper : ping() : Ping Error`)
 }
 })
 }
@@ -379,7 +379,7 @@ class MySqlDbWrapper {
 * Send a query
 */
 async _query(query, retries) {
-queryDebug && Logger.info(query)
+queryDebug && Logger.info(`Db Wrapper : ${query}`)
 if (retries == null)
 retries = 5
@@ -406,7 +406,7 @@ class MySqlDbWrapper {
 reject(err)
 }
 } else {
-queryDebug && Logger.info(result)
+queryDebug && Logger.info(`Db Wrapper : ${result}`)
 resolve(result)
 }
 })
@@ -421,8 +421,8 @@ class MySqlDbWrapper {
 * Log a query error
 */
 queryError(err, query) {
-Logger.error(err, 'MySqlDbWrapper.query() : Query Error')
-Logger.error(query)
+Logger.error(err, 'Db Wrapper : query() : Query Error')
+Logger.error(null, `Db Wrapper : ${query}`)
 }
 /**

lib/http-server/http-server.js (6)

@@ -50,14 +50,14 @@ class HttpServer {
 // Error handler, should be final middleware
 this.app.use(function(err, req, res, next) {
 if (res.headersSent) return next(err)
-Logger.error(err.stack, 'HttpServer.start()')
+Logger.error(err.stack, 'HttpServer : start()')
 const ret = {status: 'Server error'}
 HttpServer.sendError(res, ret, 500)
 })
 // Start a http server
 this.server = this.app.listen(this.port, this.host, () => {
-Logger.info(`HTTP server listening on ${this.host}:${this.port}`)
+Logger.info(`HttpServer : Listening on ${this.host}:${this.port}`)
 })
 this.server.timeout = 600 * 1000
@@ -184,7 +184,7 @@ class HttpServer {
 * @param {function} next - next middleware
 */
 static requestLogger(req, res, next) {
-Logger.info(`${req.method} ${req.url}`)
+Logger.info(`HttpServer : ${req.method} ${req.url}`)
 next()
 }

lib/indexer-rpc/rpc-client.js (2)

@@ -82,7 +82,7 @@ class RpcClient {
 await client.sendRequest('server.version', 'dojo', ['1.0', '1.4'])
 } catch(e) {
 client = null
-Logger.info('Indexer RPC API is still unreachable. New attempt in 20s.')
+Logger.info('Indexer RPC : API is still unreachable. New attempt in 20s.')
 return util.delay(20000).then(() => {
 return RpcClient.waitForIndexerRpcApi()
 })

9
lib/logger.js

@ -18,7 +18,7 @@ class Logger {
* @param {boolean} json - true if msg is a json object, false otherwise * @param {boolean} json - true if msg is a json object, false otherwise
*/ */
static info(msg, json) { static info(msg, json) {
const logEntry = Logger._formatLog(msg, json) const logEntry = Logger._formatLog('INFO', msg, json)
console.log(logEntry) console.log(logEntry)
} }
@ -28,7 +28,7 @@ class Logger {
* @param {string} msg - message associated to the error * @param {string} msg - message associated to the error
*/ */
static error(e, msg) { static error(e, msg) {
const logEntry = Logger._formatLog(msg) const logEntry = Logger._formatLog('ERROR', msg)
console.error(logEntry) console.error(logEntry)
//const errorEntry = Logger._formatLog(e) //const errorEntry = Logger._formatLog(e)
@ -40,10 +40,11 @@ class Logger {
/** /**
* Format log entry * Format log entry
* @param {string} level - log level label
* @param {string/object} msg * @param {string/object} msg
* @param {boolean} json - true if msg is a json object, false otherwise * @param {boolean} json - true if msg is a json object, false otherwise
*/ */
static _formatLog(msg, json) { static _formatLog(level, msg, json) {
json = json || false json = json || false
const data = json ? JSON.stringify(msg, null, 2) : msg const data = json ? JSON.stringify(msg, null, 2) : msg
@ -59,7 +60,7 @@ class Logger {
const s = util.pad10(D.getUTCSeconds()) const s = util.pad10(D.getUTCSeconds())
const ms = util.pad100(D.getUTCMilliseconds()) const ms = util.pad100(D.getUTCMilliseconds())
const parts = ['[', y, m, d, ' ', h, ':', mn, ':', s, '.', ms, ' ', mib, ' MiB', '] ', data] const parts = [y, '-', m, '-', d, 'T', h, ':', mn, ':', s, 'Z ', level, ' ', data]
return parts.join('') return parts.join('')
} }
} }
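
Note: with the level threaded through _formatLog(), entries drop the old `[YYYYMMDD HH:MM:SS.mmm NN MiB]` prefix in favor of a flat UTC timestamp plus level label, one line per entry, which Docker's logging drivers can consume directly. A self-contained sketch of the new output (padding helpers are inlined here; the project uses its own util functions):

    function formatLog(level, msg, json) {
      const data = (json || false) ? JSON.stringify(msg, null, 2) : msg
      const D = new Date()
      const p = n => String(n).padStart(2, '0')
      const ts = `${D.getUTCFullYear()}-${p(D.getUTCMonth() + 1)}-${p(D.getUTCDate())}` +
                 `T${p(D.getUTCHours())}:${p(D.getUTCMinutes())}:${p(D.getUTCSeconds())}Z`
      return `${ts} ${level} ${data}`
    }

    // formatLog('INFO', 'HttpServer : Listening on 127.0.0.1:8080')
    // => '2020-01-15T12:34:56Z INFO HttpServer : Listening on 127.0.0.1:8080'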

4
lib/remote-importer/bitcoind-wrapper.js

@ -70,7 +70,7 @@ class BitcoindWrapper extends Wrapper {
} }
if (filterAddr && ret.ntx > keys.addrFilterThreshold) { if (filterAddr && ret.ntx > keys.addrFilterThreshold) {
Logger.info(` import of ${address} rejected (too many transactions - ${ret.ntx})`) Logger.info(`Importer : Import of ${address} rejected (too many transactions - ${ret.ntx})`)
return { return {
address: address, address: address,
ntx: 0, ntx: 0,
@ -116,7 +116,7 @@ class BitcoindWrapper extends Wrapper {
for (let i in aRet) { for (let i in aRet) {
if (filterAddr && aRet[i].ntx > keys.addrFilterThreshold) { if (filterAddr && aRet[i].ntx > keys.addrFilterThreshold) {
Logger.info(` import of ${aRet[i].address} rejected (too many transactions - ${aRet[i].ntx})`) Logger.info(`Importer : Import of ${aRet[i].address} rejected (too many transactions - ${aRet[i].ntx})`)
aRet.splice(i, 1) aRet.splice(i, 1)
} }
} }

2
lib/remote-importer/esplora-wrapper.js

@ -93,7 +93,7 @@ class EsploraWrapper extends Wrapper {
return ret return ret
} else if (filterAddr && ret.ntx > keys.addrFilterThreshold) { } else if (filterAddr && ret.ntx > keys.addrFilterThreshold) {
// we have too many transactions // we have too many transactions
Logger.info(` import of ${ret.address} rejected (too many transactions - ${ret.ntx})`) Logger.info(`Importer : Import of ${ret.address} rejected (too many transactions - ${ret.ntx})`)
ret.txids = [] ret.txids = []
ret.ntx = 0 ret.ntx = 0
return ret return ret

4
lib/remote-importer/local-indexer-wrapper.js

@ -70,7 +70,7 @@ class LocalIndexerWrapper extends Wrapper {
} }
if (filterAddr && ret.ntx > keys.addrFilterThreshold) { if (filterAddr && ret.ntx > keys.addrFilterThreshold) {
Logger.info(` import of ${address} rejected (too many transactions - ${ret.ntx})`) Logger.info(`Importer : Import of ${address} rejected (too many transactions - ${ret.ntx})`)
return { return {
address: address, address: address,
ntx: 0, ntx: 0,
@ -123,7 +123,7 @@ class LocalIndexerWrapper extends Wrapper {
for (let i in aRet) { for (let i in aRet) {
if (filterAddr && aRet[i].ntx > keys.addrFilterThreshold) { if (filterAddr && aRet[i].ntx > keys.addrFilterThreshold) {
Logger.info(` import of ${aRet[i].address} rejected (too many transactions - ${aRet[i].ntx})`) Logger.info(`Importer : Import of ${aRet[i].address} rejected (too many transactions - ${aRet[i].ntx})`)
aRet.splice(i, 1) aRet.splice(i, 1)
} }
} }

4
lib/remote-importer/oxt-wrapper.js

@ -64,7 +64,7 @@ class OxtWrapper extends Wrapper {
// Check if we should filter this address // Check if we should filter this address
if (filterAddr && ret.ntx > keys.addrFilterThreshold) { if (filterAddr && ret.ntx > keys.addrFilterThreshold) {
Logger.info(` import of ${ret.address} rejected (too many transactions - ${ret.ntx})`) Logger.info(`Importer : Import of ${ret.address} rejected (too many transactions - ${ret.ntx})`)
return ret return ret
} }
@ -98,7 +98,7 @@ class OxtWrapper extends Wrapper {
// Check if we should filter this address // Check if we should filter this address
if (filterAddr && retAddr.ntx > keys.addrFilterThreshold) { if (filterAddr && retAddr.ntx > keys.addrFilterThreshold) {
Logger.info(` import of ${retAddr.address} rejected (too many transactions - ${retAddr.ntx})`) Logger.info(`Importer : Import of ${retAddr.address} rejected (too many transactions - ${retAddr.ntx})`)
} else { } else {
retAddr.txids = r.txids retAddr.txids = r.txids
} }

28
lib/remote-importer/remote-importer.js

@ -133,14 +133,14 @@ class RemoteImporter {
return Promise.reject(errors.xpub.INVALID) return Promise.reject(errors.xpub.INVALID)
if (this.importing[xpub]) { if (this.importing[xpub]) {
Logger.info(` Import overlap for ${xpub}`) Logger.info(`Importer : Import overlap for ${xpub}`)
return Promise.reject(errors.xpub.OVERLAP) return Promise.reject(errors.xpub.OVERLAP)
} }
this.importing[xpub] = true this.importing[xpub] = true
const ts = hdaHelper.typeString(type) const ts = hdaHelper.typeString(type)
Logger.info(`Importing ${xpub} ${ts}`) Logger.info(`Importer : Importing ${xpub} ${ts}`)
const t0 = Date.now() const t0 = Date.now()
const chains = [0,1] const chains = [0,1]
@ -181,9 +181,9 @@ class RemoteImporter {
await this._importTransactions(aAddresses, txns) await this._importTransactions(aAddresses, txns)
} catch(e) { } catch(e) {
Logger.error(e, `RemoteImporter.importHDAccount() : xpub ${xpub}`) Logger.error(e, `Importer : RemoteImporter.importHDAccount() : xpub ${xpub}`)
} finally { } finally {
Logger.info(` xpub import done in ${((Date.now() - t0)/1000).toFixed(1)}s`) Logger.info(`Importer : xpub import done in ${((Date.now() - t0)/1000).toFixed(1)}s`)
delete this.importing[xpub] delete this.importing[xpub]
return true return true
} }
@ -233,7 +233,7 @@ class RemoteImporter {
// Update derived index // Update derived index
d = u + G d = u + G
Logger.info(` derived M/${c}/${A.join(',')}`) Logger.info(`Importer : derived M/${c}/${A.join(',')}`)
const addrMap = {} const addrMap = {}
for (let a of ret.addresses) for (let a of ret.addresses)
@ -261,7 +261,7 @@ class RemoteImporter {
} }
} }
Logger.info(` Got ${scanTx.length} transactions`) Logger.info(`Importer : Got ${scanTx.length} transactions`)
// Retrieve the transactions by batches of 200 transactions // Retrieve the transactions by batches of 200 transactions
const txsChunks = util.splitList(scanTx, 200) const txsChunks = util.splitList(scanTx, 200)
@ -276,7 +276,7 @@ class RemoteImporter {
} }
} }
} catch(e) { } catch(e) {
Logger.error(e, `RemoteImporter.xpubScan() : getTransactions error`) Logger.error(e, `Importer : RemoteImporter.xpubScan() : getTransactions error`)
} }
if (gotTransactions) { if (gotTransactions) {
@ -290,7 +290,7 @@ class RemoteImporter {
} }
} catch(e) { } catch(e) {
Logger.error(e, `RemoteImporter.xpubScan() : xpub ${xpub} ${c} ${d} ${u} ${G}`) Logger.error(e, `Importer : RemoteImporter.xpubScan() : xpub ${xpub} ${c} ${d} ${u} ${G}`)
} finally { } finally {
// Push everything up the rabbit hole // Push everything up the rabbit hole
return ret return ret
@ -313,14 +313,14 @@ class RemoteImporter {
addresses.push(address) addresses.push(address)
this.importing[address] = true this.importing[address] = true
} else { } else {
Logger.info(`Note: Import overlap for ${address}. Skipping`) Logger.info(`Importer : Import overlap for ${address}. Skipping`)
} }
} }
if (addresses.length == 0) if (addresses.length == 0)
return true return true
Logger.info(`Importing ${addresses.join(',')}`) Logger.info(`Importer : Importing ${addresses.join(',')}`)
try { try {
const scanTx = [] const scanTx = []
@ -338,7 +338,7 @@ class RemoteImporter {
} }
} }
Logger.info(` Got ${scanTx.length} transactions`) Logger.info(`Importer : Got ${scanTx.length} transactions`)
// Retrieve the transactions by batches of 100 transactions // Retrieve the transactions by batches of 100 transactions
const txsChunks = util.splitList(scanTx, 100) const txsChunks = util.splitList(scanTx, 100)
@ -354,7 +354,7 @@ class RemoteImporter {
await this._importTransactions(addresses, txns) await this._importTransactions(addresses, txns)
} catch(e) { } catch(e) {
Logger.error(e, `RemoteImporter.importAddresses() : ${candidates.join(',')}`) Logger.error(e, `Importer : RemoteImporter.importAddresses() : ${candidates.join(',')}`)
} finally { } finally {
const dt = Date.now() - t0 const dt = Date.now() - t0
@ -362,7 +362,7 @@ class RemoteImporter {
const N = addresses.length const N = addresses.length
if (N > 0) if (N > 0)
Logger.info(` Imported ${N} addresses in ${ts}s (${(dt/N).toFixed(0)} ms/addr)`) Logger.info(`Importer : Imported ${N} addresses in ${ts}s (${(dt/N).toFixed(0)} ms/addr)`)
for (let address of addresses) for (let address of addresses)
delete this.importing[address] delete this.importing[address]
@ -451,7 +451,7 @@ class RemoteImporter {
await db.addInputs(inputs) await db.addInputs(inputs)
} catch(e) { } catch(e) {
Logger.error(e, `RemoteImporter.addTransactions() :`) Logger.error(e, `Importer : RemoteImporter.addTransactions() :`)
} }
} }
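
Note: the "Import overlap" messages above come from an in-process lock: `this.importing` maps each xpub or address being imported to true and is cleared in a finally block. The guard, reduced to its essentials (class and method names are illustrative):

    class Importer {
      constructor() {
        this.importing = {}             // keys currently being imported
      }

      async importKey(key, work) {
        if (this.importing[key]) {
          console.log(`Importer : Import overlap for ${key}`)
          throw new Error('OVERLAP')
        }
        this.importing[key] = true
        try {
          return await work()
        } finally {
          delete this.importing[key]    // always released, even on failure
        }
      }
    }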

6
lib/remote-importer/sources-mainnet.js

@ -34,16 +34,16 @@ class SourcesMainnet extends Sources {
// If local bitcoind option is activated // If local bitcoind option is activated
// we'll use the local node as our unique source // we'll use the local node as our unique source
this.source = new BitcoindWrapper() this.source = new BitcoindWrapper()
Logger.info('Activated Bitcoind as the data source for imports') Logger.info('Importer : Activated Bitcoind as the data source for imports')
} else if (keys.indexer.active == 'local_indexer') { } else if (keys.indexer.active == 'local_indexer') {
// If local indexer option is activated // If local indexer option is activated
// we'll use the local indexer as our unique source // we'll use the local indexer as our unique source
this.source = new LocalIndexerWrapper() this.source = new LocalIndexerWrapper()
Logger.info('Activated local indexer as the data source for imports') Logger.info('Importer : Activated local indexer as the data source for imports')
} else { } else {
// Otherwise, we'll use the rest api provided by OXT // Otherwise, we'll use the rest api provided by OXT
this.source = new OxtWrapper(keys.indexer.oxt) this.source = new OxtWrapper(keys.indexer.oxt)
Logger.info('Activated OXT API as the data source for imports') Logger.info('Importer : Activated OXT API as the data source for imports')
} }
} }

6
lib/remote-importer/sources-testnet.js

@ -35,16 +35,16 @@ class SourcesTestnet extends Sources {
// If local bitcoind option is activated // If local bitcoind option is activated
// we'll use the local node as our unique source // we'll use the local node as our unique source
this.source = new BitcoindWrapper() this.source = new BitcoindWrapper()
Logger.info('Activated Bitcoind as the data source for imports') Logger.info('Importer : Activated Bitcoind as the data source for imports')
} else if (keys.indexer.active == 'local_indexer') { } else if (keys.indexer.active == 'local_indexer') {
// If local indexer option is activated // If local indexer option is activated
// we'll use the local indexer as our unique source // we'll use the local indexer as our unique source
this.source = new LocalIndexerWrapper() this.source = new LocalIndexerWrapper()
Logger.info('Activated local indexer as the data source for imports') Logger.info('Importer : Activated local indexer as the data source for imports')
} else { } else {
// Otherwise, we'll use the rest api provided by Esplora // Otherwise, we'll use the rest api provided by Esplora
this.source = new EsploraWrapper(keys.indexer.esplora) this.source = new EsploraWrapper(keys.indexer.esplora)
Logger.info('Activated Esplora API as the data source for imports') Logger.info('Importer : Activated Esplora API as the data source for imports')
} }
} }

4
lib/remote-importer/sources.js

@ -48,7 +48,7 @@ class Sources {
ret.txids = result.txids ret.txids = result.txids
} catch(e) { } catch(e) {
Logger.error(null, `Sources.getAddress() : ${address} from ${this.source.base}`) Logger.error(null, `Importer : Sources.getAddress() : ${address} from ${this.source.base}`)
} finally { } finally {
return ret return ret
} }
@ -75,7 +75,7 @@ class Sources {
} }
} catch(e) { } catch(e) {
Logger.error(null, `Sources.getAddresses() : Error while requesting ${addresses} from ${this.source.base}`) Logger.error(null, `Importer : Sources.getAddresses() : Error while requesting ${addresses} from ${this.source.base}`)
} finally { } finally {
return ret return ret
} }

4
pushtx/index-orchestrator.js

@ -18,8 +18,8 @@
/** /**
* PushTx Orchestrator * PushTx Orchestrator
*/ */
Logger.info('Process ID: ' + process.pid) Logger.info('Orchestrator : Process ID: ' + process.pid)
Logger.info('Preparing the pushTx Orchestrator') Logger.info('Orchestrator : Preparing the pushTx Orchestrator')
// Wait for Bitcoind RPC API // Wait for Bitcoind RPC API
// being ready to process requests // being ready to process requests

4
pushtx/index.js

@ -19,8 +19,8 @@
/** /**
* PushTx API * PushTx API
*/ */
Logger.info('Process ID: ' + process.pid) Logger.info('PushTx : Process ID: ' + process.pid)
Logger.info('Preparing the pushTx API') Logger.info('PushTx : Preparing the pushTx API')
// Wait for Bitcoind RPC API // Wait for Bitcoind RPC API
// being ready to process requests // being ready to process requests

22
pushtx/orchestrator.js

@ -59,11 +59,11 @@ class Orchestrator {
this.onBlockHash(message) this.onBlockHash(message)
break break
default: default:
Logger.info(topic.toString()) Logger.info(`Orchestrator : ${topic.toString()}`)
} }
}) })
Logger.info('Listening for blocks') Logger.info('Orchestrator : Listening for blocks')
} }
/** /**
@ -80,7 +80,7 @@ class Orchestrator {
const header = await this.rpcClient.getblockheader(blockHash, true) const header = await this.rpcClient.getblockheader(blockHash, true)
const height = header.height const height = header.height
Logger.info(`Block ${height} ${blockHash}`) Logger.info(`Orchestrator : Block ${height} ${blockHash}`)
let nbTxsPushed let nbTxsPushed
let rpcConnOk = true let rpcConnOk = true
@ -102,7 +102,7 @@ class Orchestrator {
try { try {
parentTx = await this.rpcClient.getrawtransaction(tx.schParentTxid, true) parentTx = await this.rpcClient.getrawtransaction(tx.schParentTxid, true)
} catch(e) { } catch(e) {
Logger.error(e, 'Transaction.getTransaction()') Logger.error(e, 'Orchestrator : Transaction.getTransaction()')
} }
} }
@ -110,14 +110,14 @@ class Orchestrator {
// Push the transaction // Push the transaction
try { try {
await pushTxProcessor.pushTx(tx.schRaw) await pushTxProcessor.pushTx(tx.schRaw)
Logger.info(`Pushed scheduled transaction ${tx.schTxid}`) Logger.info(`Orchestrator : Pushed scheduled transaction ${tx.schTxid}`)
} catch(e) { } catch(e) {
const msg = 'A problem was met while trying to push a scheduled transaction' const msg = 'A problem was met while trying to push a scheduled transaction'
Logger.error(e, `Orchestrator.onBlockHash() : ${msg}`) Logger.error(e, `Orchestrator : Orchestrator.onBlockHash() : ${msg}`)
// Check if it's an issue with the connection to the RPC API // Check if it's an issue with the connection to the RPC API
// (=> immediately stop the loop) // (=> immediately stop the loop)
if (RpcClient.isConnectionError(e)) { if (RpcClient.isConnectionError(e)) {
Logger.info('Connection issue') Logger.info('Orchestrator : Connection issue')
rpcConnOk = false rpcConnOk = false
break break
} }
@ -130,7 +130,7 @@ class Orchestrator {
await this.updateTriggers(tx.schID, shift) await this.updateTriggers(tx.schID, shift)
} catch(e) { } catch(e) {
const msg = 'A problem was met while shifting scheduled transactions' const msg = 'A problem was met while shifting scheduled transactions'
Logger.error(e, `Orchestrator.onBlockHash() : ${msg}`) Logger.error(e, `Orchestrator : Orchestrator.onBlockHash() : ${msg}`)
} }
} }
@ -141,14 +141,14 @@ class Orchestrator {
nbTxsPushed++ nbTxsPushed++
} catch(e) { } catch(e) {
const msg = 'A problem was met while trying to delete a scheduled transaction' const msg = 'A problem was met while trying to delete a scheduled transaction'
Logger.error(e, `Orchestrator.onBlockHash() : ${msg}`) Logger.error(e, `Orchestrator : Orchestrator.onBlockHash() : ${msg}`)
} }
} }
} }
} while (rpcConnOk && nbTxsPushed > 0) } while (rpcConnOk && nbTxsPushed > 0)
} catch(e) { } catch(e) {
Logger.error(e, 'Orchestrator.onBlockHash() : Error') Logger.error(e, 'Orchestrator : Orchestrator.onBlockHash() : Error')
} finally { } finally {
// Release the semaphor // Release the semaphor
await this._onBlockHashSemaphor.release() await this._onBlockHashSemaphor.release()
@ -173,7 +173,7 @@ class Orchestrator {
await db.updateTriggerScheduledTransaction(tx.schID, newTrigger) await db.updateTriggerScheduledTransaction(tx.schID, newTrigger)
// Update the triggers of next transactions in the chain // Update the triggers of next transactions in the chain
await this.updateTriggers(tx.schID, shift) await this.updateTriggers(tx.schID, shift)
Logger.info(`Rescheduled tx ${tx.schTxid} (trigger=${newTrigger})`) Logger.info(`Orchestrator : Rescheduled tx ${tx.schTxid} (trigger=${newTrigger})`)
} }
} }
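
Note: onBlockHash() above runs under a semaphore and keeps iterating while the previous pass pushed at least one scheduled transaction, bailing out early when the RPC connection drops. The control flow, sketched with an illustrative semaphore and push callback:

    async function onBlockHash(semaphore, pushEligibleTxs) {
      await semaphore.acquire()
      try {
        let nbTxsPushed
        do {
          // pushEligibleTxs() resolves to the number of scheduled txs
          // pushed this pass; the real code also clears an rpcConnOk
          // flag on connection errors to break out of the loop
          nbTxsPushed = await pushEligibleTxs()
        } while (nbTxsPushed > 0)
      } catch (e) {
        console.error('Orchestrator : Orchestrator.onBlockHash() : Error', e)
      } finally {
        await semaphore.release()
      }
    }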

6
pushtx/pushtx-processor.js

@ -51,7 +51,7 @@ class PushTxProcessor {
const tx = bitcoin.Transaction.fromHex(rawtx) const tx = bitcoin.Transaction.fromHex(rawtx)
for (let output of tx.outs) for (let output of tx.outs)
value += output.value value += output.value
Logger.info('Push for ' + (value / 1e8).toFixed(8) + ' BTC') Logger.info('PushTx : Push for ' + (value / 1e8).toFixed(8) + ' BTC')
} catch(e) { } catch(e) {
throw errors.tx.PARSE throw errors.tx.PARSE
} }
@ -60,14 +60,14 @@ class PushTxProcessor {
// Attempt to send via RPC to the bitcoind instance // Attempt to send via RPC to the bitcoind instance
try { try {
const txid = await this.rpcClient.sendrawtransaction(rawtx) const txid = await this.rpcClient.sendrawtransaction(rawtx)
Logger.info('Pushed!') Logger.info('PushTx : Pushed!')
// Update the stats // Update the stats
status.updateStats(value) status.updateStats(value)
// Notify the tracker // Notify the tracker
this.notifSock.send(['pushtx', rawtx]) this.notifSock.send(['pushtx', rawtx])
return txid return txid
} catch(err) { } catch(err) {
Logger.info('Push failed') Logger.info('PushTx : Push failed')
throw err throw err
} }
} }
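
Note: the amount logged before a push is simply the sum of the outputs of the decoded transaction. A sketch with bitcoinjs-lib (the function name is illustrative):

    const bitcoin = require('bitcoinjs-lib')

    function pushedValueBTC(rawtx) {
      const tx = bitcoin.Transaction.fromHex(rawtx)
      let value = 0
      for (let output of tx.outs)
        value += output.value           // satoshis
      return (value / 1e8).toFixed(8)   // e.g. '0.01500000'
    }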

8
pushtx/pushtx-rest-api.js

@ -197,21 +197,21 @@ class PushTxRestApi {
} catch(e) {} } catch(e) {}
if (msg.code && msg.message) { if (msg.code && msg.message) {
Logger.error(null, 'Error ' + msg.code + ': ' + msg.message) Logger.error(null, 'PushTx : Error ' + msg.code + ': ' + msg.message)
ret = { ret = {
message: msg.message, message: msg.message,
code: msg.code code: msg.code
} }
} else { } else {
Logger.error(err.message, 'ERROR') Logger.error(err.message, 'PushTx : ')
ret = err.message ret = err.message
} }
} else { } else {
Logger.error(err, 'ERROR') Logger.error(err, 'PushTx : ')
ret = err ret = err
} }
} catch (e) { } catch (e) {
Logger.error(e, 'ERROR') Logger.error(e, 'PushTx : ')
ret = e ret = e
} finally { } finally {
HttpServer.sendError(res, ret) HttpServer.sendError(res, ret)

2
pushtx/status.js

@ -78,7 +78,7 @@ class Status {
await this._refreshNetworkInfo() await this._refreshNetworkInfo()
await this._refreshBlockchainInfo() await this._refreshBlockchainInfo()
} catch (e) { } catch (e) {
Logger.error(e, 'Status.getCurrent() : Error') Logger.error(e, 'PushTx : Status.getCurrent() : Error')
} finally { } finally {
return this.status return this.status
} }

4
pushtx/transactions-scheduler.js

@ -69,7 +69,7 @@ class TransactionsScheduler {
// Check that nlocktimes are matching // Check that nlocktimes are matching
if (!(tx.locktime && tx.locktime == entry.nlocktime)) { if (!(tx.locktime && tx.locktime == entry.nlocktime)) {
const msg = `TransactionsScheduler.schedule() : nLockTime mismatch : ${tx.locktime} - ${entry.nlocktime}` const msg = `TransactionsScheduler.schedule() : nLockTime mismatch : ${tx.locktime} - ${entry.nlocktime}`
Logger.error(null, msg) Logger.error(null, `PushTx : ${msg}`)
throw errors.pushtx.NLOCK_MISMATCH throw errors.pushtx.NLOCK_MISMATCH
} }
// Check that order of hop and nlocktime values are consistent // Check that order of hop and nlocktime values are consistent
@ -113,7 +113,7 @@ class TransactionsScheduler {
} }
parentId = await db.addScheduledTransaction(objTx) parentId = await db.addScheduledTransaction(objTx)
Logger.info(`Registered scheduled tx ${objTx.txid} (trigger=${objTx.trigger})`) Logger.info(`PushTx : Registered scheduled tx ${objTx.txid} (trigger=${objTx.trigger})`)
parentTxid = tx.getId() parentTxid = tx.getId()
parentNlocktime = entry.nlocktime parentNlocktime = entry.nlocktime
} }
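
Note: the scheduler rejects a chain entry whose decoded locktime disagrees with the nlocktime declared for it. The check, isolated into a sketch (the error constant is illustrative):

    const bitcoin = require('bitcoinjs-lib')

    function checkNlocktime(rawtx, nlocktime) {
      const tx = bitcoin.Transaction.fromHex(rawtx)
      if (!(tx.locktime && tx.locktime == nlocktime)) {
        const msg = `TransactionsScheduler.schedule() : nLockTime mismatch : ${tx.locktime} - ${nlocktime}`
        console.error(`PushTx : ${msg}`)
        throw new Error('NLOCK_MISMATCH')
      }
      return tx
    }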

12
tracker/block.js

@ -33,7 +33,7 @@ class Block extends TransactionsBundle {
* @returns {Promise - object[]} returns an array of transactions to be broadcast * @returns {Promise - object[]} returns an array of transactions to be broadcast
*/ */
async checkBlock() { async checkBlock() {
Logger.info('Beginning to process new block.') Logger.info('Tracker : Beginning to process new block.')
let block let block
const txsForBroadcast = [] const txsForBroadcast = []
@ -42,7 +42,7 @@ class Block extends TransactionsBundle {
block = bitcoin.Block.fromHex(this.hex) block = bitcoin.Block.fromHex(this.hex)
this.transactions = block.transactions this.transactions = block.transactions
} catch (e) { } catch (e) {
Logger.error(e, 'Block.checkBlock()') Logger.error(e, 'Tracker : Block.checkBlock()')
Logger.error(null, this.header) Logger.error(null, this.header)
return Promise.reject(e) return Promise.reject(e)
} }
@ -74,7 +74,7 @@ class Block extends TransactionsBundle {
blockParent: prevID blockParent: prevID
}) })
Logger.info(` Added block ${this.header.height} (id=${blockId})`) Logger.info(`Tracker : Added block ${this.header.height} (id=${blockId})`)
// Confirms the transactions // Confirms the transactions
const txids = this.transactions.map(t => t.getId()) const txids = this.transactions.map(t => t.getId())
@ -85,7 +85,7 @@ class Block extends TransactionsBundle {
// Logs and result returned // Logs and result returned
const dt = ((Date.now()-t0)/1000).toFixed(1) const dt = ((Date.now()-t0)/1000).toFixed(1)
const per = ((Date.now()-t0)/ntx).toFixed(0) const per = ((Date.now()-t0)/ntx).toFixed(0)
Logger.info(` Finished block ${this.header.height}, ${dt}s, ${ntx} tx, ${per}ms/tx`) Logger.info(`Tracker : Finished block ${this.header.height}, ${dt}s, ${ntx} tx, ${per}ms/tx`)
return txsForBroadcast return txsForBroadcast
} }
@ -96,7 +96,7 @@ class Block extends TransactionsBundle {
* @returns {Promise} * @returns {Promise}
*/ */
async checkBlockHeader(prevBlockID) { async checkBlockHeader(prevBlockID) {
Logger.info('Beginning to process new block header.') Logger.info('Tracker : Beginning to process new block header.')
// Insert the block header into the database // Insert the block header into the database
const blockId = await db.addBlock({ const blockId = await db.addBlock({
@ -106,7 +106,7 @@ class Block extends TransactionsBundle {
blockParent: prevBlockID blockParent: prevBlockID
}) })
Logger.info(` Added block header ${this.header.height} (id=${blockId})`) Logger.info(`Tracker : Added block header ${this.header.height} (id=${blockId})`)
return blockId return blockId
} }
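
Note: checkBlock() above parses the raw block, inserts a block row, then confirms the member transactions by txid. The shape of that flow (the db API is illustrative):

    const bitcoin = require('bitcoinjs-lib')

    async function checkBlock(hex, header, prevID, db) {
      const block = bitcoin.Block.fromHex(hex)            // parses header + transactions
      const blockId = await db.addBlock({
        blockHeight: header.height,
        blockHash: header.hash,
        blockTime: header.time,
        blockParent: prevID
      })
      const txids = block.transactions.map(t => t.getId())
      await db.confirmTransactions(txids, blockId)
      return blockId
    }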

44
tracker/blockchain-processor.js

@ -78,7 +78,7 @@ class BlockchainProcessor extends AbstractProcessor {
*/ */
async catchupIBDMode() { async catchupIBDMode() {
try { try {
Logger.info('Tracker Startup (IBD mode)') Logger.info('Tracker : Tracker Startup (IBD mode)')
const info = await this.client.getblockchaininfo() const info = await this.client.getblockchaininfo()
const daemonNbBlocks = info.blocks const daemonNbBlocks = info.blocks
@ -91,7 +91,7 @@ class BlockchainProcessor extends AbstractProcessor {
// If no header or block loaded by bitcoind => try later // If no header or block loaded by bitcoind => try later
if (daemonNbHeaders == 0 || daemonNbBlocks == 0) { if (daemonNbHeaders == 0 || daemonNbBlocks == 0) {
Logger.info('New attempt scheduled in 30s (waiting for block headers)') Logger.info('Tracker : New attempt scheduled in 30s (waiting for block headers)')
return util.delay(30000).then(() => { return util.delay(30000).then(() => {
return this.catchupIBDMode() return this.catchupIBDMode()
}) })
@ -101,7 +101,7 @@ class BlockchainProcessor extends AbstractProcessor {
// If blocks need to be downloaded by bitcoind => try later // If blocks need to be downloaded by bitcoind => try later
if (daemonNbBlocks - 1 <= dbMaxHeight) { if (daemonNbBlocks - 1 <= dbMaxHeight) {
Logger.info('New attempt scheduled in 10s (waiting for blocks)') Logger.info('Tracker : New attempt scheduled in 10s (waiting for blocks)')
return util.delay(10000).then(() => { return util.delay(10000).then(() => {
return this.catchupIBDMode() return this.catchupIBDMode()
}) })
@ -110,7 +110,7 @@ class BlockchainProcessor extends AbstractProcessor {
} else { } else {
const blockRange = _.range(dbMaxHeight + 1, daemonNbBlocks + 1) const blockRange = _.range(dbMaxHeight + 1, daemonNbBlocks + 1)
Logger.info(`Sync ${blockRange.length} blocks`) Logger.info(`Tracker : Sync ${blockRange.length} blocks`)
await util.seriesCall(blockRange, async height => { await util.seriesCall(blockRange, async height => {
try { try {
@ -118,13 +118,13 @@ class BlockchainProcessor extends AbstractProcessor {
const header = await this.client.getblockheader(blockHash, true) const header = await this.client.getblockheader(blockHash, true)
prevBlockId = await this.processBlockHeader(header, prevBlockId) prevBlockId = await this.processBlockHeader(header, prevBlockId)
} catch(e) { } catch(e) {
Logger.error(e, 'BlockchainProcessor.catchupIBDMode()') Logger.error(e, 'Tracker : BlockchainProcessor.catchupIBDMode()')
process.exit() process.exit()
} }
}, 'Tracker syncing', true) }, 'Tracker syncing', true)
// Schedule a new iteration (in case more blocks need to be loaded) // Schedule a new iteration (in case more blocks need to be loaded)
Logger.info('Start a new iteration') Logger.info('Tracker : Start a new iteration')
return this.catchupIBDMode() return this.catchupIBDMode()
} }
@ -134,7 +134,7 @@ class BlockchainProcessor extends AbstractProcessor {
} }
} catch(e) { } catch(e) {
Logger.error(e, 'BlockchainProcessor.catchupIBDMode()') Logger.error(e, 'Tracker : BlockchainProcessor.catchupIBDMode()')
throw e throw e
} }
} }
@ -149,7 +149,7 @@ class BlockchainProcessor extends AbstractProcessor {
*/ */
async catchupNormalMode() { async catchupNormalMode() {
try { try {
Logger.info('Tracker Startup (normal mode)') Logger.info('Tracker : Tracker Startup (normal mode)')
const info = await this.client.getblockchaininfo() const info = await this.client.getblockchaininfo()
const daemonNbBlocks = info.blocks const daemonNbBlocks = info.blocks
@ -162,7 +162,7 @@ class BlockchainProcessor extends AbstractProcessor {
// Compute blocks range to be processed // Compute blocks range to be processed
const blockRange = _.range(highest.blockHeight, daemonNbBlocks + 1) const blockRange = _.range(highest.blockHeight, daemonNbBlocks + 1)
Logger.info(`Sync ${blockRange.length} blocks`) Logger.info(`Tracker : Sync ${blockRange.length} blocks`)
// Process the blocks // Process the blocks
return util.seriesCall(blockRange, async height => { return util.seriesCall(blockRange, async height => {
@ -171,13 +171,13 @@ class BlockchainProcessor extends AbstractProcessor {
const header = await this.client.getblockheader(hash) const header = await this.client.getblockheader(hash)
return this.processBlock(header) return this.processBlock(header)
} catch(e) { } catch(e) {
Logger.error(e, 'BlockchainProcessor.catchupNormalMode()') Logger.error(e, 'Tracker : BlockchainProcessor.catchupNormalMode()')
process.exit() process.exit()
} }
}, 'Tracker syncing', true) }, 'Tracker syncing', true)
} catch(e) { } catch(e) {
Logger.error(e, 'BlockchainProcessor.catchupNormalMode()') Logger.error(e, 'Tracker : BlockchainProcessor.catchupNormalMode()')
} }
} }
@ -196,11 +196,11 @@ class BlockchainProcessor extends AbstractProcessor {
this.onBlockHash(message) this.onBlockHash(message)
break break
default: default:
Logger.info(topic.toString()) Logger.info(`Tracker : ${topic.toString()}`)
} }
}) })
Logger.info('Listening for blocks') Logger.info('Tracker : Listening for blocks')
} }
/** /**
@ -241,11 +241,11 @@ class BlockchainProcessor extends AbstractProcessor {
try { try {
const header = await this.client.getblockheader(blockHash, true) const header = await this.client.getblockheader(blockHash, true)
Logger.info(`Block #${header.height} ${blockHash}`) Logger.info(`Tracker : Block #${header.height} ${blockHash}`)
// Grab all headers between this block and last known // Grab all headers between this block and last known
headers = await this.chainBacktrace([header]) headers = await this.chainBacktrace([header])
} catch(err) { } catch(err) {
Logger.error(err, `BlockchainProcessor.onBlockHash() : error in getblockheader(${blockHash})`) Logger.error(err, `Tracker : BlockchainProcessor.onBlockHash() : error in getblockheader(${blockHash})`)
} }
if(headers == null) if(headers == null)
@ -267,7 +267,7 @@ class BlockchainProcessor extends AbstractProcessor {
}) })
} catch(e) { } catch(e) {
Logger.error(e, 'BlockchainProcessor.onBlockHash()') Logger.error(e, 'Tracker : BlockchainProcessor.onBlockHash()')
} finally { } finally {
// Release the semaphor // Release the semaphor
await this._onBlockHashSemaphor.release() await this._onBlockHashSemaphor.release()
@ -285,7 +285,7 @@ class BlockchainProcessor extends AbstractProcessor {
const deepest = headers[headers.length - 1] const deepest = headers[headers.length - 1]
if (headers.length > 1) if (headers.length > 1)
Logger.info(`chainBacktrace @ height ${deepest.height}, ${headers.length} blocks`) Logger.info(`Tracker : chainBacktrace @ height ${deepest.height}, ${headers.length} blocks`)
// Look for previous block in the database // Look for previous block in the database
const block = await db.getBlockByHash(deepest.previousblockhash) const block = await db.getBlockByHash(deepest.previousblockhash)
@ -313,7 +313,7 @@ class BlockchainProcessor extends AbstractProcessor {
if (txs.length > 0) { if (txs.length > 0) {
// Cancel confirmation of transactions included in reorg'd blocks // Cancel confirmation of transactions included in reorg'd blocks
Logger.info(`Backtrace: unconfirm ${txs.length} transactions in reorg`) Logger.info(`Tracker : Backtrace: unconfirm ${txs.length} transactions in reorg`)
const txids = txs.map(t => t.txnTxid) const txids = txs.map(t => t.txnTxid)
await db.unconfirmTransactions(txids) await db.unconfirmTransactions(txids)
} }
@ -345,12 +345,12 @@ class BlockchainProcessor extends AbstractProcessor {
// Process the blocks // Process the blocks
return util.seriesCall(blockRange, async height => { return util.seriesCall(blockRange, async height => {
try { try {
Logger.info(`Rescanning block ${height}`) Logger.info(`Tracker : Rescanning block ${height}`)
const hash = await this.client.getblockhash(height) const hash = await this.client.getblockhash(height)
const header = await this.client.getblockheader(hash) const header = await this.client.getblockheader(hash)
return this.processBlock(header) return this.processBlock(header)
} catch(e) { } catch(e) {
Logger.error(e, 'BlockchainProcessor.rescan()') Logger.error(e, 'Tracker : BlockchainProcessor.rescan()')
throw e throw e
} }
}, 'Tracker rescan', true) }, 'Tracker rescan', true)
@ -379,7 +379,7 @@ class BlockchainProcessor extends AbstractProcessor {
} catch(e) { } catch(e) {
// The show must go on. // The show must go on.
// TODO: further notification that this block did not check out // TODO: further notification that this block did not check out
Logger.error(e, 'BlockchainProcessor.processBlock()') Logger.error(e, 'Tracker : BlockchainProcessor.processBlock()')
} }
} }
@ -394,7 +394,7 @@ class BlockchainProcessor extends AbstractProcessor {
const block = new Block(null, header) const block = new Block(null, header)
return block.checkBlockHeader(prevBlockID) return block.checkBlockHeader(prevBlockID)
} catch(e) { } catch(e) {
Logger.error(e, 'BlockchainProcessor.processBlockHeader()') Logger.error(e, 'Tracker : BlockchainProcessor.processBlockHeader()')
throw e throw e
} }
} }

4
tracker/index.js

@ -16,8 +16,8 @@
const TrackerRestApi = require('./tracker-rest-api') const TrackerRestApi = require('./tracker-rest-api')
Logger.info('Process ID: ' + process.pid) Logger.info('Tracker : Process ID: ' + process.pid)
Logger.info('Preparing the tracker') Logger.info('Tracker : Preparing the tracker')
// Wait for Bitcoind RPC API // Wait for Bitcoind RPC API
// being ready to process requests // being ready to process requests

30
tracker/mempool-processor.js

@ -95,11 +95,11 @@ class MempoolProcessor extends AbstractProcessor {
this.onPushTx(message) this.onPushTx(message)
break break
default: default:
Logger.info(topic.toString()) Logger.info(`Tracker : ${topic.toString()}`)
} }
}) })
Logger.info('Listening for pushTx') Logger.info('Tracker : Listening for pushTx')
// Socket listening to pushTx Orchestrator // Socket listening to pushTx Orchestrator
this.orchestratorSock = zmq.socket('sub') this.orchestratorSock = zmq.socket('sub')
@ -112,11 +112,11 @@ class MempoolProcessor extends AbstractProcessor {
this.onPushTx(message) this.onPushTx(message)
break break
default: default:
Logger.info(topic.toString()) Logger.info(`Tracker : ${topic.toString()}`)
} }
}) })
Logger.info('Listening for pushTx orchestrator') Logger.info('Tracker : Listening for pushTx orchestrator')
// Socket listening to bitcoind Txs messages // Socket listening to bitcoind Txs messages
this.txSock = zmq.socket('sub') this.txSock = zmq.socket('sub')
@ -129,11 +129,11 @@ class MempoolProcessor extends AbstractProcessor {
this.onTx(message) this.onTx(message)
break break
default: default:
Logger.info(topic.toString()) Logger.info(`Tracker : ${topic.toString()}`)
} }
}) })
Logger.info('Listening for mempool transactions') Logger.info('Tracker : Listening for mempool transactions')
} }
/** /**
@ -145,7 +145,7 @@ class MempoolProcessor extends AbstractProcessor {
await this._refreshActiveStatus() await this._refreshActiveStatus()
const activeLbl = this.isActive ? 'active' : 'inactive' const activeLbl = this.isActive ? 'active' : 'inactive'
Logger.info(`Processing ${activeLbl} Mempool (${this.mempoolBuffer.size()} transactions)`) Logger.info(`Tracker : Processing ${activeLbl} Mempool (${this.mempoolBuffer.size()} transactions)`)
let currentMempool = new TransactionsBundle(this.mempoolBuffer.toArray()) let currentMempool = new TransactionsBundle(this.mempoolBuffer.toArray())
this.mempoolBuffer.clear() this.mempoolBuffer.clear()
@ -171,7 +171,7 @@ class MempoolProcessor extends AbstractProcessor {
let tx = bitcoin.Transaction.fromBuffer(buf) let tx = bitcoin.Transaction.fromBuffer(buf)
this.mempoolBuffer.addTransaction(tx) this.mempoolBuffer.addTransaction(tx)
} catch (e) { } catch (e) {
Logger.error(e, 'MempoolProcessor.onTx()') Logger.error(e, 'Tracker : MempoolProcessor.onTx()')
return Promise.reject(e) return Promise.reject(e)
} }
} }
@ -190,7 +190,7 @@ class MempoolProcessor extends AbstractProcessor {
let pushedTx = bitcoin.Transaction.fromHex(buf.toString()) let pushedTx = bitcoin.Transaction.fromHex(buf.toString())
const txid = pushedTx.getId() const txid = pushedTx.getId()
Logger.info(`Processing tx for pushtx ${txid}`) Logger.info(`Tracker : Processing tx for pushtx ${txid}`)
if (!TransactionsBundle.cache.has(txid)) { if (!TransactionsBundle.cache.has(txid)) {
// Process the transaction // Process the transaction
@ -201,7 +201,7 @@ class MempoolProcessor extends AbstractProcessor {
this.notifyTx(txCheck.tx) this.notifyTx(txCheck.tx)
} }
} catch (e) { } catch (e) {
Logger.error(e, 'MempoolProcessor.onPushTx()') Logger.error(e, 'Tracker : MempoolProcessor.onPushTx()')
return Promise.reject(e) return Promise.reject(e)
} }
} }
@ -213,7 +213,7 @@ class MempoolProcessor extends AbstractProcessor {
async checkUnconfirmed() { async checkUnconfirmed() {
const t0 = Date.now() const t0 = Date.now()
Logger.info('Processing unconfirmed transactions') Logger.info('Tracker : Processing unconfirmed transactions')
const unconfirmedTxs = await db.getUnconfirmedTransactions() const unconfirmedTxs = await db.getUnconfirmedTransactions()
@ -226,7 +226,7 @@ class MempoolProcessor extends AbstractProcessor {
// Transaction is confirmed // Transaction is confirmed
const block = await db.getBlockByHash(rtx.blockhash) const block = await db.getBlockByHash(rtx.blockhash)
if (block && block.blockID) { if (block && block.blockID) {
Logger.info(`Marking TXID ${tx.txnTxid} confirmed`) Logger.info(`Tracker : Marking TXID ${tx.txnTxid} confirmed`)
return db.confirmTransactions([tx.txnTxid], block.blockID) return db.confirmTransactions([tx.txnTxid], block.blockID)
} }
}, },
@ -238,7 +238,7 @@ class MempoolProcessor extends AbstractProcessor {
} }
) )
} catch(e) { } catch(e) {
Logger.error(e, 'MempoolProcessor.checkUnconfirmed()') Logger.error(e, 'Tracker : MempoolProcessor.checkUnconfirmed()')
} }
}) })
} }
@ -247,7 +247,7 @@ class MempoolProcessor extends AbstractProcessor {
const ntx = unconfirmedTxs.length const ntx = unconfirmedTxs.length
const dt = ((Date.now() - t0) / 1000).toFixed(1) const dt = ((Date.now() - t0) / 1000).toFixed(1)
const per = (ntx == 0) ? 0 : ((Date.now() - t0) / ntx).toFixed(0) const per = (ntx == 0) ? 0 : ((Date.now() - t0) / ntx).toFixed(0)
Logger.info(` Finished processing unconfirmed transactions ${dt}s, ${ntx} tx, ${per}ms/tx`) Logger.info(`Tracker : Finished processing unconfirmed transactions ${dt}s, ${ntx} tx, ${per}ms/tx`)
} }
/** /**
@ -273,7 +273,7 @@ class MempoolProcessor extends AbstractProcessor {
* Log mempool statistics * Log mempool statistics
*/ */
displayMempoolStats() { displayMempoolStats() {
Logger.info(`Mempool Size: ${this.mempoolBuffer.size()}`) Logger.info(`Tracker : Mempool Size: ${this.mempoolBuffer.size()}`)
} }
} }
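
Note: the mempool processor wires three ZeroMQ SUB sockets the same way: connect, subscribe to a topic, dispatch on the topic string. One of them, sketched with the zeromq v5 API (the endpoint is illustrative):

    const zmq = require('zeromq')

    const txSock = zmq.socket('sub')
    txSock.connect('tcp://bitcoind:9501')
    txSock.subscribe('rawtx')

    txSock.on('message', (topic, message) => {
      switch (topic.toString()) {
        case 'rawtx':
          // message is the raw transaction buffer from bitcoind
          break
        default:
          console.log(`Tracker : ${topic.toString()}`)
      }
    })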

10
tracker/transaction.js

@ -63,7 +63,7 @@ class Transaction {
} }
} catch(e) { } catch(e) {
Logger.error(e, 'Transaction.checkTransaction()') Logger.error(e, 'Tracker : Transaction.checkTransaction()')
return Promise.reject(e) return Promise.reject(e)
} }
} }
@ -122,7 +122,7 @@ class Transaction {
// Detect potential double spends // Detect potential double spends
if (r.spendingTxnID !== null && r.spendingTxnID != this.storedTxnID) { if (r.spendingTxnID !== null && r.spendingTxnID != this.storedTxnID) {
Logger.info(`DOUBLE SPEND of ${r.txnTxid}-${r.outIndex} by ${this.txid}!`) Logger.info(`Tracker : DOUBLE SPEND of ${r.txnTxid}-${r.outIndex} by ${this.txid}!`)
// Delete the existing transaction that has been double-spent: // Delete the existing transaction that has been double-spent:
// since the deepest block keeps its transactions, this will // since the deepest block keeps its transactions, this will
// eventually work itself out, and the wallet will not show // eventually work itself out, and the wallet will not show
@ -366,7 +366,7 @@ class Transaction {
const derived = await hdaHelper.deriveAddresses(xpub, chain, indices, hdType) const derived = await hdaHelper.deriveAddresses(xpub, chain, indices, hdType)
Array.prototype.push.apply(newAddresses, derived) Array.prototype.push.apply(newAddresses, derived)
Logger.info(`Derived hdID(${hdAccount.hdID}) M/${chain}/${indices.join(',')}`) Logger.info(`Tracker : Derived hdID(${hdAccount.hdID}) M/${chain}/${indices.join(',')}`)
// Update view of derived address indices // Update view of derived address indices
derivedIndices[chain] = chainMaxUsedIndex + gapLimit[chain] derivedIndices[chain] = chainMaxUsedIndex + gapLimit[chain]
@ -374,7 +374,7 @@ class Transaction {
// Check derived addresses for use in this transaction // Check derived addresses for use in this transaction
for (let d of derived) { for (let d of derived) {
if (indexedOutputs[d.address]) { if (indexedOutputs[d.address]) {
Logger.info(`Derived address already in outputs: M/${d.chain}/${d.index}`) Logger.info(`Tracker : Derived address already in outputs: M/${d.chain}/${d.index}`)
// This transaction spends to an address // This transaction spends to an address
// beyond the original derived gap limit! // beyond the original derived gap limit!
chainMaxUsedIndex = d.index chainMaxUsedIndex = d.index
@ -405,7 +405,7 @@ class Transaction {
locktime: this.tx.locktime, locktime: this.tx.locktime,
}) })
Logger.info(`Storing transaction ${this.txid}`) Logger.info(`Tracker : Storing transaction ${this.txid}`)
} }
} }
