
Merge pull request #154 from Samourai-Wallet/feat_mydojo_logs

manage all logs with docker log system
Authored by kenshin samourai 5 years ago, committed by GitHub. Parent commit: f94b0f4dd8
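In short: instead of writing their own log files, the Dojo services now send everything to stdout/stderr so Docker's json-file logging driver collects and rotates it. A minimal sketch of how logs are read after this change (service and container names as declared in this repository's compose files):

```
# read the logs of all containers through the reworked helper script
./dojo.sh logs
# or ask Docker directly for a single container, e.g. the NodeJS one:
docker logs --tail 50 nodejs
```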
63 changed files (changed lines in parentheses):

  1. accounts/api-helper.js (2)
  2. accounts/fees-rest-api.js (2)
  3. accounts/headers-rest-api.js (4)
  4. accounts/index-cluster.js (6)
  5. accounts/index.js (4)
  6. accounts/multiaddr-rest-api.js (4)
  7. accounts/notifications-server.js (6)
  8. accounts/notifications-service.js (34)
  9. accounts/status-rest-api.js (2)
  10. accounts/support-rest-api.js (26)
  11. accounts/transactions-rest-api.js (8)
  12. accounts/unspent-rest-api.js (4)
  13. accounts/xpub-rest-api.js (22)
  14. doc/DOCKER_mac_setup.MD (4)
  15. doc/DOCKER_setup.md (18)
  16. docker/my-dojo/.env (6)
  17. docker/my-dojo/bitcoin/restart.sh (1)
  18. docker/my-dojo/docker-compose.yaml (25)
  19. docker/my-dojo/dojo.sh (85)
  20. docker/my-dojo/explorer/Dockerfile (8)
  21. docker/my-dojo/explorer/restart.sh (2)
  22. docker/my-dojo/nginx/Dockerfile (6)
  23. docker/my-dojo/nginx/nginx.conf (5)
  24. docker/my-dojo/node/Dockerfile (8)
  25. docker/my-dojo/node/restart.sh (14)
  26. docker/my-dojo/overrides/bitcoind.install.yaml (5)
  27. docker/my-dojo/overrides/explorer.install.yaml (10)
  28. docker/my-dojo/overrides/indexer.install.yaml (5)
  29. docker/my-dojo/overrides/whirlpool.install.yaml (5)
  30. docker/my-dojo/whirlpool/restart.sh (1)
  31. lib/auth/authentication-manager.js (2)
  32. lib/auth/localapikey-strategy-configurator.js (6)
  33. lib/bitcoin/addresses-helper.js (2)
  34. lib/bitcoin/hd-accounts-helper.js (6)
  35. lib/bitcoin/hd-accounts-service.js (10)
  36. lib/bitcoind-rpc/fees.js (2)
  37. lib/bitcoind-rpc/latest-block.js (4)
  38. lib/bitcoind-rpc/rpc-client.js (2)
  39. lib/bitcoind-rpc/transactions.js (6)
  40. lib/db/mysql-db-wrapper.js (24)
  41. lib/http-server/http-server.js (6)
  42. lib/indexer-rpc/rpc-client.js (2)
  43. lib/logger.js (9)
  44. lib/remote-importer/bitcoind-wrapper.js (4)
  45. lib/remote-importer/esplora-wrapper.js (2)
  46. lib/remote-importer/local-indexer-wrapper.js (4)
  47. lib/remote-importer/oxt-wrapper.js (4)
  48. lib/remote-importer/remote-importer.js (28)
  49. lib/remote-importer/sources-mainnet.js (6)
  50. lib/remote-importer/sources-testnet.js (6)
  51. lib/remote-importer/sources.js (4)
  52. pushtx/index-orchestrator.js (4)
  53. pushtx/index.js (4)
  54. pushtx/orchestrator.js (22)
  55. pushtx/pushtx-processor.js (6)
  56. pushtx/pushtx-rest-api.js (8)
  57. pushtx/status.js (2)
  58. pushtx/transactions-scheduler.js (4)
  59. tracker/block.js (12)
  60. tracker/blockchain-processor.js (44)
  61. tracker/index.js (4)
  62. tracker/mempool-processor.js (30)
  63. tracker/transaction.js (10)

accounts/api-helper.js (2)

@ -137,7 +137,7 @@ class ApiHelper {
HttpServer.sendError(res, errors.body.INVDATA)
Logger.error(
params,
`ApiHelper.validateEntitiesParams() : Invalid arguments`
`API : ApiHelper.validateEntitiesParams() : Invalid arguments`
)
}
}

accounts/fees-rest-api.js (2)

@ -46,7 +46,7 @@ class FeesRestApi {
} catch (e) {
HttpServer.sendError(res, e)
} finally {
debugApi && Logger.info(`Completed GET /fees`)
debugApi && Logger.info(`API : Completed GET /fees`)
}
}

accounts/headers-rest-api.js (4)

@ -49,7 +49,7 @@ class HeadersRestApi {
} catch(e) {
HttpServer.sendError(res, e)
} finally {
debugApi && Logger.info(`Completed GET /header/${req.params.hash}`)
debugApi && Logger.info(`API : Completed GET /header/${req.params.hash}`)
}
}
@ -66,7 +66,7 @@ class HeadersRestApi {
HttpServer.sendError(res, errors.body.INVDATA)
Logger.error(
req.params.hash,
'HeadersRestApi.validateArgsGetHeader() : Invalid hash'
'API : HeadersRestApi.validateArgsGetHeader() : Invalid hash'
)
} else {
next()

accounts/index-cluster.js (6)

@ -20,15 +20,15 @@ if (cluster.isMaster) {
})
cluster.on('listening', function(worker) {
Logger.info(`Cluster ${worker.process.pid} connected`)
Logger.info(`API : Cluster ${worker.process.pid} connected`)
})
cluster.on('disconnect', function(worker) {
Logger.info(`Cluster ${worker.process.pid} disconnected`)
Logger.info(`API : Cluster ${worker.process.pid} disconnected`)
})
cluster.on('exit', function(worker) {
Logger.info(`Cluster ${worker.process.pid} is dead`)
Logger.info(`API : Cluster ${worker.process.pid} is dead`)
// Ensuring a new cluster will start if an old one dies
cluster.fork()
})

accounts/index.js (4)

@ -28,8 +28,8 @@
/**
* Samourai REST API
*/
Logger.info('Process ID: ' + process.pid)
Logger.info('Preparing the REST API')
Logger.info('API : Process ID: ' + process.pid)
Logger.info('API : Preparing the REST API')
// Wait for Bitcoind RPC API
// being ready to process requests

accounts/multiaddr-rest-api.js (4)

@ -85,7 +85,7 @@ class MultiaddrRestApi {
${req.query.bip49 ? req.query.bip49 : ''} \
${req.query.bip84 ? req.query.bip84 : ''}`
Logger.info(`Completed GET /multiaddr ${strParams}`)
Logger.info(`API : Completed GET /multiaddr ${strParams}`)
}
}
}
@ -126,7 +126,7 @@ class MultiaddrRestApi {
${req.body.bip49 ? req.body.bip49 : ''} \
${req.body.bip84 ? req.body.bip84 : ''}`
Logger.info(`Completed POST /multiaddr ${strParams}`)
Logger.info(`API : Completed POST /multiaddr ${strParams}`)
}
}
}

accounts/notifications-server.js (6)

@ -61,7 +61,7 @@ class NotificationsServer {
const header = JSON.parse(message.toString())
this.notifService.notifyBlock(header)
} catch(e) {
Logger.error(e, 'NotificationServer._initTrackerSocket() : Error in block message')
Logger.error(e, 'API : NotificationServer._initTrackerSocket() : Error in block message')
}
break
case 'transaction':
@ -69,11 +69,11 @@ class NotificationsServer {
const tx = JSON.parse(message.toString())
this.notifService.notifyTransaction(tx)
} catch(e) {
Logger.error(e, 'NotificationServer._initTrackerSocket() : Error in transaction message')
Logger.error(e, 'API : NotificationServer._initTrackerSocket() : Error in transaction message')
}
break
default:
Logger.info(`Unknown ZMQ message topic: "${topic}"`)
Logger.info(`API : Unknown ZMQ message topic: "${topic}"`)
}
})
}

accounts/notifications-service.js (34)

@ -59,7 +59,7 @@ class NotificationsService {
_initWSServer(server) {
this.ws = new WebSocket.server({httpServer: server})
Logger.info('Created WebSocket server')
Logger.info('API : Created WebSocket server')
this.ws.on('request', req => {
try {
@ -67,14 +67,14 @@ class NotificationsService {
conn.id = status.sessions++
conn.subs = []
debug && Logger.info(`Client ${conn.id} connected`)
debug && Logger.info(`API : Client ${conn.id} connected`)
conn.on('close', () => {
this._closeWSConnection(conn, false)
})
conn.on('error', err => {
Logger.error(err, `NotificationsService : Error on connection ${conn.id}`)
Logger.error(err, `API : NotificationsService : Error on connection ${conn.id}`)
if (conn.connected)
this._closeWSConnection(conn, true)
})
@ -91,7 +91,7 @@ class NotificationsService {
status.maxConn = Math.max(status.maxConn, Object.keys(this.conn).length)
} catch(e) {
Logger.error(e, `NotificationsService._initWSServer() : Error during request accept`)
Logger.error(e, `API : NotificationsService._initWSServer() : Error during request accept`)
}
})
}
@ -120,10 +120,10 @@ class NotificationsService {
if (forcedClose && conn.connected)
conn.drop(1008, 'Get out of here!')
debug && Logger.info(`Client ${conn.id} disconnected`)
debug && Logger.info(`API : Client ${conn.id} disconnected`)
} catch(e) {
Logger.error(e, 'NotificationsService._closeWSConnection()')
Logger.error(e, 'API : NotificationsService._closeWSConnection()')
}
}
@ -134,7 +134,7 @@ class NotificationsService {
*/
_filterWSMessage(msg) {
if (this.cacheSubs.has(msg)) {
debug && Logger.info('Duplicate subscriptions detected')
debug && Logger.info('API : Duplicate subscriptions detected')
return false
} else {
this.cacheSubs.set(msg, true)
@ -150,7 +150,7 @@ class NotificationsService {
*/
_handleWSMessage(msg, conn) {
try {
debug && Logger.info(`Received from client ${conn.id}: ${msg}`)
debug && Logger.info(`API : Received from client ${conn.id}: ${msg}`)
const data = JSON.parse(msg)
@ -183,7 +183,7 @@ class NotificationsService {
break
}
} catch(e) {
Logger.error(e, 'NotificationsService._handleWSMessage() : WebSocket message error')
Logger.error(e, 'API : NotificationsService._handleWSMessage() : WebSocket message error')
}
}
@ -223,7 +223,7 @@ class NotificationsService {
this.subs[topic].push(conn.id)
debug && Logger.info(`Client ${conn.id} subscribed to ${topic}`)
debug && Logger.info(`API : Client ${conn.id} subscribed to ${topic}`)
}
/**
@ -267,7 +267,7 @@ class NotificationsService {
try {
this.conn[cid].sendUTF(msg)
} catch(e) {
Logger.error(e, `NotificationsService.dispatch() : Error sending dispatch for ${topic} to client ${cid}`)
Logger.error(e, `API : NotificationsService.dispatch() : Error sending dispatch for ${topic} to client ${cid}`)
}
}
}
@ -284,7 +284,7 @@ class NotificationsService {
}
this.dispatch('block', JSON.stringify(data))
} catch(e) {
Logger.error(e, `NotificationsService.notifyBlock()`)
Logger.error(e, `API : NotificationsService.notifyBlock()`)
}
}
@ -440,14 +440,14 @@ class NotificationsService {
try {
this.conn[cid].sendUTF(JSON.stringify(data))
debug && Logger.error(`Sent ctx ${ctx.hash} to client ${cid}`)
debug && Logger.error(`API : Sent ctx ${ctx.hash} to client ${cid}`)
} catch(e) {
Logger.error(e, `NotificationsService.notifyTransaction() : Trouble sending ctx to client ${cid}`)
Logger.error(e, `API : NotificationsService.notifyTransaction() : Trouble sending ctx to client ${cid}`)
}
}
} catch(e) {
Logger.error(e, `NotificationsService.notifyTransaction()`)
Logger.error(e, `API : NotificationsService.notifyTransaction()`)
}
}
@ -464,9 +464,9 @@ class NotificationsService {
try {
this.conn[cid].sendUTF(JSON.stringify(data))
debug && Logger.error(`Sent authentication error to client ${cid}`)
debug && Logger.error(`API : Sent authentication error to client ${cid}`)
} catch(e) {
Logger.error(e, `NotificationsService.notifyAuthError() : Trouble sending authentication error to client ${cid}`)
Logger.error(e, `API : NotificationsService.notifyAuthError() : Trouble sending authentication error to client ${cid}`)
}
}

accounts/status-rest-api.js (2)

@ -47,7 +47,7 @@ class StatusRestApi {
} catch(e) {
HttpServer.sendError(res, e)
} finally {
debugApi && Logger.info(`Completed GET /status`)
debugApi && Logger.info(`API : Completed GET /status`)
}
}

accounts/support-rest-api.js (26)

@ -108,7 +108,7 @@ class SupportRestApi {
HttpServer.sendError(res, errors.generic.GEN)
} finally {
debugApi && Logger.info(`Completed GET /support/address/${req.params.addr}/info`)
debugApi && Logger.info(`API : Completed GET /support/address/${req.params.addr}/info`)
}
}
@ -175,7 +175,7 @@ class SupportRestApi {
HttpServer.sendError(res, errors.generic.GEN)
} finally {
debugApi && Logger.info(`Completed GET /support/address/${req.params.addr}/rescan`)
debugApi && Logger.info(`API : Completed GET /support/address/${req.params.addr}/rescan`)
}
}
@ -212,7 +212,7 @@ class SupportRestApi {
HttpServer.sendError(res, errors.generic.GEN)
} finally {
debugApi && Logger.info(`Completed GET /support/xpub/${req.params.xpub}/info`)
debugApi && Logger.info(`API : Completed GET /support/xpub/${req.params.xpub}/info`)
}
}
@ -270,7 +270,7 @@ class SupportRestApi {
HttpServer.sendRawData(res, JSON.stringify(ret, null, 2))
} else {
ret.status = 'Rescan Error'
Logger.error(e, 'SupportRestApi.getXpubRescan() : Support rescan error')
Logger.error(e, 'API : SupportRestApi.getXpubRescan() : Support rescan error')
HttpServer.sendError(res, JSON.stringify(ret, null, 2))
}
}
@ -279,7 +279,7 @@ class SupportRestApi {
HttpServer.sendError(res, errors.generic.GEN)
} finally {
debugApi && Logger.info(`Completed GET /support/xpub/${req.params.xpub}/rescan`)
debugApi && Logger.info(`API : Completed GET /support/xpub/${req.params.xpub}/rescan`)
}
}
@ -300,10 +300,10 @@ class SupportRestApi {
const ret = {
status: 'error'
}
Logger.error(e, 'SupportRestApi.getPairing() : Support pairing error')
Logger.error(e, 'API : SupportRestApi.getPairing() : Support pairing error')
HttpServer.sendError(res, JSON.stringify(ret, null, 2))
} finally {
debugApi && Logger.info(`Completed GET /pairing`)
debugApi && Logger.info(`API : Completed GET /pairing`)
}
}
@ -318,7 +318,7 @@ class SupportRestApi {
url = fs.readFileSync('/var/lib/tor/hsv3explorer/hostname', 'utf8')
url = url.replace('\n', '')
} catch(e) {
Logger.error(e, 'SupportRestApi.getPairing() : Cannot read explorer onion address')
Logger.error(e, 'API : SupportRestApi.getPairing() : Cannot read explorer onion address')
}
}
const ret = {
@ -333,10 +333,10 @@ class SupportRestApi {
const ret = {
status: 'error'
}
Logger.error(e, 'SupportRestApi.getPairingExplorer() : Support pairing error')
Logger.error(e, 'API : SupportRestApi.getPairingExplorer() : Support pairing error')
HttpServer.sendError(res, JSON.stringify(ret, null, 2))
} finally {
debugApi && Logger.info(`Completed GET /pairing/explorer`)
debugApi && Logger.info(`API : Completed GET /pairing/explorer`)
}
}
@ -351,7 +351,7 @@ class SupportRestApi {
if (!isValidXpub) {
HttpServer.sendError(res, errors.body.INVDATA)
Logger.error(null, `SupportRestApi.validateArgsGetXpubInfo() : Invalid xpub ${req.params.xpub}`)
Logger.error(null, `API : SupportRestApi.validateArgsGetXpubInfo() : Invalid xpub ${req.params.xpub}`)
} else {
next()
}
@ -369,7 +369,7 @@ class SupportRestApi {
if (!(isValidXpub && isValidGap)) {
HttpServer.sendError(res, errors.body.INVDATA)
Logger.error(null, 'SupportRestApi.validateArgsGetXpubRescan() : Invalid arguments')
Logger.error(null, 'API : SupportRestApi.validateArgsGetXpubRescan() : Invalid arguments')
} else {
next()
}
@ -386,7 +386,7 @@ class SupportRestApi {
if (!isValidAddress) {
HttpServer.sendError(res, errors.body.INVDATA)
Logger.error(null, `SupportRestApi.validateAddress() : Invalid address ${req.params.addr}`)
Logger.error(null, `API : SupportRestApi.validateAddress() : Invalid address ${req.params.addr}`)
} else {
next()
}

accounts/transactions-rest-api.js (8)

@ -63,7 +63,7 @@ class TransactionsRestApi {
HttpServer.sendError(res, e)
} finally {
const strParams = `${req.query.fees ? req.query.fees : ''}`
debugApi && Logger.info(`Completed GET /tx/${req.params.txid} ${strParams}`)
debugApi && Logger.info(`API : Completed GET /tx/${req.params.txid} ${strParams}`)
}
}
@ -97,7 +97,7 @@ class TransactionsRestApi {
${req.query.page ? req.query.page : ''} \
${req.query.count ? req.query.count : ''}`
debugApi && Logger.info(`Completed GET /txs ${strParams}`)
debugApi && Logger.info(`API : Completed GET /txs ${strParams}`)
}
}
@ -118,7 +118,7 @@ class TransactionsRestApi {
HttpServer.sendError(res, errors.body.INVDATA)
Logger.error(
req.params,
'HeadersRestApi.validateArgsGetTransaction() : Invalid arguments'
'API : HeadersRestApi.validateArgsGetTransaction() : Invalid arguments'
)
Logger.error(req.query, '')
} else {
@ -145,7 +145,7 @@ class TransactionsRestApi {
HttpServer.sendError(res, errors.body.INVDATA)
Logger.error(
req.query,
'HeadersRestApi.validateArgsGetTransactions() : Invalid arguments'
'API : HeadersRestApi.validateArgsGetTransactions() : Invalid arguments'
)
} else {
next()

accounts/unspent-rest-api.js (4)

@ -85,7 +85,7 @@ class UnspentRestApi {
${req.query.bip49 ? req.query.bip49 : ''} \
${req.query.bip84 ? req.query.bip84 : ''}`
Logger.info(`Completed GET /unspent ${strParams}`)
Logger.info(`API : Completed GET /unspent ${strParams}`)
}
}
}
@ -126,7 +126,7 @@ class UnspentRestApi {
${req.body.bip49 ? req.body.bip49 : ''} \
${req.body.bip84 ? req.body.bip84 : ''}`
Logger.info(`Completed POST /unspent ${strParams}`)
Logger.info(`API : Completed POST /unspent ${strParams}`)
}
}
}

accounts/xpub-rest-api.js (22)

@ -151,7 +151,7 @@ class XPubRestApi {
return HttpServer.sendError(res, errors.generic.GEN)
} finally {
debugApi && Logger.info(`Completed POST /xpub ${req.body.xpub}`)
debugApi && Logger.info(`API : Completed POST /xpub ${req.body.xpub}`)
}
}
@ -194,11 +194,11 @@ class XPubRestApi {
HttpServer.sendOkData(res, ret)
} catch(e) {
Logger.error(e, 'XpubRestApi.getXpub()')
Logger.error(e, 'API : XpubRestApi.getXpub()')
HttpServer.sendError(res, e)
} finally {
debugApi && Logger.info(`Completed GET /xpub/${req.params.xpub}`)
debugApi && Logger.info(`API : Completed GET /xpub/${req.params.xpub}`)
}
}
@ -253,7 +253,7 @@ class XPubRestApi {
}
} finally {
debugApi && Logger.info(`Completed POST /xpub/${req.params.xpub}/lock`)
debugApi && Logger.info(`API : Completed POST /xpub/${req.params.xpub}/lock`)
}
}
@ -303,7 +303,7 @@ class XPubRestApi {
HttpServer.sendError(res, errors.generic.GEN)
} finally {
debugApi && Logger.info(`Completed DELETE /xpub/${req.params.xpub}`)
debugApi && Logger.info(`API : Completed DELETE /xpub/${req.params.xpub}`)
}
}
@ -327,8 +327,8 @@ class XPubRestApi {
xpub = hdaHelper.xlatXPUB(origXpub)
scheme = isYpub ? hdaHelper.BIP49 : hdaHelper.BIP84
if (trace) {
Logger.info('Converted: ' + origXpub)
Logger.info('Resulting xpub: ' + xpub)
Logger.info('API : Converted: ' + origXpub)
Logger.info('API : Resulting xpub: ' + xpub)
}
}
@ -371,7 +371,7 @@ class XPubRestApi {
HttpServer.sendError(res, errors.body.INVDATA)
Logger.error(
req.body,
'XpubRestApi.validateArgsPostXpub() : Invalid arguments'
'API : XpubRestApi.validateArgsPostXpub() : Invalid arguments'
)
} else {
next()
@ -391,7 +391,7 @@ class XPubRestApi {
HttpServer.sendError(res, errors.body.INVDATA)
Logger.error(
req.params.xpub,
'XpubRestApi.validateArgsGetXpub() : Invalid arguments'
'API : XpubRestApi.validateArgsGetXpub() : Invalid arguments'
)
} else {
next()
@ -414,7 +414,7 @@ class XPubRestApi {
HttpServer.sendError(res, errors.body.INVDATA)
Logger.error(
req.params,
'XpubRestApi.validateArgsPostLockXpub() : Invalid arguments'
'API : XpubRestApi.validateArgsPostLockXpub() : Invalid arguments'
)
Logger.error(req.body, '')
} else {
@ -437,7 +437,7 @@ class XPubRestApi {
HttpServer.sendError(res, errors.body.INVDATA)
Logger.error(
req.params,
'XpubRestApi.validateArgsDeleteXpub() : Invalid arguments'
'API : XpubRestApi.validateArgsDeleteXpub() : Invalid arguments'
)
Logger.error(req.body, '')
} else {

doc/DOCKER_mac_setup.MD (4)

@ -111,9 +111,9 @@ __"Download the most recent release of Dojo from Github"__ until you reach __"La
Once you Reach Step __"Launch the Installation of Your Dojo with"__ from above you will need to read and follow the instructions from [here](https://github.com/Samourai-Wallet/samourai-dojo/blob/develop/doc/DOCKER_advanced_setups.md)
Once adjustments are made to your external bitcoind bitcoin.conf __(location dependent on what device you have bitcoind)__ and docker-bitcoind.conf.tpl __(dojo_dir > docker > my-dojo > conf)__ you can proceed with Install and revert back to original instructions [here](https://github.com/Samourai-Wallet/samourai-dojo/blob/develop/doc/DOCKER_setup.md) at section __"Launch the Installation of Your Dojo with"__
_Note: For tracking progress, open terminal, change directory to my-dojo and run ./dojo.sh logs tracker
_Note: For tracking progress, open terminal, change directory to my-dojo and run ./dojo.sh logs nodejs
__Some possible optimization tips:__
If you notice that progress has stopped, click the whale icon and select Restart. Restart Logs Tracker from the step above to verify progress has resumed.
If you notice that progress has stopped, click the whale icon and select Restart. Restart logs nodejs from the step above to verify progress has resumed.
This may optimize speed: open __Activity Monitor__, check the PID (Process ID) of your docker process. Open Terminal and type:

doc/DOCKER_setup.md (18)

@ -182,7 +182,7 @@ Docker and Docker Compose are going to build the images and containers of your D
* Monitor the progress made for the initialization of the database with this command displaying the logs of the tracker
```
./dojo.sh logs tracker
./dojo.sh logs nodejs
```
Exit the logs with CTRL+C when the syncing of the database has completed.
@ -245,24 +245,22 @@ Available commands:
install Install your Dojo.
logs [module] [options] Display the logs of your Dojo. Use CTRL+C to stop the logs.
logs [module] [options] Display the logs of your dojo.
By default, the command displays the live logs. Use CTRL+C to stop the logs.
Use the -n option to display past logs.
Available modules:
dojo.sh logs : display the logs of all containers
dojo.sh logs : display the logs of all the Docker containers
dojo.sh logs bitcoind : display the logs of bitcoind
dojo.sh logs db : display the logs of the MySQL database
dojo.sh logs tor : display the logs of tor
dojo.sh logs nginx : display the logs of nginx
dojo.sh logs indexer : display the logs of the internal indexer
dojo.sh logs api : display the logs of the REST API (nodejs)
dojo.sh logs tracker : display the logs of the Tracker (nodejs)
dojo.sh logs pushtx : display the logs of the pushTx API (nodejs)
dojo.sh logs pushtx-orchest : display the logs of the Orchestrator (nodejs)
dojo.sh logs nodejs : display the logs of NodeJS modules (API, Tracker, PushTx API, Orchestrator)
dojo.sh logs explorer : display the logs of the Explorer
dojo.sh logs whirlpool : display the logs of the Whirlpool client
Available options (for api, tracker, pushtx, pushtx-orchest and explorer modules):
-d [VALUE] : select the type of log to be displayed.
VALUE can be output (default) or error.
Available options:
-n [VALUE] : display the last VALUE lines
onion Display the Tor onion addresses allowing to access the API, maintenance tool and block explorer of your Dojo.
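For example, combining the module names and the new -n option documented above (a usage sketch, not an exhaustive list):

```
./dojo.sh logs bitcoind        # follow the live bitcoind logs (CTRL+C to stop)
./dojo.sh logs nodejs -n 200   # print the last 200 lines of the NodeJS modules
```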

docker/my-dojo/.env (6)

@ -11,13 +11,13 @@
COMPOSE_CONVERT_WINDOWS_PATHS=1
DOJO_VERSION_TAG=1.6.0
DOJO_DB_VERSION_TAG=1.1.1
DOJO_BITCOIND_VERSION_TAG=1.5.0
DOJO_DB_VERSION_TAG=1.2.0
DOJO_BITCOIND_VERSION_TAG=1.6.0
DOJO_NODEJS_VERSION_TAG=1.6.0
DOJO_NGINX_VERSION_TAG=1.5.0
DOJO_TOR_VERSION_TAG=1.4.0
DOJO_EXPLORER_VERSION_TAG=1.3.0
DOJO_INDEXER_VERSION_TAG=1.0.0
DOJO_INDEXER_VERSION_TAG=1.1.0
DOJO_WHIRLPOOL_VERSION_TAG=1.0.0

docker/my-dojo/bitcoin/restart.sh (1)

@ -6,6 +6,7 @@ echo "## Start bitcoind #############################"
bitcoind_options=(
-bind=172.28.1.5
-datadir=/home/bitcoin/.bitcoin
-printtoconsole=1
-dbcache=$BITCOIND_DB_CACHE
-disablewallet=1
-dns=$BITCOIND_DNS
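The added -printtoconsole=1 makes bitcoind mirror its debug log to the container's stdout, which is what lets the reworked dojo.sh read it through Docker instead of tailing debug.log inside the container. For instance (container name as used elsewhere in this setup):

```
# bitcoind's log lines now reach Docker's logging driver directly
docker logs --tail 50 bitcoind
```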

docker/my-dojo/docker-compose.yaml (25)

@ -15,6 +15,11 @@ services:
- "3306"
volumes:
- data-mysql:/var/lib/mysql
logging:
driver: "json-file"
options:
max-size: "20m"
max-file: "10"
networks:
dojonet:
ipv4_address: 172.28.1.1
@ -40,8 +45,12 @@ services:
- "8081"
- "8082"
volumes:
- data-nodejs:/data
- data-tor:/var/lib/tor
logging:
driver: "json-file"
options:
max-size: "20m"
max-file: "10"
depends_on:
- db
networks:
@ -61,8 +70,11 @@ services:
expose:
- "80"
- "9080"
volumes:
- data-nginx:/data
logging:
driver: "json-file"
options:
max-size: "20m"
max-file: "10"
depends_on:
- node
networks:
@ -87,6 +99,11 @@ services:
command: /restart.sh
volumes:
- data-tor:/var/lib/tor
logging:
driver: "json-file"
options:
max-size: "20m"
max-file: "10"
networks:
dmznet:
ipv4_address: 172.29.1.4
@ -115,6 +132,4 @@ networks:
volumes:
data-mysql:
data-nodejs:
data-nginx:
data-tor:
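Each service now opts into the json-file logging driver with rotation: at most 10 files of 20 MB each, so roughly 200 MB of logs per container. A hedged sketch for locating the rotated files on the host (default dockerd layout assumed, container name from this setup):

```
# where Docker keeps the json-file log for a container
docker inspect --format '{{.LogPath}}' nodejs
# e.g. /var/lib/docker/containers/<id>/<id>-json.log
```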

docker/my-dojo/dojo.sh (85)

@ -198,7 +198,7 @@ install() {
docker_up --remove-orphans
# Display the logs
if [ $noLog -eq 1 ]; then
logs
logs "" 0
fi
fi
}
@ -326,7 +326,7 @@ upgrade() {
update_dojo_db
# Display the logs
if [ $noLog -eq 1 ]; then
logs
logs "" 0
fi
fi
}
@ -381,78 +381,55 @@ whirlpool() {
}
# Display logs
logs_node() {
if [ $3 -eq 0 ]; then
docker exec -ti nodejs tail -f /data/logs/$1-$2.log
else
docker exec -ti nodejs tail -n $3 /data/logs/$1-$2.log
fi
}
logs_explorer() {
if [ $3 -eq 0 ]; then
docker exec -ti explorer tail -f /data/logs/$1-$2.log
else
docker exec -ti explorer tail -n $3 /data/logs/$1-$2.log
fi
}
logs_whirlpool() {
if [ $3 -eq 0 ]; then
docker exec -ti whirlpool tail -f /home/whirlpool/.whirlpool-cli/whirlpool-output.log
display_logs() {
yamlFiles=$(select_yaml_files)
if [ $2 -eq 0 ]; then
docker-compose $yamlFiles logs --tail=50 --follow $1
else
docker exec -ti whirlpool tail -n $3 /home/whirlpool/.whirlpool-cli/whirlpool-output.log
docker-compose $yamlFiles logs --tail=$2 $1
fi
}
logs() {
source_file "$DIR/conf/docker-bitcoind.conf"
source_file "$DIR/conf/docker-indexer.conf"
source_file "$DIR/conf/docker-explorer.conf"
source_file "$DIR/conf/docker-whirlpool.conf"
source_file "$DIR/conf/docker-common.conf"
case $1 in
db )
docker-compose logs --tail=50 --follow db
db | tor | nginx | node )
display_logs $1 $2
;;
bitcoind )
if [ "$BITCOIND_INSTALL" == "on" ]; then
if [ "$COMMON_BTC_NETWORK" == "testnet" ]; then
bitcoindDataDir="/home/bitcoin/.bitcoin/testnet3"
else
bitcoindDataDir="/home/bitcoin/.bitcoin"
fi
docker exec -ti bitcoind tail -f "$bitcoindDataDir/debug.log"
display_logs $1 $2
else
echo -e "Command not supported for your setup.\nCause: Your Dojo is using an external bitcoind"
fi
;;
indexer )
if [ "$INDEXER_INSTALL" == "on" ]; then
yamlFiles=$(select_yaml_files)
eval "docker-compose $yamlFiles logs --tail=50 --follow indexer"
display_logs $1 $2
else
echo -e "Command not supported for your setup.\nCause: Your Dojo is not using an internal indexer"
echo -e "Command not supported for your setup.\nCause: Your Dojo is not running the internal indexer"
fi
;;
tor )
docker-compose logs --tail=50 --follow tor
;;
api | pushtx | pushtx-orchest | tracker )
logs_node $1 $2 $3
;;
explorer )
logs_explorer $1 $2 $3
if [ "$EXPLORER_INSTALL" == "on" ]; then
display_logs $1 $2
else
echo -e "Command not supported for your setup.\nCause: Your Dojo is not running the internal block explorer"
fi
;;
whirlpool )
if [ "$WHIRLPOOL_INSTALL" == "on" ]; then
logs_whirlpool $1 $2 $3
display_logs $1 $2
else
echo -e "Command not supported for your setup.\nCause: Your Dojo is not running a whirlpool client"
fi
;;
* )
yamlFiles=$(select_yaml_files)
services="nginx node tor db"
if [ "$BITCOIND_INSTALL" == "on" ]; then
services="$services bitcoind"
@ -466,7 +443,7 @@ logs() {
if [ "$WHIRLPOOL_INSTALL" == "on" ]; then
services="$services whirlpool"
fi
eval "docker-compose $yamlFiles logs --tail=0 --follow $services"
display_logs "$services" $2
;;
esac
}
@ -489,24 +466,22 @@ help() {
echo " Available options:"
echo " --nolog : do not display the logs after Dojo has been laucnhed."
echo " "
echo " logs [module] [options] Display the logs of your dojo. Use CTRL+C to stop the logs."
echo " logs [module] [options] Display the logs of your dojo."
echo " By default, the command displays the live logs. Use CTRL+C to stop the logs."
echo " Use the -n option to display past logs."
echo " "
echo " Available modules:"
echo " dojo.sh logs : display the logs of all the Docker containers"
echo " dojo.sh logs bitcoind : display the logs of bitcoind"
echo " dojo.sh logs db : display the logs of the MySQL database"
echo " dojo.sh logs tor : display the logs of tor"
echo " dojo.sh logs nginx : display the logs of nginx"
echo " dojo.sh logs indexer : display the logs of the internal indexer"
echo " dojo.sh logs api : display the logs of the REST API (nodejs)"
echo " dojo.sh logs tracker : display the logs of the Tracker (nodejs)"
echo " dojo.sh logs pushtx : display the logs of the pushTx API (nodejs)"
echo " dojo.sh logs pushtx-orchest : display the logs of the pushTx Orchestrator (nodejs)"
echo " dojo.sh logs node : display the logs of NodeJS modules (API, Tracker, PushTx API, Orchestrator)"
echo " dojo.sh logs explorer : display the logs of the Explorer"
echo " dojo.sh logs whirlpool : display the logs of the Whirlpool client"
echo " "
echo " Available options (only available for api, tracker, pushtx, pushtx-orchest, explorer and whirlpool modules):"
echo " -d [VALUE] : select the type of log to be displayed."
echo " VALUE can be output (default) or error."
echo " Available options:"
echo " -n [VALUE] : display the last VALUE lines"
echo " "
echo " onion Display the Tor onion address allowing your wallet to access your dojo."
@ -580,15 +555,11 @@ case "$subcommand" in
;;
logs )
module=$1; shift
display="output"
numlines=0
# Process package options
while getopts ":d:n:" opt; do
while getopts ":n:" opt; do
case ${opt} in
d )
display=$OPTARG
;;
n )
numlines=$OPTARG
;;
@ -604,7 +575,7 @@ case "$subcommand" in
done
shift $((OPTIND -1))
logs $module $display $numlines
logs "$module" $numlines
;;
onion )
onion
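Net effect of the rewrite: every module now funnels through display_logs(), which wraps docker-compose. Roughly, with the yaml file list elided (as assembled by select_yaml_files):

```
# dojo.sh logs node          ->  docker-compose <yamlFiles> logs --tail=50 --follow node
# dojo.sh logs node -n 200   ->  docker-compose <yamlFiles> logs --tail=200 node
```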

docker/my-dojo/explorer/Dockerfile (8)

@ -1,24 +1,18 @@
FROM node:12-buster
ENV LOGS_DIR /data/logs
ENV APP_DIR /home/node/app
ENV EXPLORER_URL https://github.com/janoside/btc-rpc-explorer/archive
ENV EXPLORER_VERSION 2.0.0
# Install netcat
RUN set -ex && \
apt-get update && \
apt-get install -y netcat
# Create logs and apps directory
RUN mkdir -p "$LOGS_DIR" && \
chown -R node:node "$LOGS_DIR" && \
mkdir "$APP_DIR"
# Download the source code and install it
RUN set -ex && \
mkdir "$APP_DIR" && \
wget -qO explorer.tar.gz "$EXPLORER_URL/v$EXPLORER_VERSION.tar.gz" && \
tar -xzvf explorer.tar.gz -C "$APP_DIR/" --strip-components 1 && \
rm explorer.tar.gz && \

docker/my-dojo/explorer/restart.sh (2)

@ -42,4 +42,4 @@ if [ "$NODE_ACTIVE_INDEXER" == "local_indexer" ]; then
fi
fi
node ./bin/cli.js "${explorer_options[@]}" > /data/logs/explorer-error.log 2> /data/logs/explorer-output.log
node ./bin/cli.js "${explorer_options[@]}"

docker/my-dojo/nginx/Dockerfile (6)

@ -1,11 +1,5 @@
FROM nginx:1.15.10-alpine
# Create data directory
ENV LOGS_DIR /data/logs
RUN mkdir -p "$LOGS_DIR" && \
chown -R nginx:nginx "$LOGS_DIR"
# Copy configuration files
COPY ./nginx.conf /etc/nginx/nginx.conf
COPY ./dojo.conf /etc/nginx/sites-enabled/dojo.conf

docker/my-dojo/nginx/nginx.conf (5)

@ -2,8 +2,9 @@ user nginx;
worker_processes auto;
daemon off;
# Log critical errors and higher
error_log /data/logs/error.log crit;
# Log critical errors and higher to stderr
# (see https://github.com/nginxinc/docker-nginx/blob/594ce7a8bc26c85af88495ac94d5cd0096b306f7/mainline/alpine/Dockerfile#L104)
error_log /var/log/nginx/error.log crit;
pid /var/run/nginx.pid;
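This relies on the official nginx image wiring its conventional log paths to the container's standard streams, as the linked Dockerfile does; inside the container the paths are symlinks, roughly:

```
# inside the official nginx image (per the linked Dockerfile):
#   /var/log/nginx/access.log -> /dev/stdout
#   /var/log/nginx/error.log  -> /dev/stderr
```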

docker/my-dojo/node/Dockerfile (8)

@ -1,9 +1,7 @@
FROM node:12-buster
ENV LOGS_DIR /data/logs
ENV APP_DIR /home/node/app
# Add node user to tor group
RUN addgroup --system -gid 1107 tor && \
usermod -a -G tor node
@ -11,13 +9,9 @@ RUN addgroup --system -gid 1107 tor && \
# Install forever
RUN npm install -g forever
# Create data directory
RUN mkdir -p "$LOGS_DIR" && \
chown -R node:node "$LOGS_DIR"
# Create app directory
RUN mkdir "$APP_DIR" && \
chown -R node:node "$APP_DIR"
chown -R node:node "$APP_DIR"
# Copy app source files into APP_DIR
COPY . "$APP_DIR"

docker/my-dojo/node/restart.sh (14)

@ -1,13 +1,17 @@
#!/bin/bash
cd /home/node/app/accounts
forever start -a -l /dev/null -o /data/logs/api-output.log -e /data/logs/api-error.log index.js "$COMMON_BTC_NETWORK"
forever start -a -l /dev/stdout -o /dev/null -e /dev/null index.js "$COMMON_BTC_NETWORK"
cd /home/node/app/pushtx
forever start -a -l /dev/null -o /data/logs/pushtx-output.log -e /data/logs/pushtx-error.log index.js "$COMMON_BTC_NETWORK"
forever start -a -l /dev/null -o /data/logs/pushtx-orchest-output.log -e /data/logs/pushtx-orchest-error.log index-orchestrator.js "$COMMON_BTC_NETWORK"
forever start -a -l /dev/stdout -o /dev/null -e /dev/null index.js "$COMMON_BTC_NETWORK"
forever start -a -l /dev/stdout -o /dev/null -e /dev/null index-orchestrator.js "$COMMON_BTC_NETWORK"
cd /home/node/app/tracker
forever start -a -l /dev/null -o /data/logs/tracker-output.log -e /data/logs/tracker-error.log index.js "$COMMON_BTC_NETWORK"
forever start -a -l /dev/stdout -o /dev/null -e /dev/null index.js "$COMMON_BTC_NETWORK"
forever --fifo logs 0
# Keep the container up
while true
do
sleep 1
done
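Each forever instance appends its log stream to /dev/stdout, and `forever --fifo logs 0` then tails it in the foreground, so all NodeJS output reaches the container's stdout for Docker to collect. A quick way to verify on a running Dojo (container name as used by dojo.sh):

```
# list the four forever-managed processes (API, PushTx, Orchestrator, Tracker)
docker exec -ti nodejs forever list
```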

docker/my-dojo/overrides/bitcoind.install.yaml (5)

@ -20,6 +20,11 @@ services:
volumes:
- data-bitcoind:/home/bitcoin/.bitcoin
- data-tor:/var/lib/tor
logging:
driver: "json-file"
options:
max-size: "20m"
max-file: "10"
depends_on:
- db
- tor

docker/my-dojo/overrides/explorer.install.yaml (10)

@ -16,8 +16,11 @@ services:
command: "/home/node/app/restart.sh"
expose:
- "3002"
volumes:
- data-explorer:/data/logs
logging:
driver: "json-file"
options:
max-size: "20m"
max-file: "10"
networks:
dojonet:
ipv4_address: 172.28.1.7
@ -25,6 +28,3 @@ services:
node:
depends_on:
- explorer
volumes:
data-explorer:

docker/my-dojo/overrides/indexer.install.yaml (5)

@ -17,6 +17,11 @@ services:
- "50001"
volumes:
- data-indexer:/home/indexer
logging:
driver: "json-file"
options:
max-size: "20m"
max-file: "10"
depends_on:
- tor
networks:

docker/my-dojo/overrides/whirlpool.install.yaml (5)

@ -16,6 +16,11 @@ services:
- "8898"
volumes:
- data-whirlpool:/home/whirlpool
logging:
driver: "json-file"
options:
max-size: "20m"
max-file: "10"
networks:
whirlnet:
ipv4_address: 172.30.1.8

docker/my-dojo/whirlpool/restart.sh (1)

@ -12,7 +12,6 @@ whirlpool_options=(
--cli.torConfig.coordinator.onion=true
--cli.torConfig.backend.enabled=false
--cli.torConfig.backend.onion=false
--logging.file="/home/whirlpool/.whirlpool-cli/whirlpool-output.log"
)
if [ "$COMMON_BTC_NETWORK" == "testnet" ]; then

lib/auth/authentication-manager.js (2)

@ -40,7 +40,7 @@ class AuthenticationManager {
if (Configurator) {
this.activeStrategy = new Configurator()
this.activeStrategy.configure()
Logger.info(`Authentication strategy ${this.activeStrategyName} successfully configured`)
Logger.info(`Auth : Authentication strategy ${this.activeStrategyName} successfully configured`)
}
} catch(e) {

lib/auth/localapikey-strategy-configurator.js (6)

@ -43,14 +43,14 @@ class LocalApiKeyStrategyConfigurator {
if (apiKey == _adminKey) {
// Check if received key is a valid api key
Logger.info('Successful authentication with an admin key')
Logger.info('Auth : Successful authentication with an admin key')
return done(null, {'profile': authorzMgr.TOKEN_PROFILE_ADMIN})
} else if (_apiKeys.indexOf(apiKey) >= 0) {
// Check if received key is a valid api key
Logger.info('Successful authentication with an api key')
Logger.info('Auth : Successful authentication with an api key')
return done(null, {'profile': authorzMgr.TOKEN_PROFILE_API})
} else {
Logger.error(null, `Authentication failure (apikey=${apiKey})`)
Logger.error(null, `Auth : Authentication failure (apikey=${apiKey})`)
return done('Invalid API key', false)
}
}

lib/bitcoin/addresses-helper.js (2)

@ -101,7 +101,7 @@ class AddressesHelper {
try {
return bitcoin.address.fromBech32(str).data.toString('hex')
} catch(e) {
Logger.error(e, 'AddressesHelper.getScriptHashFromBech32()')
Logger.error(e, 'AddressesHelper : getScriptHashFromBech32()')
return null
}
}

lib/bitcoin/hd-accounts-helper.js (6)

@ -123,7 +123,7 @@ class HDAccountsHelper {
&& ver != this.MAGIC_ZPUB
&& ver != this.MAGIC_VPUB
) {
//Logger.error(null, 'HdAccountsHelper.xlatXPUB() : Incorrect format')
//Logger.error(null, 'HdAccountsHelper : xlatXPUB() : Incorrect format')
return ''
}
@ -379,12 +379,12 @@ class HDAccountsHelper {
if (msg.status == 'ok') {
resolve(msg.addresses)
} else {
Logger.error(null, 'A problem was met during parallel addresses derivation')
Logger.error(null, 'HdAccountsHelper : A problem was met during parallel addresses derivation')
reject()
}
} catch(e) {
Logger.error(e, 'A problem was met during parallel addresses derivation')
Logger.error(e, 'HdAccountsHelper : A problem was met during parallel addresses derivation')
reject(e)
}
})

lib/bitcoin/hd-accounts-service.js (10)

@ -40,7 +40,7 @@ class HDAccountsService {
const isInvalidXpub = (e == errors.xpub.INVALID || e == errors.xpub.PRIVKEY)
const isLockedXpub = (e == errors.xpub.LOCKED)
const err = (isInvalidXpub || isLockedXpub) ? e : errors.xpub.CREATE
Logger.error(e, 'HdAccountsService.createHdAccount()' + err)
Logger.error(e, 'HdAccountsService : createHdAccount()' + err)
return Promise.reject(err)
}
}
@ -139,7 +139,7 @@ class HDAccountsService {
else if (scheme == hdaHelper.BIP84)
segwit = ' SegWit (BIP84)'
Logger.info(`Created HD Account: ${xpub}${segwit}`)
Logger.info(`HdAccountsService : Created HD Account: ${xpub}${segwit}`)
const externalPrm = hdaHelper.deriveAddresses(xpub, 0, _.range(gap.external), scheme)
const internalPrm = hdaHelper.deriveAddresses(xpub, 1, _.range(gap.internal), scheme)
@ -196,15 +196,15 @@ class HDAccountsService {
// check for a derivation scheme mismatch
if (info.type != scheme) {
if (info.locked && !forceOverride) {
Logger.info(`Attempted override on locked account: ${xpub}`)
Logger.info(`HdAccountsService : Attempted override on locked account: ${xpub}`)
return Promise.reject(errors.xpub.LOCKED)
} else {
Logger.info(`Derivation scheme override: ${xpub}`)
Logger.info(`HdAccountsService : Derivation scheme override: ${xpub}`)
return db.deleteHDAccount(xpub)
}
}
} catch(e) {
Logger.error(e, 'HDAccountsService.derivationOverrideCheck()')
Logger.error(e, 'HDAccountsService : derivationOverrideCheck()')
return Promise.reject(e)
}
}

lib/bitcoind-rpc/fees.js (2)

@ -58,7 +58,7 @@ class Fees {
const level = await this.rpcClient.cmd('estimatesmartfee', tgt, this.feeType)
this.fees[tgt] = Math.round(level.feerate * 1e5)
} catch(e) {
Logger.error(e, 'Fees.refresh()')
Logger.error(e, 'Bitcoind RPC : Fees.refresh()')
delete this.fees[tgt]
}
})

lib/bitcoind-rpc/latest-block.js (4)

@ -43,7 +43,7 @@ class LatestBlock {
this.onBlockHash(msg.toString('hex'))
break
default:
Logger.info(topic.toString())
Logger.info(`Bitcoind RPC : ${topic.toString()}`)
}
})
}
@ -61,7 +61,7 @@ class LatestBlock {
this.time = header.mediantime
this.diff = header.difficulty
Logger.info(`Block ${this.height} ${this.hash}`)
Logger.info(`Bitcoind RPC : Block ${this.height} ${this.hash}`)
}
}

lib/bitcoind-rpc/rpc-client.js (2)

@ -78,7 +78,7 @@ class RpcClient {
await client.getblockchaininfo()
} catch(e) {
client = null
Logger.info('Bitcoind RPC API is still unreachable. New attempt in 20s.')
Logger.info('Bitcoind RPC : API is still unreachable. New attempt in 20s.')
return util.delay(20000).then(() => {
return RpcClient.waitForBitcoindRpcApi()
})

lib/bitcoind-rpc/transactions.js (6)

@ -65,7 +65,7 @@ class Transactions {
return await util.seriesCall(txs, async tx => {
if (tx.result == null) {
Logger.info(` got null for ${txids[tx.id]}`)
Logger.info(`Bitcoind RPC : got null for ${txids[tx.id]}`)
return null
} else {
return this._prepareTxResult(tx.result, fees)
@ -73,7 +73,7 @@ class Transactions {
})
} catch(e) {
Logger.error(e, 'Transaction.getTransactions()')
Logger.error(e, 'Bitcoind RPC : Transaction.getTransactions()')
return Promise.reject(errors.generic.GEN)
}
}
@ -99,7 +99,7 @@ class Transactions {
this.txCache.set(txid, ret)
return ret
} catch(e) {
Logger.error(e, 'Transaction.getTransaction()')
Logger.error(e, 'Bitcoind RPC : Transaction.getTransaction()')
return Promise.reject(errors.generic.GEN)
}
}

lib/db/mysql-db-wrapper.js (24)

@ -312,21 +312,21 @@ class MySqlDbWrapper {
handleConnect() {
try {
this.pool = mysql.createPool(this.dbConfig)
Logger.info(`Created a database pool of ${this.dbConfig.connectionLimit} connections`)
Logger.info(`Db Wrapper : Created a database pool of ${this.dbConfig.connectionLimit} connections`)
if (debug) {
this.pool.on('acquire', function (conn) {
Logger.info(`Connection ${conn.threadId} acquired`)
Logger.info(`Db Wrapper : Connection ${conn.threadId} acquired`)
})
this.pool.on('enqueue', function (conn) {
Logger.info('Waiting for a new connection slot')
Logger.info('Db Wrapper : Waiting for a new connection slot')
})
this.pool.on('release', function (conn) {
Logger.info(`Connection ${conn.threadId} released`)
Logger.info(`Db Wrapper : Connection ${conn.threadId} released`)
})
}
} catch(e) {
Logger.error(err, 'MySqlDbWrapper.handleConnect() : Problem met while trying to initialize a new pool')
Logger.error(err, 'Db Wrapper : handleConnect() : Problem met while trying to initialize a new pool')
throw e
}
}
@ -348,7 +348,7 @@ class MySqlDbWrapper {
// Destroy previous pool
this.pool.end(err => {
if (err) {
Logger.error(err, 'MySqlDbWrapper.handleReconnect() : Problem met while terminating the pool')
Logger.error(err, 'Db Wrapper : handleReconnect() : Problem met while terminating the pool')
this.timerReconnect = setTimeout(this.handleReconnect.bind(this), 2000)
} else {
this.handleConnect()
@ -362,14 +362,14 @@ class MySqlDbWrapper {
* Ping the mysql server
*/
ping() {
debug && Logger.info(`MySqlDbWrapper.ping() : ${this.pool._freeConnections.length} free connections`)
debug && Logger.info(`Db Wrapper : ping() : ${this.pool._freeConnections.length} free connections`)
// Iterate over all free connections
// which might have been disconnected by the mysql server
for (let c of this.pool._freeConnections) {
c.query('SELECT 1', (err, res, fields) => {
if (debug && err) {
Logger.error(err, `MySqlDbWrapper.ping() : Ping Error`)
Logger.error(err, `Db Wrapper : ping() : Ping Error`)
}
})
}
@ -379,7 +379,7 @@ class MySqlDbWrapper {
* Send a query
*/
async _query(query, retries) {
queryDebug && Logger.info(query)
queryDebug && Logger.info(`Db Wrapper : ${query}`)
if (retries == null)
retries = 5
@ -406,7 +406,7 @@ class MySqlDbWrapper {
reject(err)
}
} else {
queryDebug && Logger.info(result)
queryDebug && Logger.info(`Db Wrapper : ${result}`)
resolve(result)
}
})
@ -421,8 +421,8 @@ class MySqlDbWrapper {
* Log a query error
*/
queryError(err, query) {
Logger.error(err, 'MySqlDbWrapper.query() : Query Error')
Logger.error(query)
Logger.error(err, 'Db Wrapper : query() : Query Error')
Logger.error(null, `Db Wrapper : ${query}`)
}
/**

lib/http-server/http-server.js (6)

@ -50,14 +50,14 @@ class HttpServer {
// Error handler, should be final middleware
this.app.use(function(err, req, res, next) {
if (res.headersSent) return next(err)
Logger.error(err.stack, 'HttpServer.start()')
Logger.error(err.stack, 'HttpServer : start()')
const ret = {status: 'Server error'}
HttpServer.sendError(res, ret, 500)
})
// Start a http server
this.server = this.app.listen(this.port, this.host, () => {
Logger.info(`HTTP server listening on ${this.host}:${this.port}`)
Logger.info(`HttpServer : Listening on ${this.host}:${this.port}`)
})
this.server.timeout = 600 * 1000
@ -184,7 +184,7 @@ class HttpServer {
* @param {function} next - next middleware
*/
static requestLogger(req, res, next) {
Logger.info(`${req.method} ${req.url}`)
Logger.info(`HttpServer : ${req.method} ${req.url}`)
next()
}

lib/indexer-rpc/rpc-client.js (2)

@ -82,7 +82,7 @@ class RpcClient {
await client.sendRequest('server.version', 'dojo', ['1.0', '1.4'])
} catch(e) {
client = null
Logger.info('Indexer RPC API is still unreachable. New attempt in 20s.')
Logger.info('Indexer RPC : API is still unreachable. New attempt in 20s.')
return util.delay(20000).then(() => {
return RpcClient.waitForIndexerRpcApi()
})

lib/logger.js (9)

@ -18,7 +18,7 @@ class Logger {
* @param {boolean} json - true if msg is a json object, false otherwise
*/
static info(msg, json) {
const logEntry = Logger._formatLog(msg, json)
const logEntry = Logger._formatLog('INFO', msg, json)
console.log(logEntry)
}
@ -28,7 +28,7 @@ class Logger {
* @param {string} msg - message associated to the error
*/
static error(e, msg) {
const logEntry = Logger._formatLog(msg)
const logEntry = Logger._formatLog('ERROR', msg)
console.error(logEntry)
//const errorEntry = Logger._formatLog(e)
@ -40,10 +40,11 @@ class Logger {
/**
* Format log entry
* @param {string} level - log level label
* @param {string/object} msg
* @param {boolean} json - true if msg is a json object, false otherwise
*/
static _formatLog(msg, json) {
static _formatLog(level, msg, json) {
json = json || false
const data = json ? JSON.stringify(msg, null, 2) : msg
@ -59,7 +60,7 @@ class Logger {
const s = util.pad10(D.getUTCSeconds())
const ms = util.pad100(D.getUTCMilliseconds())
const parts = ['[', y, m, d, ' ', h, ':', mn, ':', s, '.', ms, ' ', mib, ' MiB', '] ', data]
const parts = [y, '-', m, '-', d, 'T', h, ':', mn, ':', s, 'Z ', level, ' ', data]
return parts.join('')
}
}
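With the reworked _formatLog, each entry now starts with a UTC timestamp and a level label, and the old bracketed timestamp with milliseconds and the MiB figure is gone. Sample output (timestamps illustrative):

```
2020-01-10T12:34:56Z INFO API : Completed GET /fees
2020-01-10T12:34:57Z ERROR Db Wrapper : query() : Query Error
```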

lib/remote-importer/bitcoind-wrapper.js (4)

@ -70,7 +70,7 @@ class BitcoindWrapper extends Wrapper {
}
if (filterAddr && ret.ntx > keys.addrFilterThreshold) {
Logger.info(` import of ${address} rejected (too many transactions - ${ret.ntx})`)
Logger.info(`Importer : Import of ${address} rejected (too many transactions - ${ret.ntx})`)
return {
address: address,
ntx: 0,
@ -116,7 +116,7 @@ class BitcoindWrapper extends Wrapper {
for (let i in aRet) {
if (filterAddr && aRet[i].ntx > keys.addrFilterThreshold) {
Logger.info(` import of ${aRet[i].address} rejected (too many transactions - ${aRet[i].ntx})`)
Logger.info(`Importer : Import of ${aRet[i].address} rejected (too many transactions - ${aRet[i].ntx})`)
aRet.splice(i, 1)
}
}

lib/remote-importer/esplora-wrapper.js (2)

@ -93,7 +93,7 @@ class EsploraWrapper extends Wrapper {
return ret
} else if (filterAddr && ret.ntx > keys.addrFilterThreshold) {
// we have too many transactions
Logger.info(` import of ${ret.address} rejected (too many transactions - ${ret.ntx})`)
Logger.info(`Importer : Import of ${ret.address} rejected (too many transactions - ${ret.ntx})`)
ret.txids = []
ret.ntx = 0
return ret

lib/remote-importer/local-indexer-wrapper.js (4)

@ -70,7 +70,7 @@ class LocalIndexerWrapper extends Wrapper {
}
if (filterAddr && ret.ntx > keys.addrFilterThreshold) {
Logger.info(` import of ${address} rejected (too many transactions - ${ret.ntx})`)
Logger.info(`Importer : Import of ${address} rejected (too many transactions - ${ret.ntx})`)
return {
address: address,
ntx: 0,
@ -123,7 +123,7 @@ class LocalIndexerWrapper extends Wrapper {
for (let i in aRet) {
if (filterAddr && aRet[i].ntx > keys.addrFilterThreshold) {
Logger.info(` import of ${aRet[i].address} rejected (too many transactions - ${aRet[i].ntx})`)
Logger.info(`Importer : Import of ${aRet[i].address} rejected (too many transactions - ${aRet[i].ntx})`)
aRet.splice(i, 1)
}
}

lib/remote-importer/oxt-wrapper.js (4)

@ -64,7 +64,7 @@ class OxtWrapper extends Wrapper {
// Check if we should filter this address
if (filterAddr && ret.ntx > keys.addrFilterThreshold) {
Logger.info(` import of ${ret.address} rejected (too many transactions - ${ret.ntx})`)
Logger.info(`Importer : Import of ${ret.address} rejected (too many transactions - ${ret.ntx})`)
return ret
}
@ -98,7 +98,7 @@ class OxtWrapper extends Wrapper {
// Check if we should filter this address
if (filterAddr && retAddr.ntx > keys.addrFilterThreshold) {
Logger.info(` import of ${retAddr.address} rejected (too many transactions - ${retAddr.ntx})`)
Logger.info(`Importer : Import of ${retAddr.address} rejected (too many transactions - ${retAddr.ntx})`)
} else {
retAddr.txids = r.txids
}

lib/remote-importer/remote-importer.js (28)

@ -133,14 +133,14 @@ class RemoteImporter {
return Promise.reject(errors.xpub.INVALID)
if (this.importing[xpub]) {
Logger.info(` Import overlap for ${xpub}`)
Logger.info(`Importer : Import overlap for ${xpub}`)
return Promise.reject(errors.xpub.OVERLAP)
}
this.importing[xpub] = true
const ts = hdaHelper.typeString(type)
Logger.info(`Importing ${xpub} ${ts}`)
Logger.info(`Importer : Importing ${xpub} ${ts}`)
const t0 = Date.now()
const chains = [0,1]
@ -181,9 +181,9 @@ class RemoteImporter {
await this._importTransactions(aAddresses, txns)
} catch(e) {
Logger.error(e, `RemoteImporter.importHDAccount() : xpub ${xpub}`)
Logger.error(e, `Importer : RemoteImporter.importHDAccount() : xpub ${xpub}`)
} finally {
Logger.info(` xpub import done in ${((Date.now() - t0)/1000).toFixed(1)}s`)
Logger.info(`Importer : xpub import done in ${((Date.now() - t0)/1000).toFixed(1)}s`)
delete this.importing[xpub]
return true
}
@ -233,7 +233,7 @@ class RemoteImporter {
// Update derived index
d = u + G
Logger.info(` derived M/${c}/${A.join(',')}`)
Logger.info(`Importer : derived M/${c}/${A.join(',')}`)
const addrMap = {}
for (let a of ret.addresses)
@ -261,7 +261,7 @@ class RemoteImporter {
}
}
Logger.info(` Got ${scanTx.length} transactions`)
Logger.info(`Importer : Got ${scanTx.length} transactions`)
// Retrieve the transactions by batches of 200 transactions
const txsChunks = util.splitList(scanTx, 200)
@ -276,7 +276,7 @@ class RemoteImporter {
}
}
} catch(e) {
Logger.error(e, `RemoteImporter.xpubScan() : getTransactions error`)
Logger.error(e, `Importer : RemoteImporter.xpubScan() : getTransactions error`)
}
if (gotTransactions) {
@ -290,7 +290,7 @@ class RemoteImporter {
}
} catch(e) {
Logger.error(e, `RemoteImporter.xpubScan() : xpub ${xpub} ${c} ${d} ${u} ${G}`)
Logger.error(e, `Importer : RemoteImporter.xpubScan() : xpub ${xpub} ${c} ${d} ${u} ${G}`)
} finally {
// Push everything up the rabbit hole
return ret
@ -313,14 +313,14 @@ class RemoteImporter {
addresses.push(address)
this.importing[address] = true
} else {
Logger.info(`Note: Import overlap for ${address}. Skipping`)
Logger.info(`Importer : Import overlap for ${address}. Skipping`)
}
}
if (addresses.length == 0)
return true
Logger.info(`Importing ${addresses.join(',')}`)
Logger.info(`Importer : Importing ${addresses.join(',')}`)
try {
const scanTx = []
@ -338,7 +338,7 @@ class RemoteImporter {
}
}
Logger.info(` Got ${scanTx.length} transactions`)
Logger.info(`Importer : Got ${scanTx.length} transactions`)
// Retrieve the transactions by batches of 100 transactions
const txsChunks = util.splitList(scanTx, 100)
@ -354,7 +354,7 @@ class RemoteImporter {
await this._importTransactions(addresses, txns)
} catch(e) {
Logger.error(e, `RemoteImporter.importAddresses() : ${candidates.join(',')}`)
Logger.error(e, `Importer : RemoteImporter.importAddresses() : ${candidates.join(',')}`)
} finally {
const dt = Date.now() - t0
@ -362,7 +362,7 @@ class RemoteImporter {
const N = addresses.length
if (N > 0)
Logger.info(` Imported ${N} addresses in ${ts}s (${(dt/N).toFixed(0)} ms/addr)`)
Logger.info(`Importer : Imported ${N} addresses in ${ts}s (${(dt/N).toFixed(0)} ms/addr)`)
for (let address of addresses)
delete this.importing[address]
@ -451,7 +451,7 @@ class RemoteImporter {
await db.addInputs(inputs)
} catch(e) {
Logger.error(e, `RemoteImporter.addTransactions() :`)
Logger.error(e, `Importer : RemoteImporter.addTransactions() :`)
}
}

lib/remote-importer/sources-mainnet.js (6)

@ -34,16 +34,16 @@ class SourcesMainnet extends Sources {
// If local bitcoind option is activated
// we'll use the local node as our unique source
this.source = new BitcoindWrapper()
Logger.info('Activated Bitcoind as the data source for imports')
Logger.info('Importer : Activated Bitcoind as the data source for imports')
} else if (keys.indexer.active == 'local_indexer') {
// If local indexer option is activated
// we'll use the local indexer as our unique source
this.source = new LocalIndexerWrapper()
Logger.info('Activated local indexer as the data source for imports')
Logger.info('Importer : Activated local indexer as the data source for imports')
} else {
// Otherwise, we'll use the rest api provided by OXT
this.source = new OxtWrapper(keys.indexer.oxt)
Logger.info('Activated OXT API as the data source for imports')
Logger.info('Importer : Activated OXT API as the data source for imports')
}
}

lib/remote-importer/sources-testnet.js (6)

@ -35,16 +35,16 @@ class SourcesTestnet extends Sources {
// If local bitcoind option is activated
// we'll use the local node as our unique source
this.source = new BitcoindWrapper()
Logger.info('Activated Bitcoind as the data source for imports')
Logger.info('Importer : Activated Bitcoind as the data source for imports')
} else if (keys.indexer.active == 'local_indexer') {
// If local indexer option is activated
// we'll use the local indexer as our unique source
this.source = new LocalIndexerWrapper()
Logger.info('Activated local indexer as the data source for imports')
Logger.info('Importer : Activated local indexer as the data source for imports')
} else {
// Otherwise, we'll use the rest api provided by Esplora
this.source = new EsploraWrapper(keys.indexer.esplora)
Logger.info('Activated Esplora API as the data source for imports')
Logger.info('Importer : Activated Esplora API as the data source for imports')
}
}

lib/remote-importer/sources.js (4)

@ -48,7 +48,7 @@ class Sources {
ret.txids = result.txids
} catch(e) {
Logger.error(null, `Sources.getAddress() : ${address} from ${this.source.base}`)
Logger.error(null, `Importer : Sources.getAddress() : ${address} from ${this.source.base}`)
} finally {
return ret
}
@ -75,7 +75,7 @@ class Sources {
}
} catch(e) {
Logger.error(null, `Sources.getAddresses() : Error while requesting ${addresses} from ${this.source.base}`)
Logger.error(null, `Importer : Sources.getAddresses() : Error while requesting ${addresses} from ${this.source.base}`)
} finally {
return ret
}

pushtx/index-orchestrator.js (4)

@ -18,8 +18,8 @@
/**
* PushTx Orchestrator
*/
Logger.info('Process ID: ' + process.pid)
Logger.info('Preparing the pushTx Orchestrator')
Logger.info('Orchestrator : Process ID: ' + process.pid)
Logger.info('Orchestrator : Preparing the pushTx Orchestrator')
// Wait for Bitcoind RPC API
// being ready to process requests

pushtx/index.js (4)

@ -19,8 +19,8 @@
/**
* PushTx API
*/
Logger.info('Process ID: ' + process.pid)
Logger.info('Preparing the pushTx API')
Logger.info('PushTx : Process ID: ' + process.pid)
Logger.info('PushTx : Preparing the pushTx API')
// Wait for Bitcoind RPC API
// being ready to process requests

pushtx/orchestrator.js (22)

@ -59,11 +59,11 @@ class Orchestrator {
this.onBlockHash(message)
break
default:
Logger.info(topic.toString())
Logger.info(`Orchestrator : ${topic.toString()}`)
}
})
Logger.info('Listening for blocks')
Logger.info('Orchestrator : Listening for blocks')
}
/**
@ -80,7 +80,7 @@ class Orchestrator {
const header = await this.rpcClient.getblockheader(blockHash, true)
const height = header.height
Logger.info(`Block ${height} ${blockHash}`)
Logger.info(`Orchestrator : Block ${height} ${blockHash}`)
let nbTxsPushed
let rpcConnOk = true
@ -102,7 +102,7 @@ class Orchestrator {
try {
parentTx = await this.rpcClient.getrawtransaction(tx.schParentTxid, true)
} catch(e) {
Logger.error(e, 'Transaction.getTransaction()')
Logger.error(e, 'Orchestrator : Transaction.getTransaction()')
}
}
@ -110,14 +110,14 @@ class Orchestrator {
// Push the transaction
try {
await pushTxProcessor.pushTx(tx.schRaw)
Logger.info(`Pushed scheduled transaction ${tx.schTxid}`)
Logger.info(`Orchestrator : Pushed scheduled transaction ${tx.schTxid}`)
} catch(e) {
const msg = 'A problem was met while trying to push a scheduled transaction'
Logger.error(e, `Orchestrator.onBlockHash() : ${msg}`)
Logger.error(e, `Orchestrator : Orchestrator.onBlockHash() : ${msg}`)
// Check if it's an issue with the connection to the RPC API
// (=> immediately stop the loop)
if (RpcClient.isConnectionError(e)) {
Logger.info('Connection issue')
Logger.info('Orchestrator : Connection issue')
rpcConnOk = false
break
}
@ -130,7 +130,7 @@ class Orchestrator {
await this.updateTriggers(tx.schID, shift)
} catch(e) {
const msg = 'A problem was met while shifting scheduled transactions'
Logger.error(e, `Orchestrator.onBlockHash() : ${msg}`)
Logger.error(e, `Orchestrator : Orchestrator.onBlockHash() : ${msg}`)
}
}
@ -141,14 +141,14 @@ class Orchestrator {
nbTxsPushed++
} catch(e) {
const msg = 'A problem was met while trying to delete a scheduled transaction'
Logger.error(e, `Orchestrator.onBlockHash() : ${msg}`)
Logger.error(e, `Orchestrator : Orchestrator.onBlockHash() : ${msg}`)
}
}
}
} while (rpcConnOk && nbTxsPushed > 0)
} catch(e) {
Logger.error(e, 'Orchestrator.onBlockHash() : Error')
Logger.error(e, 'Orchestrator : Orchestrator.onBlockHash() : Error')
} finally {
// Release the semaphor
await this._onBlockHashSemaphor.release()
@ -173,7 +173,7 @@ class Orchestrator {
await db.updateTriggerScheduledTransaction(tx.schID, newTrigger)
// Update the triggers of next transactions in the chain
await this.updateTriggers(tx.schID, shift)
Logger.info(`Rescheduled tx ${tx.schTxid} (trigger=${newTrigger})`)
Logger.info(`Orchestrator : Rescheduled tx ${tx.schTxid} (trigger=${newTrigger})`)
}
}

pushtx/pushtx-processor.js (6)

@ -51,7 +51,7 @@ class PushTxProcessor {
const tx = bitcoin.Transaction.fromHex(rawtx)
for (let output of tx.outs)
value += output.value
Logger.info('Push for ' + (value / 1e8).toFixed(8) + ' BTC')
Logger.info('PushTx : Push for ' + (value / 1e8).toFixed(8) + ' BTC')
} catch(e) {
throw errors.tx.PARSE
}
@ -60,14 +60,14 @@ class PushTxProcessor {
// Attempt to send via RPC to the bitcoind instance
try {
const txid = await this.rpcClient.sendrawtransaction(rawtx)
Logger.info('Pushed!')
Logger.info('PushTx : Pushed!')
// Update the stats
status.updateStats(value)
// Notify the tracker
this.notifSock.send(['pushtx', rawtx])
return txid
} catch(err) {
Logger.info('Push failed')
Logger.info('PushTx : Push failed')
throw err
}
}
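
The value logged above is computed from the raw transaction's outputs. A standalone sketch of that arithmetic, assuming bitcoinjs-lib as used in the surrounding code (output values are in satoshis, hence the division by 1e8):

```javascript
// Sketch of the value computation behind the "Push for X BTC" log line,
// assuming bitcoinjs-lib is available as in pushtx-processor.js.
const bitcoin = require('bitcoinjs-lib')

function pushedValueBTC(rawtx) {
  const tx = bitcoin.Transaction.fromHex(rawtx)
  // Sum the output values (satoshis), then convert to a BTC string
  const sats = tx.outs.reduce((sum, out) => sum + out.value, 0)
  return (sats / 1e8).toFixed(8)
}

// pushedValueBTC('0100...') => e.g. '0.12345678'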

8
pushtx/pushtx-rest-api.js

@ -197,21 +197,21 @@ class PushTxRestApi {
} catch(e) {}
if (msg.code && msg.message) {
Logger.error(null, 'Error ' + msg.code + ': ' + msg.message)
Logger.error(null, 'PushTx : Error ' + msg.code + ': ' + msg.message)
ret = {
message: msg.message,
code: msg.code
}
} else {
Logger.error(err.message, 'ERROR')
Logger.error(err.message, 'PushTx : ')
ret = err.message
}
} else {
Logger.error(err, 'ERROR')
Logger.error(err, 'PushTx : ')
ret = err
}
} catch (e) {
Logger.error(e, 'ERROR')
Logger.error(e, 'PushTx : ')
ret = e
} finally {
HttpServer.sendError(res, ret)
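
A sketch of the normalization this hunk performs (as a hypothetical standalone function): a bitcoind RPC error may arrive as a JSON string carrying {code, message}, while anything else falls back to the raw message or object:

```javascript
// Hypothetical sketch, not the commit's code: normalize an error before
// sending it to the client via HttpServer.sendError().
function normalizeError(err) {
  let msg = {}
  try {
    msg = JSON.parse(err.message)
  } catch (e) {}  // not JSON: keep msg empty
  if (msg.code && msg.message)
    return { message: msg.message, code: msg.code }
  return err.message || err
}
```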

2
pushtx/status.js

@ -78,7 +78,7 @@ class Status {
await this._refreshNetworkInfo()
await this._refreshBlockchainInfo()
} catch (e) {
Logger.error(e, 'Status.getCurrent() : Error')
Logger.error(e, 'PushTx : Status.getCurrent() : Error')
} finally {
return this.status
}

4
pushtx/transactions-scheduler.js

@ -69,7 +69,7 @@ class TransactionsScheduler {
// Check that nLockTimes match
if (!(tx.locktime && tx.locktime == entry.nlocktime)) {
const msg = `TransactionsScheduler.schedule() : nLockTime mismatch : ${tx.locktime} - ${entry.nlocktime}`
Logger.error(null, msg)
Logger.error(null, `PushTx : ${msg}`)
throw errors.pushtx.NLOCK_MISMATCH
}
// Check that the order of hop and nlocktime values is consistent
@ -113,7 +113,7 @@ class TransactionsScheduler {
}
parentId = await db.addScheduledTransaction(objTx)
Logger.info(`Registered scheduled tx ${objTx.txid} (trigger=${objTx.trigger})`)
Logger.info(`PushTx : Registered scheduled tx ${objTx.txid} (trigger=${objTx.trigger})`)
parentTxid = tx.getId()
parentNlocktime = entry.nlocktime
}
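
A sketch of the nLockTime check that produces the mismatch error above, assuming bitcoinjs-lib; `expectedNlocktime` stands in for the schedule entry's nlocktime field:

```javascript
// Sketch (hypothetical function) of the scheduler's consistency check,
// assuming bitcoinjs-lib.
const bitcoin = require('bitcoinjs-lib')

function nlocktimeMatches(rawtx, expectedNlocktime) {
  const tx = bitcoin.Transaction.fromHex(rawtx)
  // A missing/zero locktime or a mismatch is rejected by the scheduler
  return Boolean(tx.locktime) && tx.locktime === expectedNlocktime
}
```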

12
tracker/block.js

@ -33,7 +33,7 @@ class Block extends TransactionsBundle {
* @returns {Promise<object[]>} returns an array of transactions to be broadcast
*/
async checkBlock() {
Logger.info('Beginning to process new block.')
Logger.info('Tracker : Beginning to process new block.')
let block
const txsForBroadcast = []
@ -42,7 +42,7 @@ class Block extends TransactionsBundle {
block = bitcoin.Block.fromHex(this.hex)
this.transactions = block.transactions
} catch (e) {
Logger.error(e, 'Block.checkBlock()')
Logger.error(e, 'Tracker : Block.checkBlock()')
Logger.error(null, this.header)
return Promise.reject(e)
}
@ -74,7 +74,7 @@ class Block extends TransactionsBundle {
blockParent: prevID
})
Logger.info(` Added block ${this.header.height} (id=${blockId})`)
Logger.info(`Tracker : Added block ${this.header.height} (id=${blockId})`)
// Confirms the transactions
const txids = this.transactions.map(t => t.getId())
@ -85,7 +85,7 @@ class Block extends TransactionsBundle {
// Log stats and return the result
const dt = ((Date.now()-t0)/1000).toFixed(1)
const per = ((Date.now()-t0)/ntx).toFixed(0)
Logger.info(` Finished block ${this.header.height}, ${dt}s, ${ntx} tx, ${per}ms/tx`)
Logger.info(`Tracker : Finished block ${this.header.height}, ${dt}s, ${ntx} tx, ${per}ms/tx`)
return txsForBroadcast
}
@ -96,7 +96,7 @@ class Block extends TransactionsBundle {
* @returns {Promise}
*/
async checkBlockHeader(prevBlockID) {
Logger.info('Beginning to process new block header.')
Logger.info('Tracker : Beginning to process new block header.')
// Insert the block header into the database
const blockId = await db.addBlock({
@ -106,7 +106,7 @@ class Block extends TransactionsBundle {
blockParent: prevBlockID
})
Logger.info(` Added block header ${this.header.height} (id=${blockId})`)
Logger.info(`Tracker : Added block header ${this.header.height} (id=${blockId})`)
return blockId
}

44
tracker/blockchain-processor.js

@ -78,7 +78,7 @@ class BlockchainProcessor extends AbstractProcessor {
*/
async catchupIBDMode() {
try {
Logger.info('Tracker Startup (IBD mode)')
Logger.info('Tracker : Tracker Startup (IBD mode)')
const info = await this.client.getblockchaininfo()
const daemonNbBlocks = info.blocks
@ -91,7 +91,7 @@ class BlockchainProcessor extends AbstractProcessor {
// If no header or block loaded by bitcoind => try later
if (daemonNbHeaders == 0 || daemonNbBlocks == 0) {
Logger.info('New attempt scheduled in 30s (waiting for block headers)')
Logger.info('Tracker : New attempt scheduled in 30s (waiting for block headers)')
return util.delay(30000).then(() => {
return this.catchupIBDMode()
})
@ -101,7 +101,7 @@ class BlockchainProcessor extends AbstractProcessor {
// If blocks need to be downloaded by bitcoind => try later
if (daemonNbBlocks - 1 <= dbMaxHeight) {
Logger.info('New attempt scheduled in 10s (waiting for blocks)')
Logger.info('Tracker : New attempt scheduled in 10s (waiting for blocks)')
return util.delay(10000).then(() => {
return this.catchupIBDMode()
})
@ -110,7 +110,7 @@ class BlockchainProcessor extends AbstractProcessor {
} else {
const blockRange = _.range(dbMaxHeight + 1, daemonNbBlocks + 1)
Logger.info(`Sync ${blockRange.length} blocks`)
Logger.info(`Tracker : Sync ${blockRange.length} blocks`)
await util.seriesCall(blockRange, async height => {
try {
@ -118,13 +118,13 @@ class BlockchainProcessor extends AbstractProcessor {
const header = await this.client.getblockheader(blockHash, true)
prevBlockId = await this.processBlockHeader(header, prevBlockId)
} catch(e) {
Logger.error(e, 'BlockchainProcessor.catchupIBDMode()')
Logger.error(e, 'Tracker : BlockchainProcessor.catchupIBDMode()')
process.exit()
}
}, 'Tracker syncing', true)
// Schedule a new iteration (in case more blocks need to be loaded)
Logger.info('Start a new iteration')
Logger.info('Tracker : Start a new iteration')
return this.catchupIBDMode()
}
@ -134,7 +134,7 @@ class BlockchainProcessor extends AbstractProcessor {
}
} catch(e) {
Logger.error(e, 'BlockchainProcessor.catchupIBDMode()')
Logger.error(e, 'Tracker : BlockchainProcessor.catchupIBDMode()')
throw e
}
}
@ -149,7 +149,7 @@ class BlockchainProcessor extends AbstractProcessor {
*/
async catchupNormalMode() {
try {
Logger.info('Tracker Startup (normal mode)')
Logger.info('Tracker : Tracker Startup (normal mode)')
const info = await this.client.getblockchaininfo()
const daemonNbBlocks = info.blocks
@ -162,7 +162,7 @@ class BlockchainProcessor extends AbstractProcessor {
// Compute the range of blocks to be processed
const blockRange = _.range(highest.blockHeight, daemonNbBlocks + 1)
Logger.info(`Sync ${blockRange.length} blocks`)
Logger.info(`Tracker : Sync ${blockRange.length} blocks`)
// Process the blocks
return util.seriesCall(blockRange, async height => {
@ -171,13 +171,13 @@ class BlockchainProcessor extends AbstractProcessor {
const header = await this.client.getblockheader(hash)
return this.processBlock(header)
} catch(e) {
Logger.error(e, 'BlockchainProcessor.catchupNormalMode()')
Logger.error(e, 'Tracker : BlockchainProcessor.catchupNormalMode()')
process.exit()
}
}, 'Tracker syncing', true)
} catch(e) {
Logger.error(e, 'BlockchainProcessor.catchupNormalMode()')
Logger.error(e, 'Tracker : BlockchainProcessor.catchupNormalMode()')
}
}
@ -196,11 +196,11 @@ class BlockchainProcessor extends AbstractProcessor {
this.onBlockHash(message)
break
default:
Logger.info(topic.toString())
Logger.info(`Tracker : ${topic.toString()}`)
}
})
Logger.info('Listening for blocks')
Logger.info('Tracker : Listening for blocks')
}
/**
@ -241,11 +241,11 @@ class BlockchainProcessor extends AbstractProcessor {
try {
const header = await this.client.getblockheader(blockHash, true)
Logger.info(`Block #${header.height} ${blockHash}`)
Logger.info(`Tracker : Block #${header.height} ${blockHash}`)
// Grab all headers between this block and last known
headers = await this.chainBacktrace([header])
} catch(err) {
Logger.error(err, `BlockchainProcessor.onBlockHash() : error in getblockheader(${blockHash})`)
Logger.error(err, `Tracker : BlockchainProcessor.onBlockHash() : error in getblockheader(${blockHash})`)
}
if(headers == null)
@ -267,7 +267,7 @@ class BlockchainProcessor extends AbstractProcessor {
})
} catch(e) {
Logger.error(e, 'BlockchainProcessor.onBlockHash()')
Logger.error(e, 'Tracker : BlockchainProcessor.onBlockHash()')
} finally {
// Release the semaphore
await this._onBlockHashSemaphor.release()
@ -285,7 +285,7 @@ class BlockchainProcessor extends AbstractProcessor {
const deepest = headers[headers.length - 1]
if (headers.length > 1)
Logger.info(`chainBacktrace @ height ${deepest.height}, ${headers.length} blocks`)
Logger.info(`Tracker : chainBacktrace @ height ${deepest.height}, ${headers.length} blocks`)
// Look for previous block in the database
const block = await db.getBlockByHash(deepest.previousblockhash)
@ -313,7 +313,7 @@ class BlockchainProcessor extends AbstractProcessor {
if (txs.length > 0) {
// Cancel confirmation of transactions included in reorg'd blocks
Logger.info(`Backtrace: unconfirm ${txs.length} transactions in reorg`)
Logger.info(`Tracker : Backtrace: unconfirm ${txs.length} transactions in reorg`)
const txids = txs.map(t => t.txnTxid)
await db.unconfirmTransactions(txids)
}
@ -345,12 +345,12 @@ class BlockchainProcessor extends AbstractProcessor {
// Process the blocks
return util.seriesCall(blockRange, async height => {
try {
Logger.info(`Rescanning block ${height}`)
Logger.info(`Tracker : Rescanning block ${height}`)
const hash = await this.client.getblockhash(height)
const header = await this.client.getblockheader(hash)
return this.processBlock(header)
} catch(e) {
Logger.error(e, 'BlockchainProcessor.rescan()')
Logger.error(e, 'Tracker : BlockchainProcessor.rescan()')
throw e
}
}, 'Tracker rescan', true)
@ -379,7 +379,7 @@ class BlockchainProcessor extends AbstractProcessor {
} catch(e) {
// The show must go on.
// TODO: further notification that this block did not check out
Logger.error(e, 'BlockchainProcessor.processBlock()')
Logger.error(e, 'Tracker : BlockchainProcessor.processBlock()')
}
}
@ -394,7 +394,7 @@ class BlockchainProcessor extends AbstractProcessor {
const block = new Block(null, header)
return block.checkBlockHeader(prevBlockID)
} catch(e) {
Logger.error(e, 'BlockchainProcessor.processBlockHeader()')
Logger.error(e, 'Tracker : BlockchainProcessor.processBlockHeader()')
throw e
}
}
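
A sketch of the retry pattern behind the "New attempt scheduled" log lines above, assuming a promise-based delay like the project's util.delay and the same bitcoind RPC wrapper (`client` and `waitForHeaders` are illustrative names):

```javascript
// Hypothetical sketch: poll getblockchaininfo until bitcoind has loaded
// headers and blocks, retrying every 30 seconds.
const delay = ms => new Promise(resolve => setTimeout(resolve, ms))

async function waitForHeaders(client) {
  const info = await client.getblockchaininfo()
  if (info.headers === 0 || info.blocks === 0) {
    await delay(30000)           // try again in 30s
    return waitForHeaders(client)
  }
  return info
}
```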

4
tracker/index.js

@ -16,8 +16,8 @@
const TrackerRestApi = require('./tracker-rest-api')
Logger.info('Process ID: ' + process.pid)
Logger.info('Preparing the tracker')
Logger.info('Tracker : Process ID: ' + process.pid)
Logger.info('Tracker : Preparing the tracker')
// Wait for the Bitcoind RPC API
// to be ready to process requests

30
tracker/mempool-processor.js

@ -95,11 +95,11 @@ class MempoolProcessor extends AbstractProcessor {
this.onPushTx(message)
break
default:
Logger.info(topic.toString())
Logger.info(`Tracker : ${topic.toString()}`)
}
})
Logger.info('Listening for pushTx')
Logger.info('Tracker : Listening for pushTx')
// Socket listening to pushTx Orchestrator
this.orchestratorSock = zmq.socket('sub')
@ -112,11 +112,11 @@ class MempoolProcessor extends AbstractProcessor {
this.onPushTx(message)
break
default:
Logger.info(topic.toString())
Logger.info(`Tracker : ${topic.toString()}`)
}
})
Logger.info('Listening for pushTx orchestrator')
Logger.info('Tracker : Listening for pushTx orchestrator')
// Socket listening to bitcoind Txs messages
this.txSock = zmq.socket('sub')
@ -129,11 +129,11 @@ class MempoolProcessor extends AbstractProcessor {
this.onTx(message)
break
default:
Logger.info(topic.toString())
Logger.info(`Tracker : ${topic.toString()}`)
}
})
Logger.info('Listening for mempool transactions')
Logger.info('Tracker : Listening for mempool transactions')
}
/**
@ -145,7 +145,7 @@ class MempoolProcessor extends AbstractProcessor {
await this._refreshActiveStatus()
const activeLbl = this.isActive ? 'active' : 'inactive'
Logger.info(`Processing ${activeLbl} Mempool (${this.mempoolBuffer.size()} transactions)`)
Logger.info(`Tracker : Processing ${activeLbl} Mempool (${this.mempoolBuffer.size()} transactions)`)
let currentMempool = new TransactionsBundle(this.mempoolBuffer.toArray())
this.mempoolBuffer.clear()
@ -171,7 +171,7 @@ class MempoolProcessor extends AbstractProcessor {
let tx = bitcoin.Transaction.fromBuffer(buf)
this.mempoolBuffer.addTransaction(tx)
} catch (e) {
Logger.error(e, 'MempoolProcessor.onTx()')
Logger.error(e, 'Tracker : MempoolProcessor.onTx()')
return Promise.reject(e)
}
}
@ -190,7 +190,7 @@ class MempoolProcessor extends AbstractProcessor {
let pushedTx = bitcoin.Transaction.fromHex(buf.toString())
const txid = pushedTx.getId()
Logger.info(`Processing tx for pushtx ${txid}`)
Logger.info(`Tracker : Processing tx for pushtx ${txid}`)
if (!TransactionsBundle.cache.has(txid)) {
// Process the transaction
@ -201,7 +201,7 @@ class MempoolProcessor extends AbstractProcessor {
this.notifyTx(txCheck.tx)
}
} catch (e) {
Logger.error(e, 'MempoolProcessor.onPushTx()')
Logger.error(e, 'Tracker : MempoolProcessor.onPushTx()')
return Promise.reject(e)
}
}
@ -213,7 +213,7 @@ class MempoolProcessor extends AbstractProcessor {
async checkUnconfirmed() {
const t0 = Date.now()
Logger.info('Processing unconfirmed transactions')
Logger.info('Tracker : Processing unconfirmed transactions')
const unconfirmedTxs = await db.getUnconfirmedTransactions()
@ -226,7 +226,7 @@ class MempoolProcessor extends AbstractProcessor {
// Transaction is confirmed
const block = await db.getBlockByHash(rtx.blockhash)
if (block && block.blockID) {
Logger.info(`Marking TXID ${tx.txnTxid} confirmed`)
Logger.info(`Tracker : Marking TXID ${tx.txnTxid} confirmed`)
return db.confirmTransactions([tx.txnTxid], block.blockID)
}
},
@ -238,7 +238,7 @@ class MempoolProcessor extends AbstractProcessor {
}
)
} catch(e) {
Logger.error(e, 'MempoolProcessor.checkUnconfirmed()')
Logger.error(e, 'Tracker : MempoolProcessor.checkUnconfirmed()')
}
})
}
@ -247,7 +247,7 @@ class MempoolProcessor extends AbstractProcessor {
const ntx = unconfirmedTxs.length
const dt = ((Date.now() - t0) / 1000).toFixed(1)
const per = (ntx == 0) ? 0 : ((Date.now() - t0) / ntx).toFixed(0)
Logger.info(` Finished processing unconfirmed transactions ${dt}s, ${ntx} tx, ${per}ms/tx`)
Logger.info(`Tracker : Finished processing unconfirmed transactions ${dt}s, ${ntx} tx, ${per}ms/tx`)
}
/**
@ -273,7 +273,7 @@ class MempoolProcessor extends AbstractProcessor {
* Log mempool statistics
*/
displayMempoolStats() {
Logger.info(`Mempool Size: ${this.mempoolBuffer.size()}`)
Logger.info(`Tracker : Mempool Size: ${this.mempoolBuffer.size()}`)
}
}
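
A sketch of the subscription pattern used three times in this file, assuming the classic 'zeromq' package API and a bitcoind -zmqpubrawtx endpoint; the URL is a placeholder:

```javascript
// Sketch of a ZMQ SUB socket like this.txSock above (placeholder URL).
const zmq = require('zeromq')
const Logger = require('../lib/logger')

const txSock = zmq.socket('sub')
txSock.connect('tcp://bitcoind:9501')
txSock.subscribe('rawtx')

txSock.on('message', (topic, message) => {
  switch (topic.toString()) {
    case 'rawtx':
      // hand the raw transaction buffer to the mempool buffer
      break
    default:
      Logger.info(`Tracker : ${topic.toString()}`)
  }
})
```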

10
tracker/transaction.js

@ -63,7 +63,7 @@ class Transaction {
}
} catch(e) {
Logger.error(e, 'Transaction.checkTransaction()')
Logger.error(e, 'Tracker : Transaction.checkTransaction()')
return Promise.reject(e)
}
}
@ -122,7 +122,7 @@ class Transaction {
// Detect potential double spends
if (r.spendingTxnID !== null && r.spendingTxnID != this.storedTxnID) {
Logger.info(`DOUBLE SPEND of ${r.txnTxid}-${r.outIndex} by ${this.txid}!`)
Logger.info(`Tracker : DOUBLE SPEND of ${r.txnTxid}-${r.outIndex} by ${this.txid}!`)
// Delete the existing transaction that has been double-spent:
// since the deepest block keeps its transactions, this will
// eventually work itself out, and the wallet will not show
@ -366,7 +366,7 @@ class Transaction {
const derived = await hdaHelper.deriveAddresses(xpub, chain, indices, hdType)
Array.prototype.push.apply(newAddresses, derived)
Logger.info(`Derived hdID(${hdAccount.hdID}) M/${chain}/${indices.join(',')}`)
Logger.info(`Tracker : Derived hdID(${hdAccount.hdID}) M/${chain}/${indices.join(',')}`)
// Update view of derived address indices
derivedIndices[chain] = chainMaxUsedIndex + gapLimit[chain]
@ -374,7 +374,7 @@ class Transaction {
// Check derived addresses for use in this transaction
for (let d of derived) {
if (indexedOutputs[d.address]) {
Logger.info(`Derived address already in outputs: M/${d.chain}/${d.index}`)
Logger.info(`Tracker : Derived address already in outputs: M/${d.chain}/${d.index}`)
// This transaction spends to an address
// beyond the original derived gap limit!
chainMaxUsedIndex = d.index
@ -405,7 +405,7 @@ class Transaction {
locktime: this.tx.locktime,
})
Logger.info(`Storing transaction ${this.txid}`)
Logger.info(`Tracker : Storing transaction ${this.txid}`)
}
}
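
A hypothetical sketch of the index window derived above: when an output hits an address beyond the already-derived range, the tracker derives from the last derived index up to the max used index plus the chain's gap limit (`indicesToDerive` and its parameters are illustrative names):

```javascript
// Hypothetical sketch of the derivation window, using lodash's range
// (lodash is used elsewhere in the tracker).
const _ = require('lodash')

function indicesToDerive(derivedMaxIndex, chainMaxUsedIndex, gapLimit) {
  // e.g. derived up to 40, output at index 45, gap limit 20 => 41..65
  return _.range(derivedMaxIndex + 1, chainMaxUsedIndex + gapLimit + 1)
}
```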
