
Merge v1.10.0 into umbrel

Branch: umbrel
Author: Lounès Ksouri, 3 years ago
Parent commit: ec02a553cf
GPG Key ID: F8DC83D24F68572D (no known key found for this signature in database)
Changed files (number of changed lines in parentheses):
  1. RELEASES.md (104)
  2. accounts/api-helper.js (2)
  3. accounts/headers-rest-api.js (2)
  4. accounts/index.js (4)
  5. accounts/support-rest-api.js (8)
  6. accounts/transactions-rest-api.js (17)
  7. accounts/xpub-rest-api.js (15)
  8. doc/DOCKER_advanced_setups.md (4)
  9. doc/GET_txs.md (1)
 10. docker/my-dojo/.env (72)
 11. docker/my-dojo/bitcoin/Dockerfile (19)
 12. docker/my-dojo/bitcoin/restart.sh (6)
 13. docker/my-dojo/conf/docker-mysql.conf.tpl (6)
 14. docker/my-dojo/docker-compose.yaml (19)
 15. docker/my-dojo/explorer/Dockerfile (9)
 16. docker/my-dojo/explorer/restart.sh (2)
 17. docker/my-dojo/indexer/Dockerfile (10)
 18. docker/my-dojo/indexer/restart.sh (3)
 19. docker/my-dojo/install/install-scripts.sh (14)
 20. docker/my-dojo/install/upgrade-scripts.sh (14)
 21. docker/my-dojo/mysql/mysql-default.cnf (3)
 22. docker/my-dojo/mysql/mysql-dojo.cnf (2)
 23. docker/my-dojo/mysql/mysql-low_mem.cnf (11)
 24. docker/my-dojo/node/Dockerfile (10)
 25. docker/my-dojo/node/keys.index.js (4)
 26. docker/my-dojo/node/restart.sh (8)
 27. docker/my-dojo/node/wait-for-it.sh (4)
 28. docker/my-dojo/overrides/bitcoind.install.yaml (6)
 29. docker/my-dojo/overrides/explorer.install.yaml (2)
 30. docker/my-dojo/overrides/indexer.install.yaml (6)
 31. docker/my-dojo/overrides/whirlpool.install.yaml (5)
 32. docker/my-dojo/tor/Dockerfile (17)
 33. docker/my-dojo/tor/restart.sh (18)
 34. docker/my-dojo/whirlpool/Dockerfile (21)
 35. docker/my-dojo/whirlpool/restart.sh (7)
 36. keys/index-example.js (3)
 37. lib/bitcoin/addresses-helper.js (84)
 38. lib/bitcoin/hd-accounts-helper.js (18)
 39. lib/bitcoin/hd-accounts-service.js (5)
 40. lib/bitcoin/network.js (21)
 41. lib/bitcoin/parallel-address-derivation.js (22)
 42. lib/bitcoind-rpc/fees.js (9)
 43. lib/bitcoind-rpc/headers.js (6)
 44. lib/bitcoind-rpc/latest-block.js (6)
 45. lib/bitcoind-rpc/rpc-client.js (60)
 46. lib/bitcoind-rpc/transactions.js (17)
 47. lib/db/mysql-db-wrapper.js (2)
 48. lib/fork-pool.js (85)
 49. lib/http-server/http-server.js (19)
 50. lib/remote-importer/bitcoind-wrapper.js (9)
 51. lib/remote-importer/local-rest-indexer-wrapper.js (157)
 52. lib/remote-importer/remote-importer.js (343)
 53. lib/remote-importer/sources-mainnet.js (7)
 54. lib/remote-importer/sources-testnet.js (7)
 55. lib/util.js (81)
 56. lib/wallet/address-info.js (9)
 57. lib/wallet/hd-account-info.js (42)
 58. lib/wallet/wallet-info.js (8)
 59. lib/wallet/wallet-service.js (66)
 60. package-lock.json (5644)
 61. package.json (28)
 62. pushtx/index-orchestrator.js (4)
 63. pushtx/index.js (4)
 64. pushtx/orchestrator.js (10)
 65. pushtx/pushtx-processor.js (9)
 66. pushtx/status.js (13)
 67. pushtx/transactions-scheduler.js (6)
 68. scripts/create-first-blocks.js (8)
 69. static/admin/css/style.css (17)
 70. static/admin/dmt/status/status.js (4)
 71. static/admin/dmt/xpubs-tools/xpubs-tools.html (24)
 72. static/admin/dmt/xpubs-tools/xpubs-tools.js (50)
 73. static/admin/lib/api-wrapper.js (8)
 74. static/admin/lib/auth-utils.js (1)
 75. tracker/abstract-processor.js (50)
 76. tracker/block-worker.js (173)
 77. tracker/block.js (121)
 78. tracker/blockchain-processor.js (97)
 79. tracker/blocks-processor.js (222)
 80. tracker/index.js (4)
 81. tracker/mempool-processor.js (72)
 82. tracker/tracker-rest-api.js (5)
 83. tracker/transaction.js (13)
 84. tracker/transactions-bundle.js (106)

RELEASES.md (104)

@ -3,6 +3,7 @@
## Releases ##
- [v1.10.0](#1_10_0)
- [v1.9.0](#1_9_0)
- [v1.8.1](#1_8_1)
- [v1.8.0](#1_8_0)
@ -16,6 +17,109 @@
- [v1.1.0](#1_1_0)
<a name="1_10_0"/>
## Samourai Dojo v1.10.0 ##
### Notable changes ###
#### Performance optimizations ####
This release provides faster IBD (initial block download), synchronization and rescans thanks to the optimization of multiple components of Dojo (Tracker, Importer, etc.).
#### Export of XPUB activity ####
The Maintenance Tool now allows exporting the activity history of an XPUB in CSV format.
#### Upgrade of bitcoind to v0.21.1 ####
Upgrade to Bitcoin Core v0.21.1
#### Upgrade of whirlpool to v0.10.11 ####
Upgrade to whirlpool-cli 0.10.11
#### Upgrade of explorer to v3.1.1 ####
Upgrade to btc-rpc-explorer 3.1.1
#### Upgrade of tor to v0.4.4.8 ####
Upgrade to Tor v0.4.4.8
#### Upgrade of indexer to v0.5.0 ####
Upgrade to addrindexrs v0.5.0
### Change log ###
#### MyDojo ####
- [#mr199](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/199) manage linux uids and gids as dojo system parameters
- [#mr200](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/200) manage ip addresses of containers as dojo system parameters
- [#mr201](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/201) disable rescan-lookahead field if data source is third_party_explorer
- [#mr202](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/202) reference db container by its ip address
- [#mr203](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/203) add export of xpub history in csv format
- [#mr204](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/204) upgrade whirlpool to whirlpool-cli v0.10.10
- [#mr206](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/206) add support of config profiles for mysql
- [#mr207](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/207) upgrade tor to tor 0.4.4.8
- [#mr208](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/208) improve performances of blocks processing by the tracker
- [#mr209](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/209) improve performances of api
- [#mr210](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/210) better seriesCall
- [#mr211](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/211) add support of rest api provided by addrindexrs
- [#mr212](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/212) minor optimizations
- [#mr214](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/214) upgrade explorer to btc rpc explorer 3.0.0
- [#mr215](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/215) handle Error in sendError method
- [#mr217](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/217) optimize tracker (parallel processing of blocks)
- [#mr218](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/218) optimize derivation of addresses
- [#mr219](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/219) optimize remote importer
- [#mr221](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/221) implement util.splitList() with slice() instead of splice()
- [#mr222](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/222) determine bitcoin network based on config file instead of cli argument
- [#mr223](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/223) upgrade bitcoind to bitcoin core 0.21.1
- [#mr224](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/224) switch to buster-slim and alpine images
- [#mr226](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/226) upgrade btc-rpc-explorer to v3.1.1
- [#mr227](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/227) switch from express to tiny-http
- [#mr228](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/228) set NODE_ENV to production for optimization purposes
- [#mr232](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/232) upgrade whirlpool to whirlpool-cli v0.10.11
#### Bug fixes ####
- [#mr220](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/220) switch tx isolation mode to read-committed
#### Security ####
- [#mr216](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/216) upgrade node packages
- [#mr229](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/229) update node dependencies
#### Documentation ####
- [#mr225](https://code.samourai.io/dojo/samourai-dojo/-/merge_requests/225) update docker_advanced_setups.md - fix typo
### Credits ###
- flatcloud0b3
- kenshin-samourai
- LaurentMT
- MrHash
- pajasevi
<a name="1_9_0"/>
## Samourai Dojo v1.9.0 ##

accounts/api-helper.js (2)

@ -109,7 +109,7 @@ class ApiHelper {
* Express middleware validating if entities params are well formed
* @param {object} req - http request object
* @param {object} res - http response object
* @param {function} next - next express middleware
* @param {function} next - next tiny-http middleware
*/
validateEntitiesParams(req, res, next) {
const params = this.checkEntitiesParams(req.query) ? req.query : req.body

accounts/headers-rest-api.js (2)

@ -57,7 +57,7 @@ class HeadersRestApi {
* Validate request arguments
* @param {object} req - http request object
* @param {object} res - http response object
* @param {function} next - next express middleware
* @param {function} next - next tiny-http middleware
*/
validateArgsGetHeader(req, res, next) {
const isValidHash = validator.isHash(req.params.hash, 'sha256')

accounts/index.js (4)

@ -7,7 +7,7 @@
'use strict'
const Logger = require('../lib/logger')
const RpcClient = require('../lib/bitcoind-rpc/rpc-client')
const { waitForBitcoindRpcApi } = require('../lib/bitcoind-rpc/rpc-client')
const network = require('../lib/bitcoin/network')
const keys = require('../keys')[network.key]
const db = require('../lib/db/mysql-db-wrapper')
@ -34,7 +34,7 @@
// Wait for Bitcoind RPC API
// being ready to process requests
await RpcClient.waitForBitcoindRpcApi()
await waitForBitcoindRpcApi()
// Initialize the db wrapper
const dbConfig = {

accounts/support-rest-api.js (8)

@ -336,7 +336,7 @@ class SupportRestApi {
* Validate arguments related to GET xpub info requests
* @param {object} req - http request object
* @param {object} res - http response object
* @param {function} next - next express middleware
* @param {function} next - next tiny-http middleware
*/
validateArgsGetXpubInfo(req, res, next) {
const isValidXpub = validator.isAlphanumeric(req.params.xpub)
@ -353,7 +353,7 @@ class SupportRestApi {
* Validate arguments related to GET xpub rescan requests
* @param {object} req - http request object
* @param {object} res - http response object
* @param {function} next - next express middleware
* @param {function} next - next tiny-http middleware
*/
validateArgsGetXpubRescan(req, res, next) {
const isValidXpub = validator.isAlphanumeric(req.params.xpub)
@ -371,7 +371,7 @@ class SupportRestApi {
* Validate arguments related to GET xpub delete requests
* @param {object} req - http request object
* @param {object} res - http response object
* @param {function} next - next express middleware
* @param {function} next - next tiny-http middleware
*/
validateArgsGetXpubDelete(req, res, next) {
const isValidXpub = validator.isAlphanumeric(req.params.xpub)
@ -388,7 +388,7 @@ class SupportRestApi {
* Validate arguments related to addresses requests
* @param {object} req - http request object
* @param {object} res - http response object
* @param {function} next - next express middleware
* @param {function} next - next tiny-http middleware
*/
validateAddress(req, res, next) {
const isValidAddress = validator.isAlphanumeric(req.params.addr)

accounts/transactions-rest-api.js (17)

@ -83,8 +83,15 @@ class TransactionsRestApi {
const active = apiHelper.parseEntities(req.query.active)
const page = req.query.page != null ? parseInt(req.query.page) : 0
const count = req.query.count != null ? parseInt(req.query.count) : keys.multiaddr.transactions
const excludeNullXfer = req.query.excludeNullXfer != null
const result = await walletService.getWalletTransactions(active, page, count)
if (excludeNullXfer) {
result.txs = result.txs.filter(tx => {
return tx['result'] != 0
})
}
const ret = JSON.stringify(result, null, 2)
HttpServer.sendRawData(res, ret)
@ -105,7 +112,7 @@ class TransactionsRestApi {
* Validate arguments of /tx requests
* @param {object} req - http request object
* @param {object} res - http response object
* @param {function} next - next express middleware
* @param {function} next - next tiny-http middleware
*/
validateArgsGetTransaction(req, res, next) {
const isValidTxid = validator.isHash(req.params.txid, 'sha256')
@ -130,7 +137,7 @@ class TransactionsRestApi {
* Validate arguments of /txs requests
* @param {object} req - http request object
* @param {object} res - http response object
* @param {function} next - next express middleware
* @param {function} next - next tiny-http middleware
*/
validateArgsGetTransactions(req, res, next) {
const isValidPage =
@ -141,7 +148,11 @@ class TransactionsRestApi {
!req.query.count
|| validator.isInt(req.query.count)
if (!(isValidPage && isValidCount)) {
const isValidExcludeNull =
!req.query.excludeNullXfer
|| validator.isAlphanumeric(req.query.excludeNullXfer)
if (!(isValidPage && isValidCount && isValidExcludeNull)) {
HttpServer.sendError(res, errors.body.INVDATA)
Logger.error(
req.query,

accounts/xpub-rest-api.js (15)

@ -9,16 +9,14 @@ const bodyParser = require('body-parser')
const errors = require('../lib/errors')
const network = require('../lib/bitcoin/network')
const Logger = require('../lib/logger')
const db = require('../lib/db/mysql-db-wrapper')
const hdaHelper = require('../lib/bitcoin/hd-accounts-helper')
const hdaService = require('../lib/bitcoin/hd-accounts-service')
const RpcClient = require('../lib/bitcoind-rpc/rpc-client')
const HdAccountInfo = require('../lib/wallet/hd-account-info')
const authMgr = require('../lib/auth/authorizations-manager')
const HttpServer = require('../lib/http-server/http-server')
const remoteImporter = require('../lib/remote-importer/remote-importer')
const debugApi = !!(process.argv.indexOf('api-debug') > -1)
const debugApi = process.argv.indexOf('api-debug') > -1
const gap = require('../keys/')[network.key].gap
@ -34,9 +32,6 @@ class XPubRestApi {
constructor(httpServer) {
this.httpServer = httpServer
// Initialize the rpc client
this.rpcClient = new RpcClient()
// Establish routes
const urlencodedParser = bodyParser.urlencoded({ extended: true })
@ -404,7 +399,7 @@ class XPubRestApi {
* Validate arguments of postXpub requests
* @param {object} req - http request object
* @param {object} res - http response object
* @param {function} next - next express middleware
* @param {function} next - next tiny-http middleware
*/
validateArgsPostXpub(req, res, next) {
const isValidXpub = validator.isAlphanumeric(req.body.xpub)
@ -436,7 +431,7 @@ class XPubRestApi {
* Validate arguments of getXpub requests
* @param {object} req - http request object
* @param {object} res - http response object
* @param {function} next - next express middleware
* @param {function} next - next tiny-http middleware
*/
validateArgsGetXpub(req, res, next) {
const isValidXpub = validator.isAlphanumeric(req.params.xpub)
@ -456,7 +451,7 @@ class XPubRestApi {
* Validate arguments of postLockXpub requests
* @param {object} req - http request object
* @param {object} res - http response object
* @param {function} next - next express middleware
* @param {function} next - next tiny-http middleware
*/
validateArgsPostLockXpub(req, res, next) {
const isValidXpub = validator.isAlphanumeric(req.params.xpub)
@ -480,7 +475,7 @@ class XPubRestApi {
* Validate arguments of deleteXpub requests
* @param {object} req - http request object
* @param {object} res - http response object
* @param {function} next - next express middleware
* @param {function} next - next tiny-http middleware
*/
validateArgsDeleteXpub(req, res, next) {
const isValidXpub = validator.isAlphanumeric(req.params.xpub)

doc/DOCKER_advanced_setups.md (4)

@ -11,7 +11,7 @@ A word of caution, though, the default values of these options try to maximize y
- [Local Electrum server used as data source for imports/rescans](#local_electrum)
- [Local Whirlpool client](#local_whirlpool)
- [External Bitcoin full node](#external_bitcoind)
- [bitcoind RPC API ans ZMQ notifications exposed to external apps](#exposed_rpc_zmq)
- [bitcoind RPC API and ZMQ notifications exposed to external apps](#exposed_rpc_zmq)
- [Static onion address for bitcoind hidden service](#static_onion)
- [Configure Tor Bridges](#tor_bridges)
- [Support of testnet](#testnet)
@ -282,7 +282,7 @@ Follow these steps if you want to speed up this operation by preloading an archi
<a name="exposed_rpc_zmq"/>
## bitcoind RPC API ans ZMQ notifications exposed to external apps ##
## bitcoind RPC API and ZMQ notifications exposed to external apps ##
By default, access to the RPC API of your bitcoind is restricted to Docker containers hosted on the "dojonet" network.

doc/GET_txs.md (1)

@ -12,6 +12,7 @@ GET /txs?active=...
* **active** - `string` - A pipe-separated list of extended public keys and/or loose addresses and/or pubkeys (`xpub1|address1|address2|pubkey1|...`)
* **page** - `integer` - Index of the requested page (first page is index 0)
* **count** - `integer` - Number of transactions returned per page
* **excludeNullXfer** - `boolean` - Boolean flag indicating if transactions that don't change the balance should be excluded from the result (default = false)
* **at** - `string` (optional) - Access Token (json web token). Required if authentication is activated. Alternatively, the access token can be passed through the `Authorization` HTTP header (with the `Bearer` scheme).
### Examples
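A request that uses the new flag might look like this (the xpub value is a placeholder):

GET /txs?active=xpub0123456789&page=0&count=25&excludeNullXfer=true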

docker/my-dojo/.env (72)

@ -10,15 +10,34 @@
COMPOSE_CONVERT_WINDOWS_PATHS=1
DOJO_VERSION_TAG=1.9.0
DOJO_DB_VERSION_TAG=1.2.0
DOJO_BITCOIND_VERSION_TAG=1.11.0
DOJO_NODEJS_VERSION_TAG=1.9.0
DOJO_NGINX_VERSION_TAG=1.5.0
DOJO_TOR_VERSION_TAG=1.7.0
DOJO_EXPLORER_VERSION_TAG=1.4.0
DOJO_INDEXER_VERSION_TAG=1.2.0
DOJO_WHIRLPOOL_VERSION_TAG=1.3.0
DOJO_VERSION_TAG=1.10.0
DOJO_DB_VERSION_TAG=1.3.0
DOJO_BITCOIND_VERSION_TAG=1.12.0
DOJO_NODEJS_VERSION_TAG=1.10.0
DOJO_NGINX_VERSION_TAG=1.6.0
DOJO_TOR_VERSION_TAG=1.9.0
DOJO_EXPLORER_VERSION_TAG=1.7.0
DOJO_INDEXER_VERSION_TAG=1.3.0
DOJO_WHIRLPOOL_VERSION_TAG=1.4.0
#########################################
# NETWORKING
#########################################
NET_DOJO_MYSQL_IPV4=172.28.1.1
NET_DOJO_NODE_IPV4=172.28.1.2
NET_DOJO_NGINX_IPV4=172.28.1.3
NET_DOJO_TOR_IPV4=172.28.1.4
NET_DOJO_BITCOIND_IPV4=172.28.1.5
NET_DOJO_INDEXER_IPV4=172.28.1.6
NET_DOJO_EXPLORER_IPV4=172.28.1.7
NET_DMZ_NGINX_IPV4=172.29.1.3
NET_DMZ_TOR_IPV4=172.29.1.4
NET_WHIRL_NGINX_IPV4=172.30.1.3
NET_WHIRL_WHIRLPOOL_IPV4=172.30.1.8
#########################################
@ -32,6 +51,9 @@ MYSQL_DATABASE=samourai-main
# BITCOIND
#########################################
BITCOIND_LINUX_UID=1105
BITCOIND_LINUX_GID=1108
BITCOIND_DNSSEED=0
BITCOIND_DNS=0
@ -67,3 +89,35 @@ NODE_TRACKER_UNCONF_TXS_PERIOD=300000
#########################################
INDEXER_BATCH_SIZE=10
#########################################
# TOR
#########################################
TOR_LINUX_UID=1104
TOR_LINUX_GID=1107
#########################################
# WHIRLPOOL
#########################################
WHIRLPOOL_LINUX_UID=1110
WHIRLPOOL_LINUX_GID=1113
#########################################
# INDEXER
#########################################
INDEXER_LINUX_UID=1106
INDEXER_LINUX_GID=1109
#########################################
# SOROBAN
#########################################
SOROBAN_LINUX_UID=1111
SOROBAN_LINUX_GID=1114

docker/my-dojo/bitcoin/Dockerfile (19)

@ -1,17 +1,22 @@
FROM debian:buster
FROM debian:buster-slim
#################################################################
# INSTALL BITCOIN
#################################################################
ENV BITCOIN_HOME /home/bitcoin
ENV BITCOIN_VERSION 0.21.0
ENV BITCOIN_URL https://bitcoincore.org/bin/bitcoin-core-0.21.0/bitcoin-0.21.0-x86_64-linux-gnu.tar.gz
ENV BITCOIN_SHA256 da7766775e3f9c98d7a9145429f2be8297c2672fe5b118fd3dc2411fb48e0032
ENV BITCOIN_VERSION 0.21.1
ENV BITCOIN_URL https://bitcoincore.org/bin/bitcoin-core-0.21.1/bitcoin-0.21.1-x86_64-linux-gnu.tar.gz
ENV BITCOIN_SHA256 366eb44a7a0aa5bd342deea215ec19a184a11f2ca22220304ebb20b9c8917e2b
ENV BITCOIN_ASC_URL https://bitcoincore.org/bin/bitcoin-core-0.21.0/SHA256SUMS.asc
ENV BITCOIN_PGP_KS_URI hkp://keyserver.ubuntu.com:80
ENV BITCOIN_PGP_KEY 01EA5486DE18A882D4C2684590C8019E36C2E964
ARG BITCOIND_LINUX_UID
ARG BITCOIND_LINUX_GID
ARG TOR_LINUX_GID
RUN set -ex && \
apt-get update && \
apt-get install -qq --no-install-recommends ca-certificates dirmngr gosu gpg gpg-agent wget && \
@ -30,9 +35,9 @@ RUN set -ex && \
# Create groups bitcoin & tor
# Create user bitcoin and add it to groups
RUN addgroup --system -gid 1108 bitcoin && \
addgroup --system -gid 1107 tor && \
adduser --system --ingroup bitcoin -uid 1105 bitcoin && \
RUN addgroup --system -gid ${BITCOIND_LINUX_GID} bitcoin && \
addgroup --system -gid ${TOR_LINUX_GID} tor && \
adduser --system --ingroup bitcoin -uid ${BITCOIND_LINUX_UID} bitcoin && \
usermod -a -G tor bitcoin
# Create data directory

docker/my-dojo/bitcoin/restart.sh (6)

@ -15,9 +15,9 @@ bitcoind_options=(
-mempoolexpiry=$BITCOIND_MEMPOOL_EXPIRY
-minrelaytxfee=$BITCOIND_MIN_RELAY_TX_FEE
-port=8333
-proxy=172.28.1.4:9050
-proxy=$NET_DOJO_TOR_IPV4:9050
-rpcallowip=0.0.0.0/0
-rpcbind=172.28.1.5
-rpcbind=$NET_DOJO_BITCOIND_IPV4
-rpcpassword=$BITCOIND_RPC_PASSWORD
-rpcport=28256
-rpcthreads=$BITCOIND_RPC_THREADS
@ -31,7 +31,7 @@ bitcoind_options=(
if [ "$BITCOIND_LISTEN_MODE" == "on" ]; then
bitcoind_options+=(-listen=1)
bitcoind_options+=(-bind=172.28.1.5)
bitcoind_options+=(-bind="$NET_DOJO_BITCOIND_IPV4")
bitcoind_options+=(-externalip=$(cat /var/lib/tor/hsv2bitcoind/hostname))
bitcoind_options+=(-externalip=$(cat /var/lib/tor/hsv3bitcoind/hostname))
fi

docker/my-dojo/conf/docker-mysql.conf.tpl (6)

@ -16,3 +16,9 @@ MYSQL_USER=samourai
# Warning: This option must not be modified after the first installation
# Type: alphanumeric
MYSQL_PASSWORD=password
# MySQL configuration profile
# default = default configuration parameters
# low_mem = configuration minimizing the RAM consumed by the database
# Values: default | low_mem
MYSQL_CONF_PROFILE=default

docker/my-dojo/docker-compose.yaml (19)

@ -22,7 +22,7 @@ services:
max-file: "10"
networks:
dojonet:
ipv4_address: 172.28.1.1
ipv4_address: ${NET_DOJO_MYSQL_IPV4}
node:
image: "samouraiwallet/dojo-nodejs:${DOJO_NODEJS_VERSION_TAG}"
@ -30,6 +30,8 @@ services:
build:
context: ./../..
dockerfile: ./docker/my-dojo/node/Dockerfile
args:
TOR_LINUX_GID: ${TOR_LINUX_GID}
env_file:
- ./.env
- ./conf/docker-common.conf
@ -55,7 +57,7 @@ services:
- db
networks:
dojonet:
ipv4_address: 172.28.1.2
ipv4_address: ${NET_DOJO_NODE_IPV4}
nginx:
image: "samouraiwallet/dojo-nginx:${DOJO_NGINX_VERSION_TAG}"
@ -79,17 +81,20 @@ services:
- node
networks:
whirlnet:
ipv4_address: 172.30.1.3
ipv4_address: ${NET_WHIRL_NGINX_IPV4}
dmznet:
ipv4_address: 172.29.1.3
ipv4_address: ${NET_DMZ_NGINX_IPV4}
dojonet:
ipv4_address: 172.28.1.3
ipv4_address: ${NET_DOJO_NGINX_IPV4}
tor:
image: "samouraiwallet/dojo-tor:${DOJO_TOR_VERSION_TAG}"
container_name: tor
build:
context: ./tor
args:
TOR_LINUX_UID: ${TOR_LINUX_UID}
TOR_LINUX_GID: ${TOR_LINUX_GID}
env_file:
- ./.env
- ./conf/docker-bitcoind.conf
@ -107,9 +112,9 @@ services:
max-file: "10"
networks:
dmznet:
ipv4_address: 172.29.1.4
ipv4_address: ${NET_DMZ_TOR_IPV4}
dojonet:
ipv4_address: 172.28.1.4
ipv4_address: ${NET_DOJO_TOR_IPV4}
networks:
dojonet:

docker/my-dojo/explorer/Dockerfile (9)

@ -1,14 +1,15 @@
FROM node:12-buster
FROM node:12-alpine
ENV NODE_ENV production
ENV APP_DIR /home/node/app
ENV EXPLORER_URL https://github.com/janoside/btc-rpc-explorer/archive
ENV EXPLORER_VERSION 2.1.0
ENV EXPLORER_VERSION 3.1.1
# Install netcat
RUN set -ex && \
apt-get update && \
apt-get install -y netcat
apk --no-cache add bash gcc g++ make python3 git netcat-openbsd
# Download the source code and install it
RUN set -ex && \

docker/my-dojo/explorer/restart.sh (2)

@ -4,7 +4,7 @@ cd /home/node/app
explorer_options=(
--port 3002
--host 172.28.1.7
--host "$NET_DOJO_EXPLORER_IPV4"
--basic-auth-password "$EXPLORER_KEY"
--coin BTC
--bitcoind-host "$BITCOIND_IP"

docker/my-dojo/indexer/Dockerfile (10)

@ -1,16 +1,19 @@
FROM rust:1.42.0-slim-buster
ENV INDEXER_HOME /home/indexer
ENV INDEXER_VERSION 0.4.0
ENV INDEXER_VERSION 0.5.0
ENV INDEXER_URL https://code.samourai.io/dojo/addrindexrs.git
ARG INDEXER_LINUX_GID
ARG INDEXER_LINUX_UID
RUN apt-get update && \
apt-get install -y clang cmake git && \
apt-get install -y libsnappy-dev
# Create group and user indexer
RUN addgroup --system -gid 1109 indexer && \
adduser --system --ingroup indexer -uid 1106 indexer
RUN addgroup --system -gid ${INDEXER_LINUX_GID} indexer && \
adduser --system --ingroup indexer -uid ${INDEXER_LINUX_UID} indexer
# Create data directory
RUN mkdir "$INDEXER_HOME/addrindexrs" && \
@ -39,5 +42,6 @@ RUN cd "$INDEXER_HOME/addrindexrs" && \
cargo install --locked --path .
EXPOSE 50001
EXPOSE 8080
STOPSIGNAL SIGINT

docker/my-dojo/indexer/restart.sh (3)

@ -6,7 +6,8 @@ indexer_options=(
--index-batch-size="$INDEXER_BATCH_SIZE"
--jsonrpc-import
--db-dir="/home/indexer/db"
--indexer-rpc-addr="172.28.1.6:50001"
--indexer-rpc-addr="$NET_DOJO_INDEXER_IPV4:50001"
--indexer-http-addr="$NET_DOJO_INDEXER_IPV4:8080"
--daemon-rpc-addr="$BITCOIND_IP:$BITCOIND_RPC_PORT"
--cookie="$BITCOIND_RPC_USER:$BITCOIND_RPC_PASSWORD"
--txid-limit="$INDEXER_TXID_LIMIT"

docker/my-dojo/install/install-scripts.sh (14)

@ -6,6 +6,12 @@ else
source ./conf/docker-bitcoind.conf.tpl
fi
if [ -f ./conf/docker-mysql.conf ]; then
source ./conf/docker-mysql.conf
else
source ./conf/docker-mysql.conf.tpl
fi
if [ -f ./conf/docker-explorer.conf ]; then
source ./conf/docker-explorer.conf
else
@ -111,4 +117,12 @@ init_config_files() {
cp ../../static/admin/conf/index-mainnet.js ../../static/admin/conf/index.js
echo "Initialized index.js (admin module)"
fi
# Initialize config files for mysql
if [ "$MYSQL_CONF_PROFILE" == "low_mem" ]; then
cp ./mysql/mysql-low_mem.cnf ./mysql/mysql-dojo.cnf
else
cp ./mysql/mysql-default.cnf ./mysql/mysql-dojo.cnf
fi
echo "Initialized mysql-dojo.cnf (mysql)"
}

docker/my-dojo/install/upgrade-scripts.sh (14)

@ -6,6 +6,12 @@ else
source ./conf/docker-common.conf.tpl
fi
if [ -f ./conf/docker-mysql.conf ]; then
source ./conf/docker-mysql.conf
else
source ./conf/docker-mysql.conf.tpl
fi
if [ -f ./conf/docker-explorer.conf ]; then
source ./conf/docker-explorer.conf
else
@ -95,6 +101,14 @@ update_config_files() {
cp ../../static/admin/conf/index-mainnet.js ../../static/admin/conf/index.js
echo "Initialized index.js (admin module)"
fi
# Initialize config files for mysql
if [ "$MYSQL_CONF_PROFILE" == "low_mem" ]; then
cp ./mysql/mysql-low_mem.cnf ./mysql/mysql-dojo.cnf
else
cp ./mysql/mysql-default.cnf ./mysql/mysql-dojo.cnf
fi
echo "Initialized mysql-dojo.cnf (mysql)"
}
# Update a configuration file from template

docker/my-dojo/mysql/mysql-default.cnf (3)

@ -0,0 +1,3 @@
[mysqld]
sql_mode="NO_ENGINE_SUBSTITUTION"
transaction_isolation=READ-COMMITTED

docker/my-dojo/mysql/mysql-dojo.cnf (2)

@ -1,2 +0,0 @@
[mysqld]
sql_mode="NO_ENGINE_SUBSTITUTION"

docker/my-dojo/mysql/mysql-low_mem.cnf (11)

@ -0,0 +1,11 @@
[mysqld]
sql_mode="NO_ENGINE_SUBSTITUTION"
transaction_isolation=READ-COMMITTED
performance_schema=off
innodb_buffer_pool_size=128M
innodb_buffer_pool_chunk_size=16M
key_buffer_size=8M
tmp_table_size=1M
max_connections=80
sort_buffer_size=1M
query_cache_size=8M

docker/my-dojo/node/Dockerfile (10)

@ -1,9 +1,15 @@
FROM node:12-buster
FROM node:12-alpine
ENV NODE_ENV production
ENV APP_DIR /home/node/app
ARG TOR_LINUX_GID
RUN set -ex && \
apk --no-cache add shadow bash gcc g++ make python3
# Add node user to tor group
RUN addgroup --system -gid 1107 tor && \
RUN addgroup -S -g ${TOR_LINUX_GID} tor && \
usermod -a -G tor node
# Install forever

docker/my-dojo/node/keys.index.js (4)

@ -74,7 +74,7 @@ module.exports = {
// Password
pass: process.env.MYSQL_PASSWORD,
// IP address
host: 'db',
host: process.env.NET_DOJO_MYSQL_IPV4,
// TCP port
port: 3306,
// Db name
@ -198,7 +198,7 @@ module.exports = {
},
// Use a SOCKS5 proxy for all communications with external services
// Values: null if no socks5 proxy used, otherwise the url of the socks5 proxy
socks5Proxy: `socks5h://${torIP}:${torPort}`,
socks5Proxy: `socks5h://${process.env.NET_DOJO_TOR_IPV4}:9050`,
// OXT (mainnet)
oxt: process.env.NODE_URL_OXT_API,
// Esplora (testnet)

docker/my-dojo/node/restart.sh (8)

@ -1,14 +1,14 @@
#!/bin/bash
cd /home/node/app/accounts
forever start -a -l /dev/stdout -o /dev/null -e /dev/null index.js "$COMMON_BTC_NETWORK"
forever start -a -l /dev/stdout -o /dev/null -e /dev/null index.js
cd /home/node/app/pushtx
forever start -a -l /dev/stdout -o /dev/null -e /dev/null index.js "$COMMON_BTC_NETWORK"
forever start -a -l /dev/stdout -o /dev/null -e /dev/null index-orchestrator.js "$COMMON_BTC_NETWORK"
forever start -a -l /dev/stdout -o /dev/null -e /dev/null index.js
forever start -a -l /dev/stdout -o /dev/null -e /dev/null index-orchestrator.js
cd /home/node/app/tracker
forever start -a -l /dev/stdout -o /dev/null -e /dev/null index.js "$COMMON_BTC_NETWORK"
forever start -a -l /dev/stdout -o /dev/null -e /dev/null index.js
# Keep the container up
while true

docker/my-dojo/node/wait-for-it.sh (4)

@ -146,7 +146,11 @@ WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
WAITFORIT_ISBUSY=1
# Check if busybox timeout uses -t flag
# (recent Alpine versions don't support -t anymore)
if timeout &>/dev/stdout | grep -q -e '-t '; then
WAITFORIT_BUSYTIMEFLAG="-t"
fi
else
WAITFORIT_ISBUSY=0

docker/my-dojo/overrides/bitcoind.install.yaml (6)

@ -6,6 +6,10 @@ services:
container_name: bitcoind
build:
context: ./bitcoin
args:
BITCOIND_LINUX_UID: ${BITCOIND_LINUX_UID}
BITCOIND_LINUX_GID: ${BITCOIND_LINUX_GID}
TOR_LINUX_GID: ${TOR_LINUX_GID}
env_file:
- ./.env
- ./conf/docker-common.conf
@ -30,7 +34,7 @@ services:
- tor
networks:
dojonet:
ipv4_address: 172.28.1.5
ipv4_address: ${NET_DOJO_BITCOIND_IPV4}
node:
depends_on:

docker/my-dojo/overrides/explorer.install.yaml (2)

@ -23,7 +23,7 @@ services:
max-file: "10"
networks:
dojonet:
ipv4_address: 172.28.1.7
ipv4_address: ${NET_DOJO_EXPLORER_IPV4}
node:
depends_on:

docker/my-dojo/overrides/indexer.install.yaml (6)

@ -6,6 +6,9 @@ services:
container_name: indexer
build:
context: ./indexer
args:
INDEXER_LINUX_UID: ${INDEXER_LINUX_UID}
INDEXER_LINUX_GID: ${INDEXER_LINUX_GID}
env_file:
- ./.env
- ./conf/docker-common.conf
@ -15,6 +18,7 @@ services:
command: "/wait-for-it.sh tor:9050 --timeout=360 --strict -- /restart.sh"
expose:
- "50001"
- "8080"
volumes:
- data-indexer:/home/indexer
logging:
@ -26,7 +30,7 @@ services:
- tor
networks:
dojonet:
ipv4_address: 172.28.1.6
ipv4_address: ${NET_DOJO_INDEXER_IPV4}
volumes:
data-indexer:

docker/my-dojo/overrides/whirlpool.install.yaml (5)

@ -6,6 +6,9 @@ services:
container_name: whirlpool
build:
context: ./whirlpool
args:
WHIRLPOOL_LINUX_UID: ${WHIRLPOOL_LINUX_UID}
WHIRLPOOL_LINUX_GID: ${WHIRLPOOL_LINUX_GID}
env_file:
- ./.env
- ./conf/docker-common.conf
@ -23,7 +26,7 @@ services:
max-file: "10"
networks:
whirlnet:
ipv4_address: 172.30.1.8
ipv4_address: ${NET_WHIRL_WHIRLPOOL_IPV4}
volumes:
data-whirlpool:

docker/my-dojo/tor/Dockerfile (17)

@ -1,9 +1,9 @@
FROM debian:buster
FROM debian:buster-slim
ENV TOR_HOME /var/lib/tor
ENV TOR_URL https://dist.torproject.org
ENV TOR_MIRROR_URL https://tor.eff.org/dist
ENV TOR_VERSION 0.4.4.7
ENV TOR_VERSION 0.4.5.8
ENV TOR_GPG_KS_URI hkp://keyserver.ubuntu.com:80
ENV TOR_GPG_KEY1 0xEB5A896A28988BF5
ENV TOR_GPG_KEY2 0xC218525819F78451
@ -11,17 +11,20 @@ ENV TOR_GPG_KEY3 0x21194EBB165733EA
ENV TOR_GPG_KEY4 0x6AFEE6D49E92B601
ENV GOLANG_DL_URL https://dl.google.com/go
ENV GOLANG_ARCHIVE go1.11.13.linux-amd64.tar.gz
ENV GOLANG_SHA256 50fe8e13592f8cf22304b9c4adfc11849a2c3d281b1d7e09c924ae24874c6daa
ENV GOLANG_ARCHIVE go1.16.4.linux-amd64.tar.gz
ENV GOLANG_SHA256 7154e88f5a8047aad4b80ebace58a059e36e7e2e4eb3b383127a28c711b4ff59
ENV OBFS4_URL https://github.com/Yawning/obfs4.git
ENV OBFS4_VERSION 0.0.11
ARG TOR_LINUX_UID
ARG TOR_LINUX_GID
# Install Tor
RUN set -ex && \
apt-get update && \
apt-get install -y git libevent-dev zlib1g-dev libssl-dev gcc make automake ca-certificates autoconf musl-dev coreutils gpg wget && \
apt-get install -y git libevent-dev zlib1g-dev libssl-dev gcc make automake ca-certificates autoconf musl-dev coreutils gpg wget python3 && \
mkdir -p /usr/local/src/ && \
cd /usr/local/src && \
res=0; \
@ -68,8 +71,8 @@ RUN cd /usr/local/src && \
rm -rf obfs4proxy
# Create group & user tor
RUN addgroup --system -gid 1107 tor && \
adduser --system --ingroup tor -uid 1104 tor
RUN addgroup --system -gid ${TOR_LINUX_GID} tor && \
adduser --system --ingroup tor -uid ${TOR_LINUX_UID} tor
# Create /etc/tor directory
RUN mkdir -p /etc/tor/ && \

docker/my-dojo/tor/restart.sh (18)

@ -8,29 +8,29 @@ chmod 750 /var/lib/tor
echo "## Start tor #############################"
tor_options=(
--SocksPort 172.28.1.4:9050
--SocksPort "$NET_DOJO_TOR_IPV4:9050"
--SocksPolicy "accept 172.28.0.0/16"
--SocksPolicy "reject *"
--DataDirectory /var/lib/tor/.tor
--DataDirectoryGroupReadable 1
--HiddenServiceDir /var/lib/tor/hsv2dojo
--HiddenServiceVersion 2
--HiddenServicePort "80 172.29.1.3:80"
--HiddenServicePort "80 $NET_DMZ_NGINX_IPV4:80"
--HiddenServiceDir /var/lib/tor/hsv3dojo
--HiddenServiceVersion 3
--HiddenServicePort "80 172.29.1.3:80"
--HiddenServicePort "80 $NET_DMZ_NGINX_IPV4:80"
)
if [ "$BITCOIND_INSTALL" == "on" ]; then
if [ "$BITCOIND_LISTEN_MODE" == "on" ]; then
tor_options+=(--HiddenServiceDir /var/lib/tor/hsv2bitcoind)
tor_options+=(--HiddenServiceVersion 2)
tor_options+=(--HiddenServicePort "8333 172.28.1.5:8333")
tor_options+=(--HiddenServicePort "8333 $NET_DOJO_BITCOIND_IPV4:8333")
tor_options+=(--HiddenServiceDirGroupReadable 1)
tor_options+=(--HiddenServiceDir /var/lib/tor/hsv3bitcoind)
tor_options+=(--HiddenServiceVersion 3)
tor_options+=(--HiddenServicePort "8333 172.28.1.5:8333")
tor_options+=(--HiddenServicePort "8333 $NET_DOJO_BITCOIND_IPV4:8333")
tor_options+=(--HiddenServiceDirGroupReadable 1)
fi
fi
@ -38,24 +38,24 @@ fi
if [ "$EXPLORER_INSTALL" == "on" ]; then
tor_options+=(--HiddenServiceDir /var/lib/tor/hsv2explorer)
tor_options+=(--HiddenServiceVersion 2)
tor_options+=(--HiddenServicePort "80 172.29.1.3:9080")
tor_options+=(--HiddenServicePort "80 $NET_DMZ_NGINX_IPV4:9080")
tor_options+=(--HiddenServiceDirGroupReadable 1)
tor_options+=(--HiddenServiceDir /var/lib/tor/hsv3explorer)
tor_options+=(--HiddenServiceVersion 3)
tor_options+=(--HiddenServicePort "80 172.29.1.3:9080")
tor_options+=(--HiddenServicePort "80 $NET_DMZ_NGINX_IPV4:9080")
tor_options+=(--HiddenServiceDirGroupReadable 1)
fi
if [ "$WHIRLPOOL_INSTALL" == "on" ]; then
tor_options+=(--HiddenServiceDir /var/lib/tor/hsv2whirlpool)
tor_options+=(--HiddenServiceVersion 2)
tor_options+=(--HiddenServicePort "80 172.29.1.3:8898")
tor_options+=(--HiddenServicePort "80 $NET_DMZ_NGINX_IPV4:8898")
tor_options+=(--HiddenServiceDirGroupReadable 1)
tor_options+=(--HiddenServiceDir /var/lib/tor/hsv3whirlpool)
tor_options+=(--HiddenServiceVersion 3)
tor_options+=(--HiddenServicePort "80 172.29.1.3:8898")
tor_options+=(--HiddenServicePort "80 $NET_DMZ_NGINX_IPV4:8898")
tor_options+=(--HiddenServiceDirGroupReadable 1)
fi

docker/my-dojo/whirlpool/Dockerfile (21)

@ -1,18 +1,24 @@
FROM debian:buster
FROM debian:buster-slim
ENV WHIRLPOOL_HOME /home/whirlpool
ENV WHIRLPOOL_DIR /usr/local/whirlpool-cli
ARG WHIRLPOOL_LINUX_UID
ARG WHIRLPOOL_LINUX_GID
# Install prerequisites
# Create group & user whirlpool
# Create /usr/share/man/man1 directory
# Create .whirlpool-cli subdirectory of WHIRLPOOL_HOME
# Create /usr/local/src/whirlpool-cli directory
RUN mkdir -p /usr/share/man/man1
RUN set -ex && \
apt-get update && \
apt-get install -y libevent-dev zlib1g-dev libssl-dev gcc make automake ca-certificates autoconf musl-dev coreutils gpg wget default-jdk && \
addgroup --system -gid 1000 whirlpool && \
adduser --system --ingroup whirlpool -uid 1000 whirlpool && \
addgroup --system -gid ${WHIRLPOOL_LINUX_GID} whirlpool && \
adduser --system --ingroup whirlpool -uid ${WHIRLPOOL_LINUX_UID} whirlpool && \
mkdir -p "$WHIRLPOOL_HOME/.whirlpool-cli" && \
chown -Rv whirlpool:whirlpool "$WHIRLPOOL_HOME" && \
chmod -R 750 "$WHIRLPOOL_HOME" && \
@ -21,7 +27,7 @@ RUN set -ex && \
# Install Tor
ENV WHIRLPOOL_TOR_URL https://dist.torproject.org
ENV WHIRLPOOL_TOR_MIRROR_URL https://tor.eff.org/dist
ENV WHIRLPOOL_TOR_VERSION 0.4.4.7
ENV WHIRLPOOL_TOR_VERSION 0.4.4.8
ENV WHIRLPOOL_TOR_GPG_KS_URI hkp://keyserver.ubuntu.com:80
ENV WHIRLPOOL_TOR_GPG_KEY1 0xEB5A896A28988BF5
ENV WHIRLPOOL_TOR_GPG_KEY2 0xC218525819F78451
@ -60,10 +66,11 @@ RUN set -ex && \
# Install whirlpool-cli
ENV WHIRLPOOL_URL https://code.samourai.io/whirlpool/whirlpool-client-cli/uploads
ENV WHIRLPOOL_VERSION 0.10.9
ENV WHIRLPOOL_VERSION_HASH 602666c59f95ce72f1466f72d9c853e3
ENV WHIRLPOOL_VERSION 0.10.11
ENV WHIRLPOOL_VERSION_HASH 21d25ed02cceb91f4aa95b6389b9da9c
ENV WHIRLPOOL_JAR "whirlpool-client-cli-$WHIRLPOOL_VERSION-run.jar"
ENV WHIRLPOOL_SHA256 9de3ceaff6e8cc0849bde58bc9e17b9c602352df8659adc67ab95b39cf046e4c
ENV WHIRLPOOL_SHA256 09e574743851db2d5374bc7d4e66fd0c29f07b95b4e32b3d70c2187b724d5745
RUN set -ex && \
cd "$WHIRLPOOL_DIR" && \

docker/my-dojo/whirlpool/restart.sh (7)

@ -11,15 +11,16 @@ whirlpool_options=(
--cli.torConfig.coordinator.enabled=true
--cli.torConfig.backend.enabled=false
--cli.torConfig.backend.onion=false
--cli.mix.liquidityClient=false
--cli.mix.liquidityClient=true
--cli.mix.clientsPerPool=1
)
if [ "$COMMON_BTC_NETWORK" == "testnet" ]; then
whirlpool_options+=(--cli.server="TESTNET")
whirlpool_options+=(--cli.dojo.url="http://${NGINX_IP:-172.30.1.3}:80/test/v2/")
whirlpool_options+=(--cli.dojo.url="http://$NET_WHIRL_NGINX_IPV4:80/test/v2/")
else
whirlpool_options+=(--cli.server="MAINNET")
whirlpool_options+=(--cli.dojo.url="http://${NGINX_IP:-172.30.1.3}:80/v2/")
whirlpool_options+=(--cli.dojo.url="http://$NET_WHIRL_NGINX_IPV4:80/v2/")
fi
if [ "$WHIRLPOOL_COORDINATOR_ONION" == "on" ]; then

keys/index-example.js (3)

@ -6,6 +6,7 @@
/**
* Desired structure of /keys/index.js, which is ignored in the repository.
* index.js should store only one of the 2 sets of parameters (mainnet or testnet)
*/
module.exports = {
/*
@ -15,7 +16,7 @@ module.exports = {
/*
* Dojo version
*/
dojoVersion: '1.9.0',
dojoVersion: '1.10.0',
/*
* Bitcoind
*/

lib/bitcoin/addresses-helper.js (84)

@ -7,7 +7,9 @@
const bitcoin = require('bitcoinjs-lib')
const btcMessage = require('bitcoinjs-message')
const activeNet = require('./network').network
const { p2pkh, p2sh, p2wpkh } = bitcoin.payments
const { p2pkh, p2sh, p2wpkh, p2wsh } = bitcoin.payments
const { OPS } = bitcoin.script
/**
* A singleton providing Addresses helper functions
@ -106,6 +108,86 @@ class AddressesHelper {
}
}
/**
* Check if an output script is a P2PKH script
* @param {Buffer} scriptpubkey - scriptpubkey
* @returns {boolean} return true if output is a P2PKH script, otherwise return false
*/
isP2pkhScript(scriptpubkey) {
return scriptpubkey.length == 25
&& scriptpubkey[0] == OPS.OP_DUP
&& scriptpubkey[1] == OPS.OP_HASH160
&& scriptpubkey[2] == 0x14
&& scriptpubkey[23] == OPS.OP_EQUALVERIFY
&& scriptpubkey[24] == OPS.OP_CHECKSIG
}
/**
* Check if an output script is a P2SH script
* @param {Buffer} scriptpubkey - scriptpubkey
* @returns {boolean} return true if output is a P2SH script, otherwise return false
*/
isP2shScript(scriptpubkey) {
return scriptpubkey.length == 23
&& scriptpubkey[0] == OPS.OP_HASH160
&& scriptpubkey[1] == 0x14
&& scriptpubkey[22] == OPS.OP_EQUAL
}
/**
* Check if an output script is a P2WPKH script
* @param {Buffer} scriptpubkey - scriptpubkey
* @returns {boolean} return true if output is a P2WPKH script, otherwise return false
*/
isP2wpkhScript(scriptpubkey) {
return scriptpubkey.length == 22
&& scriptpubkey[0] == OPS.OP_0
&& scriptpubkey[1] == 0x14
}
/**
* Check if an output script is a P2WSH script
* @param {Buffer} scriptpubkey - scriptpubkey
* @returns {boolean} return true if output is a P2WSH script, otherwise return false
*/
isP2wshScript(scriptpubkey) {
return scriptpubkey.length == 34
&& scriptpubkey[0] == OPS.OP_0
&& scriptpubkey[1] == 0x20
}
/**
* Return the bitcoin address corresponding to an output script
* @param {Buffer} scriptpubkey - scriptpubkey
* @returns {string} bitcoin address
*/
outputScript2Address(scriptpubkey) {
if (this.isP2pkhScript(scriptpubkey))
return p2pkh({
output: scriptpubkey,
network: activeNet,
}).address
if (this.isP2shScript(scriptpubkey))
return p2sh({
output: scriptpubkey,
network: activeNet,
}).address
if (this.isP2wpkhScript(scriptpubkey))
return p2wpkh({
output: scriptpubkey,
network: activeNet,
}).address
if (this.isP2wshScript(scriptpubkey))
return p2wsh({
output: scriptpubkey,
network: activeNet,
}).address
throw 'unknown address format'
}
}
module.exports = new AddressesHelper()
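A short usage sketch of the new helpers (the 20-byte hash is a dummy value for illustration):

const addrHelper = require('./lib/bitcoin/addresses-helper')
// Build a synthetic P2WPKH output script: OP_0 PUSH20 <20-byte key hash>
const script = Buffer.concat([Buffer.from([0x00, 0x14]), Buffer.alloc(20, 0xab)])
console.log(addrHelper.isP2wpkhScript(script))       // true
console.log(addrHelper.outputScript2Address(script)) // bech32 address encoding the dummy hash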

lib/bitcoin/hd-accounts-helper.js (18)

@ -4,14 +4,13 @@
*/
'use strict'
const cp = require('child_process')
const LRU = require('lru-cache')
const workerPool = require('workerpool')
const bitcoin = require('bitcoinjs-lib')
const bs58check = require('bs58check')
const bs58 = require('bs58')
const errors = require('../errors')
const Logger = require('../logger')
const ForkPool = require('../fork-pool')
const network = require('./network')
const activeNet = network.network
const keys = require('../../keys/')[network.key]
@ -64,17 +63,16 @@ class HDAccountsHelper {
// Pool of child processes used for derivation of addresses
const poolKeys = keys.addrDerivationPool
this.derivationPool = new ForkPool(
this.derivationPool = workerPool.pool(
`${__dirname}/parallel-address-derivation.js`,
{
networkKey: network.key,
max: poolKeys.maxNbChildren,
min: poolKeys.minNbChildren,
acquireTimeoutMillis: poolKeys.acquireTimeoutMillis
maxWorkers: poolKeys.maxNbChildren,
minWorkers: poolKeys.minNbChildren,
workerType: 'thread'
}
)
this.externalDerivationActivated = true
Logger.info(`Created ${poolKeys.minNbChildren} worker threads for addresses derivation (max = ${poolKeys.maxNbChildren})`)
}
/**
@ -374,7 +372,7 @@ class HDAccountsHelper {
type: info.type
}
const msg = await this.derivationPool.enqueue(data)
const msg = await this.derivationPool.exec('deriveAddresses', [data])
if (msg.status == 'ok') {
resolve(msg.addresses)
@ -384,7 +382,7 @@ class HDAccountsHelper {
}
} catch(e) {
Logger.error(e, 'HdAccountsHelper : A problem was met during parallel addresses derivation')
Logger.error(null, 'HdAccountsHelper : A problem was met during parallel addresses derivation')
reject(e)
}
})

lib/bitcoin/hd-accounts-service.js (5)

@ -144,10 +144,7 @@ class HDAccountsService {
const externalPrm = hdaHelper.deriveAddresses(xpub, 0, _.range(gap.external), scheme)
const internalPrm = hdaHelper.deriveAddresses(xpub, 1, _.range(gap.internal), scheme)
const external = await externalPrm
const internal = await internalPrm
const addresses = _.flatten([external, internal])
const addresses = _.flatten(await Promise.all([externalPrm, internalPrm]))
return db.addAddressesToHDAccount(xpub, addresses)
}

lib/bitcoin/network.js (21)

@ -5,8 +5,16 @@
'use strict'
const bitcoin = require('bitcoinjs-lib')
const keys = require('../../keys/')
/**
* A set of keywords encoding for mainnet
*/
const MAINNET_KEY = [
'bitcoin'
]
/**
* A set of keywords encoding for testnet
*/
@ -26,15 +34,20 @@ class Network {
* Constructor
*/
constructor() {
// Check if mainnet config is detected in index.js
for (let kw of MAINNET_KEY) {
if (kw in keys) {
this.key = 'bitcoin'
this.network = bitcoin.networks.bitcoin
return
}
}
// Check if testnet config is detected in index.js
for (let kw of TESTNET_KEY) {
// Calling like 'node file.js arg1 arg2'
if (process.argv.indexOf(kw) > 1) {
if (kw in keys) {
this.key = 'testnet'
this.network = bitcoin.networks.testnet
break
return
}
}
}
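The net effect: the active network is now selected by which top-level key is present in keys/index.js instead of by a command line argument. A minimal sketch of the convention:

// keys/index.js exports exactly one network section, e.g. for mainnet:
//   module.exports = { bitcoin: { /* mainnet parameters */ } }
// network.js then only needs to test key presence:
const keys = require('../../keys/')
const key = ('bitcoin' in keys) ? 'bitcoin' : 'testnet'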

lib/bitcoin/parallel-address-derivation.js (22)

@ -5,6 +5,7 @@
'use strict'
const bitcoin = require('bitcoinjs-lib')
const workerPool = require('workerpool')
const errors = require('../errors')
const activeNet = require('./network').network
const addrHelper = require('./addresses-helper')
@ -26,7 +27,7 @@ const BIP84 = 2
* @param {int} type - type of derivation
* @returns {Promise - object} returns an object {address: '...', chain: <int>, index: <int>}
*/
const deriveAddress = async function(chain, chainNode, index, type) {
async function deriveAddress(chain, chainNode, index, type) {
// Derive M/chain/index
const indexNode = chainNode.derive(index)
@ -51,9 +52,11 @@ const deriveAddress = async function(chain, chainNode, index, type) {
}
/**
* Receive message from parent process
* Derives a set of addresses for an hd account
* @param {object} msg - parameters used for the derivation
* @returns {Promise - object[]}
*/
process.on('message', async (msg) => {
async function deriveAddresses(msg) {
try {
const xpub = msg.xpub
const chain = msg.chain
@ -76,17 +79,20 @@ process.on('message', async (msg) => {
const addresses = await Promise.all(promises)
// Send response to parent process
process.send({
return {
status: 'ok',
addresses: addresses
})
}
} catch(e) {
process.send({
return {
status: 'error',
addresses: [],
error: e
})
error: JSON.stringify(e)
}
}
}
workerPool.worker({
deriveAddresses: deriveAddresses
})
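Taken together with the pool-side changes in lib/bitcoin/hd-accounts-helper.js above, the pattern is: the worker file registers named tasks, and the parent invokes them by name through the pool. A minimal self-contained sketch (file names and the square task are illustrative; assumes workerpool ^6):

// worker.js
const workerPool = require('workerpool')
workerPool.worker({ square: n => n * n })

// parent.js
const pool = require('workerpool').pool(`${__dirname}/worker.js`, {
  minWorkers: 2,
  maxWorkers: 4,
  workerType: 'thread' // worker_threads instead of forked processes
})
pool.exec('square', [7])
  .then(result => console.log(result)) // 49
  .finally(() => pool.terminate())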

lib/bitcoind-rpc/fees.js (9)

@ -9,7 +9,7 @@ const errors = require('../errors')
const Logger = require('../logger')
const network = require('../bitcoin/network')
const keys = require('../../keys')[network.key]
const RpcClient = require('./rpc-client')
const { createRpcClient } = require('./rpc-client')
const latestBlock = require('./latest-block')
@ -27,7 +27,7 @@ class Fees {
this.fees = {}
this.feeType = keys.bitcoind.feeType
this.rpcClient = new RpcClient()
this.rpcClient = createRpcClient()
this.refresh()
}
@ -53,16 +53,15 @@ class Fees {
* @returns {Promise}
*/
async refresh() {
await util.seriesCall(this.targets, async tgt => {
await util.parallelCall(this.targets, async tgt => {
try {
const level = await this.rpcClient.cmd('estimatesmartfee', tgt, this.feeType)
const level = await this.rpcClient.estimatesmartfee({ conf_target: tgt, estimate_mode: this.feeType })
this.fees[tgt] = (level.errors && level.errors.length > 0) ? 0 : Math.round(level.feerate * 1e5)
} catch(e) {
Logger.error(e, 'Bitcoind RPC : Fees.refresh()')
this.fees[tgt] = 0
}
})
this.block = latestBlock.height
}
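The seriesCall to parallelCall switch queries all fee targets concurrently instead of one after the other. The helpers themselves are not part of this diff; a rough sketch of their assumed semantics (not the actual lib/util.js code):

// sequential: each call starts after the previous one resolves
const seriesCall = async (list, fn) => {
  const results = []
  for (const item of list)
    results.push(await fn(item))
  return results
}
// concurrent: all calls start immediately and are awaited together
const parallelCall = (list, fn) => Promise.all(list.map(fn))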

lib/bitcoind-rpc/headers.js (6)

@ -6,7 +6,7 @@
const LRU = require('lru-cache')
const errors = require('../errors')
const RpcClient = require('./rpc-client')
const { createRpcClient } = require('./rpc-client')
/**
@ -29,7 +29,7 @@ class Headers {
})
// Initialize the rpc client
this.rpcClient = new RpcClient()
this.rpcClient = createRpcClient()
}
/**
@ -42,7 +42,7 @@ class Headers {
return this.headers.get(hash)
try {
const header = await this.rpcClient.getblockheader(hash, true)
const header = await this.rpcClient.getblockheader({ blockhash: hash, verbose: true })
const fmtHeader = JSON.stringify(header, null, 2)
this.headers.set(hash, fmtHeader)
return fmtHeader

lib/bitcoind-rpc/latest-block.js (6)

@ -9,7 +9,7 @@ const Logger = require('../logger')
const util = require('../util')
const network = require('../bitcoin/network')
const keys = require('../../keys')[network.key]
const RpcClient = require('./rpc-client')
const { createRpcClient } = require('./rpc-client')
/**
@ -27,7 +27,7 @@ class LatestBlock {
this.diff = null
// Initialize the rpc client
this.rpcClient = new RpcClient()
this.rpcClient = createRpcClient()
// Gets the latest block from bitcoind
this.rpcClient.getbestblockhash().then(hash => this.onBlockHash(hash))
@ -54,7 +54,7 @@ class LatestBlock {
* @returns {Promise}
*/
async onBlockHash(hash) {
const header = await this.rpcClient.getblockheader(hash)
const header = await this.rpcClient.getblockheader({ blockhash: hash })
this.height = header.height
this.hash = hash

lib/bitcoind-rpc/rpc-client.js (60)

@ -4,7 +4,7 @@
*/
'use strict'
const rpc = require('bitcoind-rpc-client')
const {RPCClient} = require('rpc-bitcoin');
const network = require('../bitcoin/network')
const keys = require('../../keys')[network.key]
const util = require('../util')
@ -14,40 +14,12 @@ const Logger = require('../logger')
/**
* Wrapper for bitcoind rpc client
*/
class RpcClient {
/**
* Constructor
*/
constructor() {
// Initiliaze the rpc client
this.client = new rpc({
host: keys.bitcoind.rpc.host,
port: keys.bitcoind.rpc.port
})
this.client.set('user', keys.bitcoind.rpc.user)
this.client.set('pass', keys.bitcoind.rpc.pass)
// Initialize a proxy postprocessing api calls
return new Proxy(this, {
get: function(target, name, receiver) {
const origMethod = target.client[name]
return async function(...args) {
const result = await origMethod.apply(target.client, args)
if (Array.isArray(result)) {
return result
} else if (result.result) {
return result.result
} else if (result.error) {
throw result.error
} else {
throw 'A problem was met with a request sent to bitcoind RPC API'
}
}
}
const createRpcClient = () => {
return new RPCClient({
url: `http://${keys.bitcoind.rpc.host}`,
port: keys.bitcoind.rpc.port,
user: keys.bitcoind.rpc.user,
pass: keys.bitcoind.rpc.pass
})
}
@ -57,12 +29,12 @@ class RpcClient {
* @param {string} err - error message
* @returns {boolean} returns true if message related to a connection error
*/
static isConnectionError(err) {
const isConnectionError = (err) => {
if (typeof err != 'string')
return false
const isTimeoutError = (err.indexOf('connect ETIMEDOUT') != -1)
const isConnRejected = (err.indexOf('Connection Rejected') != -1)
const isTimeoutError = (err.indexOf('connect ETIMEDOUT') !== -1)
const isConnRejected = (err.indexOf('Connection Rejected') !== -1)
return (isTimeoutError || isConnRejected)
}
@ -71,8 +43,8 @@ class RpcClient {
* Check if the rpc api is ready to process requests
* @returns {Promise}
*/
static async waitForBitcoindRpcApi() {
let client = new RpcClient()
const waitForBitcoindRpcApi = async () => {
let client = createRpcClient()
try {
await client.getblockchaininfo()
@ -80,11 +52,13 @@ class RpcClient {
client = null
Logger.info('Bitcoind RPC : API is still unreachable. New attempt in 20s.')
return util.delay(20000).then(() => {
return RpcClient.waitForBitcoindRpcApi()
return waitForBitcoindRpcApi()
})
}
}
module.exports = {
createRpcClient,
isConnectionError,
waitForBitcoindRpcApi
}
module.exports = RpcClient
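rpc-bitcoin exposes one method per RPC command and takes named-parameter objects, which is why call sites throughout this diff moved from positional arguments (e.g. getblockheader(hash, true)) to objects. A usage sketch (credentials are placeholders):

const { RPCClient } = require('rpc-bitcoin')
const client = new RPCClient({ url: 'http://127.0.0.1', port: 8332, user: 'rpcuser', pass: 'rpcpassword' })
// inside an async function:
const hash = await client.getbestblockhash()
const header = await client.getblockheader({ blockhash: hash, verbose: true })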

lib/bitcoind-rpc/transactions.js (17)

@ -9,7 +9,7 @@ const LRU = require('lru-cache')
const errors = require('../errors')
const Logger = require('../logger')
const util = require('../util')
const RpcClient = require('./rpc-client')
const { createRpcClient } = require('./rpc-client')
const rpcLatestBlock = require('./latest-block')
@ -34,7 +34,7 @@ class Transactions {
// Initialize the rpc client
this.rpcClient = new RpcClient()
this.rpcClient = createRpcClient()
}
/**
@ -47,14 +47,17 @@ class Transactions {
try {
const rpcCalls = txids.map(txid => {
return {
'method': 'getrawtransaction',
'params': [txid, true]
method: 'getrawtransaction',
params: {
txid,
verbose: true
}
}
})
const txs = await this.rpcClient.batch(rpcCalls)
return await util.seriesCall(txs, async tx => {
return await util.parallelCall(txs, async tx => {
if (tx.result == null) {
Logger.info(`Bitcoind RPC : got null for ${txids[tx.id]}`)
return null
@ -77,7 +80,7 @@ class Transactions {
*/
async getTransaction(txid, fees) {
try {
const tx = await this.rpcClient.getrawtransaction(txid, true)
const tx = await this.rpcClient.getrawtransaction({ txid, verbose: true })
return this._prepareTxResult(tx, fees)
} catch(e) {
Logger.error(e, 'Bitcoind RPC : Transaction.getTransaction()')
@ -177,7 +180,7 @@ class Transactions {
if (this.prevCache.has(inTxid)) {
ptx = this.prevCache.get(inTxid)
} else {
ptx = await this.rpcClient.getrawtransaction(inTxid, true)
ptx = await this.rpcClient.getrawtransaction({ txid: inTxid, verbose: true })
this.prevCache.set(inTxid, ptx)
}

lib/db/mysql-db-wrapper.js (2)

@ -5,14 +5,12 @@
'use strict'
const mysql = require('mysql')
const path = require('path')
const Logger = require('../logger')
const util = require('../util')
const errors = require('../errors')
const hdaHelper = require('../bitcoin/hd-accounts-helper')
const network = require('../bitcoin/network')
const keys = require('../../keys/')[network.key]
const keysDb = keys.db
const debug = !!(process.argv.indexOf('db-debug') > -1)
const queryDebug = !!(process.argv.indexOf('dbquery-debug') > -1)

lib/fork-pool.js (85)

@ -1,85 +0,0 @@
/*!
* lib/fork-pool.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const os = require('os')
const childProcess = require('child_process')
const genericPool = require('generic-pool')
const Logger = require('./logger')
/**
* A class managing a pool of child processes
* Inspired from fork-pool by Andrew Sliwinski
* https://github.com/thisandagain/fork-pool/
*/
class ForkPool {
/**
* Constructor
*/
constructor(path, options) {
if (!options) {
this._networkKey = ''
this._options = {
max: os.cpus().length / 2,
min: os.cpus().length / 2,
acquireTimeoutMillis: 60000
}
} else {
this._networkKey = options.networkKey
this._options = options
}
const factory = {
create: () => {
return childProcess.fork(path, [this._networkKey])
},
destroy: (cp) => {
cp.kill()
}
}
this.pool = genericPool.createPool(factory, this._options)
Logger.info(`Created ${this._options.min} child processes for addresses derivation (max = ${this._options.max})`)
}
/**
* Enqueue a new task to be processed by a child process
* @param {object} data - data to be passed to the child process
* @returns {Promise}
*/
async enqueue(data) {
let cp
const pool = this.pool
return new Promise(async (resolve, reject) => {
try {
cp = await pool.acquire()
cp.send(data)
cp.once('message', async msg => {
pool.release(cp)
resolve(msg)
})
} catch(e) {
reject(e)
}
})
}
/**
* Drain the pool
*/
async drain() {
await this.pool.drain()
await this.pool.clear()
}
}
module.exports = ForkPool

lib/http-server/http-server.js (19)

@ -4,10 +4,11 @@
*/
'use strict'
const fs = require('fs')
const express = require('express')
const { App } = require('@tinyhttp/app')
const sirv = require('sirv')
const helmet = require('helmet')
const Logger = require('../logger')
const errors = require('../errors');
/**
@ -28,12 +29,12 @@ class HttpServer {
// Listening server instance
this.server = null
// Initialize the express app
this.app = express()
// Initialize the tiny-http app
this.app = new App();
this.app.set('trust proxy', 'loopback')
// Middlewares for json responses and requests logging
this.app.use('/static', express.static('../static'));
this.app.use('/static', sirv('../static'));
this.app.use(HttpServer.setJSONResponse)
this.app.use(HttpServer.requestLogger)
this.app.use(HttpServer.setCrossOrigin)
@ -117,12 +118,18 @@ class HttpServer {
/**
* Return an error response
* @param {object} res - http response object
* @param {object} data - data object
* @param {string | Error} data - data object
* @param {number} [errorCode=400] - HTTP status code
*/
static sendError(res, data, errorCode) {
if (errorCode == null)
errorCode = 400
if (data instanceof Error) {
Logger.error(data, 'API: Unhandled error')
data = errors.generic.GEN
}
const ret = {
status: 'error',
error: data
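With this change a route handler can hand sendError either a structured error descriptor or a raw Error; the latter is logged server-side and masked with the generic payload before being serialized. A hypothetical handler showing both paths:

if (!isValidRequest(req)) // hypothetical validation helper
  return HttpServer.sendError(res, errors.body.INVDATA) // descriptor is returned as-is (default HTTP 400)
try {
  doWork() // hypothetical processing that may throw
} catch(e) {
  HttpServer.sendError(res, e, 500) // Error instance: logged, replaced by errors.generic.GEN
}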

lib/remote-importer/bitcoind-wrapper.js (9)

@ -5,9 +5,10 @@
'use strict'
const bitcoin = require('bitcoinjs-lib')
const RpcClient = require('../bitcoind-rpc/rpc-client')
const { createRpcClient } = require('../bitcoind-rpc/rpc-client')
const rpcLatestBlock = require('../bitcoind-rpc/latest-block')
const Logger = require('../logger')
const addrHelper = require('../bitcoin/addresses-helper')
const network = require('../bitcoin/network')
const activeNet = network.network
const keys = require('../../keys')[network.key]
@ -24,7 +25,7 @@ class BitcoindWrapper extends Wrapper {
constructor() {
super(null, null)
// RPC client
this.client = new RpcClient()
this.client = createRpcClient()
}
/**
@ -34,7 +35,7 @@ class BitcoindWrapper extends Wrapper {
* @returns {Promise}
*/
async _get(descriptors) {
return this.client.cmd('scantxoutset', 'start', descriptors)
return await this.client.scantxoutset({ action: 'start', scanobjects: descriptors })
}
/**
@ -44,7 +45,7 @@ class BitcoindWrapper extends Wrapper {
*/
_xlatScriptPubKey(scriptPubKey) {
const bScriptPubKey = Buffer.from(scriptPubKey, 'hex')
return bitcoin.address.fromOutputScript(bScriptPubKey, activeNet)
return addrHelper.outputScript2Address(bScriptPubKey)
}
/**

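The recurring change in this file, and in the pushtx and tracker files further down, is the move from bitcoind-rpc-client's positional calls to rpc-bitcoin's named-parameter methods. A hedged sketch of what the new createRpcClient factory presumably wraps, based on the rpc-bitcoin package added in package.json (connection values are placeholders):

const { RPCClient } = require('rpc-bitcoin')

// Hypothetical factory mirroring the named export used across this diff
const createRpcClient = () => new RPCClient({
  url: 'http://127.0.0.1',   // placeholder connection settings
  port: 8332,
  user: 'rpcuser',
  pass: 'rpcpassword',
  timeout: 10000
})

// rpc-bitcoin exposes one method per RPC command, each taking a single
// params object instead of positional arguments
const client = createRpcClient()
client.getblockheader({ blockhash: '00000000839a8e6886ab5951d76f411475428afc90947ee320161bbf18eb6048', verbose: true })
  .then(header => console.log(header.height))
  .catch(console.error)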
157
lib/remote-importer/local-rest-indexer-wrapper.js

@@ -0,0 +1,157 @@
/*!
* lib/remote-importer/local-rest-indexer-wrapper.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const axios = require('axios')
const bitcoin = require('bitcoinjs-lib')
const Logger = require('../logger')
const util = require('../util')
const network = require('../bitcoin/network')
const activeNet = network.network
const keys = require('../../keys')[network.key]
const Wrapper = require('./wrapper')
/**
* Wrapper for a local indexer
* providing a REST API
*/
class LocalRestIndexerWrapper extends Wrapper {
/**
* Constructor
*/
constructor(url) {
super(url, null)
}
/**
* Send a GET request to the API
* @param {string} route
* @returns {Promise}
*/
async _get(route) {
const params = {
url: `${this.base}${route}`,
method: 'GET',
responseType: 'json',
timeout: 15000,
headers: {
'User-Agent': 'Dojo'
}
}
const result = await axios(params)
return result.data
}
/**
* Translate a bitcoin address into a script hash
* (@see https://electrumx.readthedocs.io/en/latest/protocol-basics.html#script-hashes)
* @param {string} address - bitcoin address
* @returns {string} returns the script hash associated to the address
*/
_getScriptHash(address) {
const bScriptPubKey = bitcoin.address.toOutputScript(address, activeNet)
const bScriptHash = bitcoin.crypto.sha256(bScriptPubKey)
return bScriptHash.reverse().toString('hex')
}
/**
* Retrieve information for a given address
* @param {string} address - bitcoin address
* @param {boolean} filterAddr - True if an upper bound should be used
* for #transactions associated to the address, False otherwise
* @returns {Promise} returns an object
* { address: <bitcoin_address>, txids: <txids>, ntx: <total_nb_txs>}
*/
async getAddress(address, filterAddr) {
const ret = {
address: address,
ntx: 0,
txids: []
}
const scriptHash = this._getScriptHash(address)
const uri = `/blockchain/scripthash/${scriptHash}/history`
const results = await this._get(uri)
for (let r of results) {
ret.txids.push(r.tx_hash)
ret.ntx++
}
if (filterAddr && ret.ntx > keys.addrFilterThreshold) {
Logger.info(`Importer : Import of ${address} rejected (too many transactions - ${ret.ntx})`)
return {
address: address,
ntx: 0,
txids: []
}
}
return ret
}
/**
* Retrieve information for a given list of addresses
* @param {string[]} addresses - array of bitcoin addresses
* @param {boolean} filterAddr - True if an upper bound should be used
* for #transactions associated to the address, False otherwise
* @returns {Promise} returns an array of objects
* { address: <bitcoin_address>, txids: <txids>, ntx: <total_nb_txs>}
*/
async getAddresses(addresses, filterAddr) {
const ret = {}
const scriptHash2Address = {}
const scriptHashes = []
for (let a of addresses) {
const scriptHash = this._getScriptHash(a)
scriptHashes.push(scriptHash)
scriptHash2Address[scriptHash] = a
}
const sScriptHashes = scriptHashes.join(',')
const uri = `/blockchain/scripthashes/history?scripthashes=${sScriptHashes}`
const results = await this._get(uri)
for (let r of results) {
const a = scriptHash2Address[r.script_hash]
ret[a] = {
address: a,
ntx: r.txids.length,
txids: r.txids
}
}
const aRet = Object.values(ret)
for (let i in aRet) {
if (filterAddr && aRet[i].ntx > keys.addrFilterThreshold) {
Logger.info(`Importer : Import of ${aRet[i].address} rejected (too many transactions - ${aRet[i].ntx})`)
aRet.splice(i, 1)
}
}
return aRet
}
/**
* Retrieve the height of the chaintip for the remote source
* @returns {Promise} returns an object
* {chainTipHeight: <chaintip_height>}
*/
async getChainTipHeight() {
let chainTipHeight = null
const result = await this._get(`/blocks/tip`)
if (result != null && result['height'] != null)
chainTipHeight = parseInt(result['height'])
return {'chainTipHeight': chainTipHeight}
}
}
module.exports = LocalRestIndexerWrapper
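_getScriptHash() above implements the Electrum script-hash convention referenced in its comment: SHA-256 of the output script, byte order reversed, hex encoded. A self-contained sketch of the same computation (any valid mainnet address works here):

const bitcoin = require('bitcoinjs-lib')

// sha256(scriptPubKey), byte-reversed, as hex: the key format expected
// by Electrum-protocol indexers
function electrumScriptHash(address, network = bitcoin.networks.bitcoin) {
  const script = bitcoin.address.toOutputScript(address, network)
  return bitcoin.crypto.sha256(script).reverse().toString('hex')
}

console.log(electrumScriptHash('1BoatSLRHtKNngkdXEeobR76b53LETtpyT'))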

343
lib/remote-importer/remote-importer.js

@@ -60,79 +60,6 @@ class RemoteImporter {
return this.importing[xpub] ? this.importing[xpub] : null
}
/**
* Process the relations between a list of transactions
* @param {object[]} txs - array of transaction objects
* @returns {object} returns an object with 3 mappings
* {txMap: {}, txChildren: {}, txParents: {}}
*/
_processTxsRelations(txs) {
const txMap = {}
const txChildren = {}
const txParents = {}
for (let tx of txs) {
let txid = tx.txid
// Populate txMap
txMap[txid] = tx
// Create parent-child transaction associations
if (!txChildren[txid])
txChildren[txid] = []
if (!txParents[txid])
txParents[txid] = []
for (let i in tx.inputs) {
const input = tx.inputs[i]
let prev = input.outpoint.txid
if (!txMap[prev]) continue
if (txParents[txid].indexOf(prev) == -1)
txParents[txid].push(prev)
if (!txChildren[prev])
txChildren[prev] = []
if (txChildren[prev].indexOf(txid) == -1)
txChildren[prev].push(txid)
}
}
return {
txMap: txMap,
txChildren: txChildren,
txParents: txParents
}
}
/**
* Import a list of transactions associated to a list of addresses
* @param {object[]} addresses - array of address objects
* @param {object[]} txns - array of transaction objects
* @returns {Promise}
*/
async _importTransactions(addresses, txns) {
const addrIdMap = await db.getAddressesIds(addresses)
// The transactions array must be topologically ordered, such that
// entries earlier in the array MUST NOT depend upon any entry later
// in the array.
const txMaps = this._processTxsRelations(txns)
const txOrdered = util.topologicalOrdering(txMaps.txParents, txMaps.txChildren)
const aTxs = []
for (let txid of txOrdered)
if (txMaps.txMap[txid])
aTxs.push(txMaps.txMap[txid])
// Store the transactions by batches of 200 transactions
const txsChunks = util.splitList(aTxs, 200)
for (let txsChunk of txsChunks)
await this.addTransactions(txsChunk, addrIdMap)
}
/**
* Import an HD account from remote sources
* @param {string} xpub - HD Account
@@ -161,32 +88,22 @@
const t0 = Date.now()
const chains = [0,1]
let gaps = [gap.external, gap.internal]
// Allow custom higher gap limits
// for local scans relying on bitcoind or on a local indexer
if (gapLimit
&& ((keys.indexer.active == 'local_bitcoind')
|| (keys.indexer.active == 'local_indexer'))
) {
gaps = [gapLimit, gapLimit]
}
const isLocal = ['local_bitcoind', 'local_indexer'].includes(keys.indexer.active)
const gaps = (gapLimit && isLocal) ? [gapLimit, gapLimit] : [gap.external, gap.internal]
startIndex = (startIndex == null) ? -1 : startIndex - 1
const addrIdMap = {}
let txns = []
let addresses = []
try {
const results = await util.seriesCall(chains, chain => {
const results = await util.parallelCall(chains, chain => {
return this.xpubScan(xpub, chain, startIndex, startIndex, gaps[chain], type)
})
// Accumulate addresses and transactions from all chains
for (let result of results) {
txns = txns.concat(result.transactions)
addresses = addresses.concat(result.addresses)
}
const txns = results.map(r => r.transactions).flat()
const addresses = results.map(r => r.addresses).flat()
const aAddresses = addresses.map(a => a.address)
this.importing[xpub] = {
'status': this.STATUS_IMPORT,
@@ -195,10 +112,13 @@
// Store the hdaccount and the addresses into the database
await db.ensureHDAccountId(xpub, type)
await db.addAddressesToHDAccount(xpub, addresses)
const addrChunks = util.splitList(addresses, 1000)
await util.parallelCall(addrChunks, chunk => {
return db.addAddressesToHDAccount(xpub, chunk)
})
// Store the transaction into the database
const aAddresses = addresses.map(a => a.address)
await this._importTransactions(aAddresses, txns)
} catch(e) {
@@ -256,62 +176,39 @@
d = u + G
Logger.info(`Importer : derived M/${c}/${A.join(',')}`)
const addrMap = {}
for (let a of ret.addresses)
addrMap[a.address] = a
const addrMap = ret.addresses.reduce((m,a) => (m[a.address] = a, m), {})
const aAddresses = ret.addresses.map(a => a.address)
try {
const results = await this.sources.getAddresses(aAddresses)
let gotTransactions = false
const scanTx = []
for (let r of results) {
if (r.ntx == 0) continue
// Address is used. Update used parameter
u = Math.max(u, addrMap[r.address].index)
gotTransactions = true
// TODO: Handle pathological case of many address transactions
while (r.txids.length > 0) {
let txid = r.txids.pop()
if (!txids[txid])
scanTx.push(txid)
}
}
const filteredResults = results.flat().filter(r => r.ntx > 0)
const gotTransactions = filteredResults.length > 0
const scanTx = filteredResults.map(r => r.txids).flat().filter(t => !txids[t])
u = filteredResults.reduce((m,r) => Math.max(m, addrMap[r.address].index), u)
Logger.info(`Importer : Got ${scanTx.length} transactions`)
// Retrieve the transactions by batches of 200 transactions
const txsChunks = util.splitList(scanTx, 200)
try {
for (let txsChunk of txsChunks) {
const txs = await rpcTxns.getTransactions(txsChunk, false)
for (let tx of txs) {
if (tx != null) {
ret.transactions.push(tx)
txids[tx.txid] = true
}
}
}
const txsChunks = util.splitList(scanTx, 200)
const txs = await util.seriesCall(txsChunks, chunk => {
return rpcTxns.getTransactions(chunk, false)
})
const filteredTxs = txs.flat().filter(tx => tx != null)
ret.transactions = ret.transactions.concat(filteredTxs)
txids = filteredTxs.reduce((m,tx) => (m[tx.txid] = true, m), txids)
} catch(e) {
Logger.error(e, `Importer : RemoteImporter.xpubScan() : getTransactions error`)
}
if (gotTransactions) {
if (c == 0)
this.importing[xpub]['txs_ext'] = Object.keys(txids).length
else
this.importing[xpub]['txs_int'] = Object.keys(txids).length
const keyStatus = (c == 0) ? 'txs_ext' : 'txs_int'
this.importing[xpub][keyStatus] = Object.keys(txids).length
// We must go deeper
const result = await this.xpubScan(xpub, c, d, u, G, type, txids)
// Accumulate results from further down the rabbit hole
for (let a of result.addresses)
ret.addresses.push(a)
for (let t of result.transactions)
ret.transactions.push(t)
ret.addresses = ret.addresses.concat(result.addresses)
ret.transactions = ret.transactions.concat(result.transactions)
}
} catch(e) {
@@ -329,50 +226,34 @@
*/
async importAddresses(candidates, filterAddr) {
const t0 = Date.now()
const txns = []
const addresses = []
const imported = []
for (let address of candidates) {
if (!this.importing[address]) {
addresses.push(address)
this.importing[address] = true
} else {
Logger.info(`Importer : Import overlap for ${address}. Skipping`)
}
}
if (addresses.length == 0)
return true
// Check if some addresses are currently processed
const overlap = candidates.filter(c => this.importing[c])
for (let a of overlap)
Logger.info(`Importer : Import overlap for ${a}. Skipping`)
// List addresses that need to be processed
const addresses = candidates.filter(c => !this.importing[c])
this.importing = addresses.reduce((m,a) => (m[a] = true, m), this.importing)
if (addresses.length == 0) return true
Logger.info(`Importer : Importing ${addresses.join(',')}`)
try {
const scanTx = []
const results = await this.sources.getAddresses(addresses, filterAddr)
for (let r of results) {
// Mark the address as imported
imported.push(r.address)
if (r.ntx == 0) continue
// TODO: Handle pathological case of many address transactions
while (r.txids.length > 0) {
let txid = r.txids.pop()
if (scanTx.indexOf(txid) == -1)
scanTx.push(txid)
}
}
const imported = results.map(r => r.address)
const filteredResults = results.filter(r => r.ntx > 0)
const scanTx = [...new Set(filteredResults.map(r => r.txids).flat())]
Logger.info(`Importer : Got ${scanTx.length} transactions`)
// Retrieve the transactions by batches of 100 transactions
const txsChunks = util.splitList(scanTx, 100)
for (let txsChunk of txsChunks) {
const txs = await rpcTxns.getTransactions(txsChunk, false)
for (let tx of txs)
if (tx != null)
txns.push(tx)
}
const txs = await util.seriesCall(txsChunks, chunk => {
return rpcTxns.getTransactions(chunk, false)
})
const txns = txs.flat().filter(tx => tx != null)
// Import addresses and transactions into the database
await db.addAddresses(imported)
@@ -396,84 +277,116 @@
}
}
/**
* Import a list of transactions associated to a list of addresses
* @param {object[]} addresses - array of address objects
* @param {object[]} txs - array of transaction objects
* @returns {Promise}
*/
async _importTransactions(addresses, txs) {
const addrChunks = util.splitList(addresses, 1000)
const addrIdMaps = await util.parallelCall(addrChunks, chunk => {
return db.getAddressesIds(chunk)
})
const addrIdMap = Object.assign({}, ...addrIdMaps)
// Process the transactions by batches of 200 transactions
const txsChunks = util.splitList(txs, 200)
await util.parallelCall(txsChunks, chunk => {
return this._addTransactions(chunk)
})
await util.parallelCall(txsChunks, chunk => {
return this._addOutputs(chunk, addrIdMap)
})
await util.parallelCall(txsChunks, chunk => {
return this._addInputs(chunk)
})
}
/**
* Add a collection of transactions to the database.
* @param {object[]} txs - array of transaction objects
* @param {object} addrIdMap - map address => addrId
* @returns {Promise}
*/
async addTransactions(txs, addrIdMap) {
async _addTransactions(txs) {
try {
// Store the transactions into the database
await db.addTransactions(txs)
// Confirm the transactions if needed
const blocksHashes = new Set()
for (let tx of txs)
if (tx.block)
blocksHashes.add(tx.block.hash)
const blocks = await db.getBlocksByHashes(Array.from(blocksHashes))
const blocksHashes = txs.filter(tx => tx.block).map(tx => tx.block.hash)
const blocks = await db.getBlocksByHashes(blocksHashes)
for (let block of blocks) {
// Filter the transactions by blockHash
return util.parallelCall(blocks, block => {
const filteredTxs = txs.filter(tx => (tx.block && tx.block.hash == block.blockHash))
if (filteredTxs.length > 0) {
if (filteredTxs.length == 0) return
const txids = filteredTxs.map(tx => tx.txid)
// Asynchronous confirmations
db.confirmTransactions(txids, block.blockID)
return db.confirmTransactions(txids, block.blockID)
})
} catch(e) {
Logger.error(e, `Importer : RemoteImporter.addTransactions() :`)
}
}
// Retrieve the database ids for the transactions
/**
* Add a collection of transaction outputs to the database.
* @param {object[]} txs - array of transaction objects
* @param {object} addrIdMap - map address => addrId
* @returns {Promise}
*/
async _addOutputs(txs, addrIdMap) {
try {
const txids = txs.map(tx => tx.txid)
const mapTxsIds = await db.getTransactionsIds(txids)
// Store the outputs in db
const outputs = []
for (let tx of txs) {
for (let output of tx.outputs) {
if (addrIdMap[output.address]) {
outputs.push({
txnID: mapTxsIds[tx.txid],
addrID: addrIdMap[output.address],
outIndex: output.n,
outAmount: output.value,
outScript: output.scriptpubkey,
})
}
const outputs = txs
.map(tx => tx.outputs.map(o => (o.txnID = mapTxsIds[tx.txid], o)))
.flat()
.filter(o => addrIdMap[o.address])
.map(o => { return {
txnID: o.txnID,
addrID: addrIdMap[o.address],
outIndex: o.n,
outAmount: o.value,
outScript: o.scriptpubkey,
}})
return db.addOutputs(outputs)
} catch(e) {
Logger.error(e, `Importer : RemoteImporter._addOutputs() :`)
}
}
await db.addOutputs(outputs)
// Store the inputs in db
const inputs = []
const spent = {}
/**
* Add a collection of transaction inputs to the database.
* @param {object[]} txs - array of transaction objects
* @returns {Promise}
*/
async _addInputs(txs) {
try {
// Retrieve the database ids for the transactions
const txids = txs.map(tx => tx.txid)
const mapTxsIds = await db.getTransactionsIds(txids)
// Get any outputs spent by the inputs of this transaction,
// add those database outIDs to the corresponding inputs, and store.
let outpoints = []
for (let tx of txs)
outpoints = outpoints.concat(tx.inputs.map(input => input.outpoint))
const outpoints = txs.map(tx => tx.inputs).flat().map(input => input.outpoint)
const res = await db.getOutputIds(outpoints)
for (let r of res)
spent[`${r.txnTxid}-${r.outIndex}`] = r.outID
for (let tx of txs) {
for (let input of tx.inputs) {
const key = `${input.outpoint.txid}-${input.outpoint.vout}`
if (spent[key]) {
inputs.push({
outID: spent[key],
txnID: mapTxsIds[tx.txid],
inIndex: input.n,
inSequence: input.seq
})
}
}
}
await db.addInputs(inputs)
const spent = res.reduce((m,r) => (m[`${r.txnTxid}-${r.outIndex}`] = r.outID, m), {})
const inputs = txs
.map(tx => tx.inputs.map(i => (i.txnID = mapTxsIds[tx.txid], i)))
.flat()
.filter(i => spent[`${i.outpoint.txid}-${i.outpoint.vout}`])
.map(i => { return {
outID: spent[`${i.outpoint.txid}-${i.outpoint.vout}`],
txnID: i.txnID,
inIndex: i.n,
inSequence: i.seq
}})
return db.addInputs(inputs)
} catch(e) {
Logger.error(e, `Importer : RemoteImporter.addTransactions() :`)

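Several hunks above replace explicit accumulation loops with a comma-operator reduce, which is easy to misread. A minimal illustration of the idiom:

const addresses = [
  { address: 'addr1', index: 3 },
  { address: 'addr2', index: 7 }
]

// (m[a.address] = a, m) performs the assignment, then evaluates to m,
// so the same accumulator object threads through every iteration
const addrMap = addresses.reduce((m, a) => (m[a.address] = a, m), {})

console.log(addrMap['addr2'].index) // 7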
7
lib/remote-importer/sources-mainnet.js

@@ -10,6 +10,7 @@ const keys = require('../../keys')[network.key]
const Sources = require('./sources')
const BitcoindWrapper = require('./bitcoind-wrapper')
const LocalIndexerWrapper = require('./local-indexer-wrapper')
const LocalRestIndexerWrapper = require('./local-rest-indexer-wrapper')
const OxtWrapper = require('./oxt-wrapper')
@@ -40,6 +41,12 @@ class SourcesMainnet extends Sources {
// we'll use the local indexer as our unique source
this.source = new LocalIndexerWrapper()
Logger.info('Importer : Activated local indexer as the data source for imports')
} else if (keys.indexer.active == 'local_rest_indexer') {
// If local rest indexer option is activated
// we'll use the local indexer as our unique source
const uri = `http://${keys.indexer.localIndexer.host}:${keys.indexer.localIndexer.port}`
this.source = new LocalRestIndexerWrapper(uri)
Logger.info('Importer : Activated local indexer (REST API) as the data source for imports')
} else {
// Otherwise, we'll use the rest api provided by OXT
this.source = new OxtWrapper(keys.indexer.oxt)

7
lib/remote-importer/sources-testnet.js

@@ -11,6 +11,7 @@ const keys = require('../../keys')[network.key]
const Sources = require('./sources')
const BitcoindWrapper = require('./bitcoind-wrapper')
const LocalIndexerWrapper = require('./local-indexer-wrapper')
const LocalRestIndexerWrapper = require('./local-rest-indexer-wrapper')
const EsploraWrapper = require('./esplora-wrapper')
@@ -41,6 +42,12 @@ class SourcesTestnet extends Sources {
// we'll use the local indexer as our unique source
this.source = new LocalIndexerWrapper()
Logger.info('Importer : Activated local indexer as the data source for imports')
} else if (keys.indexer.active == 'local_rest_indexer') {
// If local rest indexer option is activated
// we'll use the local indexer as our unique source
const uri = `http://${keys.indexer.localIndexer.host}:${keys.indexer.localIndexer.port}`
this.source = new LocalRestIndexerWrapper(uri)
Logger.info('Importer : Activated local indexer (REST API) as the data source for imports')
} else {
// Otherwise, we'll use the rest api provided by Esplora
this.source = new EsploraWrapper(keys.indexer.esplora)

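Selecting the new source presumably comes down to the indexer section of the keys file (keys/index-example.js is also touched by this commit, though not shown here). The field names below are inferred from the wrapper construction above; the values are placeholders:

// keys/index.js excerpt, a sketch rather than the shipped template
indexer: {
  // 'local_bitcoind' | 'local_indexer' | 'local_rest_indexer' | 'third_party_explorer'
  active: 'local_rest_indexer',
  localIndexer: {
    host: '172.28.1.6',
    port: 8080
  }
}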
81
lib/util.js

@@ -15,77 +15,26 @@ class Util {
constructor() {}
/**
* Topological ordering of DAG
* https://en.wikipedia.org/wiki/Topological_sorting
*
* Kahn's algorithm
*
* L Empty list that will contain the sorted elements
* S Set of all nodes with no incoming edge
* while S is non-empty do
* remove a node n from S
* add n to tail of L
* for each node m with an edge e from n to m do
* remove edge e from the graph
* if m has no other incoming edges then
* insert m into S
*
* @param {object} parents - map of {[key]: [incoming edge keys]}
* @param {object} children - a map of {[key]: [outgoing edge keys]}
* @returns {object}
* if graph has edges then
* return error (graph has at least one cycle)
* else
* return L (a topologically sorted order)
* Serialize a series of asynchronous calls to a function
* over a list of objects
*/
static topologicalOrdering(parents, children) {
const S = []
static async seriesCall(list, fn) {
const results = []
for (let node in parents) {
if (parents[node].length == 0) {
// Node has no parent (incoming edges)
S.push(node)
}
for (const item of list) {
results.push(await fn(item));
}
const L = []
while (S.length > 0) {
const node = S.pop()
L.push(node)
// Loop over nodes that depend on node
for (let child of children[node]) {
let i = parents[child].indexOf(node)
if (i > -1)
parents[child].splice(i, 1)
if (parents[child].length == 0)
S.push(child)
}
}
return L
return results;
}
/**
* Serialize a series of asynchronous calls to a function
* Execute parallel asynchronous calls to a function
* over a list of objects
* ref: http://www.joezimjs.com/javascript/patterns-asynchronous-programming-promises/
*/
static seriesCall(list, fn) {
const results = []
return list.reduce((memo, item) => {
return memo.then(() => {
return fn(item)
}).then(result => {
results.push(result)
})
},
Promise.resolve()
).then(function() {
return results
})
static parallelCall(list, fn) {
const operations = list.map(item => { return fn(item) })
return Promise.all(operations)
}
/**
@@ -101,13 +50,9 @@
* Splits a list into a list of lists each with maximum length LIMIT
*/
static splitList(list, limit) {
if (list.length <= limit)
return [list]
const lists = []
while (list.length) {
lists.push(list.splice(0, limit))
}
for (let i=0; i < list.length; i += limit)
lists.push(list.slice(i, i+limit))
return lists
}

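The helpers touched here compose into the batching pattern used throughout the importer: split a large list into fixed-size chunks, run the chunks serially or in parallel, then flatten the results. A small sketch under those semantics (fetchChunk is a placeholder):

const util = require('./lib/util') // illustrative path

async function fetchAll(txids, fetchChunk) {
  // Batches of 200, fetched one batch at a time to bound backend load
  const chunks = util.splitList(txids, 200)
  const results = await util.seriesCall(chunks, chunk => fetchChunk(chunk))
  return results.flat()
}

// For independent chunks, parallelCall can be swapped in:
// const results = await util.parallelCall(chunks, chunk => fetchChunk(chunk))

Note that the rewritten splitList is a behavior fix as well as a cleanup: the old splice-based loop consumed its input array, while the slice-based version leaves it intact.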
9
lib/wallet/address-info.js

@@ -38,10 +38,19 @@ class AddressInfo {
* @returns {Promise}
*/
async loadInfo() {
return Promise.all([
this._loadBalance(),
this._loadNbTransactions()
])
}
async _loadBalance() {
const balance = await db.getAddressBalance(this.address)
if (balance !== null)
this.finalBalance = balance
}
async _loadNbTransactions() {
const nbTxs = await db.getAddressNbTransactions(this.address)
if (nbTxs !== null)
this.nTx = nbTxs

42
lib/wallet/hd-account-info.js

@@ -67,38 +67,50 @@ class HdAccountInfo {
*/
async loadInfo() {
try {
const id = await db.getHDAccountId(this.xpub)
//if (id == null) return false
await Promise.all([
this._loadDerivationInfo(),
this._loadBalance(),
this._loadUnusedIndices(),
this._loadDerivedIndices(),
this._loadNbTransactions(),
])
return true
} catch(e) {
return false
}
}
async _loadDerivationInfo() {
const account = await db.getHDAccount(this.xpub)
this.created = account.hdCreated
this.derivation = hdaHelper.typeString(account.hdType)
this.tracked = true
const node = hdaHelper.getNode(this.xpub)
const index = node[2].index
const threshold = Math.pow(2,31)
const hardened = (index >= threshold)
this.account = hardened ? (index - threshold) : index
this.depth = node[2].depth
}
async _loadBalance() {
this.finalBalance = await db.getHDAccountBalance(this.xpub)
}
async _loadUnusedIndices() {
const unusedIdx = await db.getHDAccountNextUnusedIndices(this.xpub)
this.accountIndex = unusedIdx[0]
this.changeIndex = unusedIdx[1]
}
async _loadDerivedIndices() {
const derivedIdx = await db.getHDAccountDerivedIndices(this.xpub)
this.accountDerivedIndex = derivedIdx[0]
this.changeDerivedIndex = derivedIdx[1]
}
async _loadNbTransactions() {
this.nTx = await db.getHDAccountNbTransactions(this.xpub)
const node = hdaHelper.getNode(this.xpub)
const index = node[2].index
const threshold = Math.pow(2,31)
const hardened = (index >= threshold)
this.account = hardened ? (index - threshold) : index
this.depth = node[2].depth
return true
} catch(e) {
return false
}
}
/**

8
lib/wallet/wallet-info.js

@@ -52,7 +52,7 @@ class WalletInfo {
* @returns {Promise}
*/
async ensureHdAccounts() {
return util.seriesCall(this.entities.xpubs, async xpub => {
return util.parallelCall(this.entities.xpubs, async xpub => {
const hdaInfo = new HdAccountInfo(xpub)
return hdaInfo.ensureHdAccount()
})
@@ -63,7 +63,7 @@
* @returns {Promise}
*/
async loadHdAccountsInfo() {
return util.seriesCall(this.entities.xpubs, async xpub => {
return util.parallelCall(this.entities.xpubs, async xpub => {
const hdaInfo = new HdAccountInfo(xpub)
await hdaInfo.loadInfo()
this.wallet.finalBalance += hdaInfo.finalBalance
@@ -113,7 +113,7 @@ class WalletInfo {
* @returns {Promise}
*/
async loadAddressesInfo() {
return util.seriesCall(this.entities.addrs, async address => {
return util.parallelCall(this.entities.addrs, async address => {
const addrInfo = new AddressInfo(address)
await addrInfo.loadInfo()
this.wallet.finalBalance += addrInfo.finalBalance
@@ -175,7 +175,7 @@
*/
async loadUtxos() {
// Load the utxos for the hd accounts
await util.seriesCall(this.entities.xpubs, async xpub => {
await util.parallelCall(this.entities.xpubs, async xpub => {
const hdaInfo = new HdAccountInfo(xpub)
const utxos = await hdaInfo.loadUtxos()
for (let utxo of utxos)

66
lib/wallet/wallet-service.js

@@ -51,37 +51,53 @@ class WalletService {
const walletInfo = new WalletInfo(active)
try {
await Promise.all([
// Add the new xpubs
await util.seriesCall(legacy.xpubs, this._newBIP44)
await util.seriesCall(bip49.xpubs, this._newBIP49)
await util.seriesCall(bip84.xpubs, this._newBIP84)
// Load hd accounts info
await walletInfo.ensureHdAccounts()
await walletInfo.loadHdAccountsInfo()
util.parallelCall(legacy.xpubs, this._newBIP44),
util.parallelCall(bip49.xpubs, this._newBIP49),
util.parallelCall(bip84.xpubs, this._newBIP84),
// Add the new addresses
await db.addAddresses(legacy.addrs)
await db.addAddresses(bip49.addrs)
await db.addAddresses(bip84.addrs)
await db.addAddresses(pubkeys.addrs)
// Ensure addresses exist
await walletInfo.ensureAddresses()
db.addAddresses(legacy.addrs),
db.addAddresses(bip49.addrs),
db.addAddresses(bip84.addrs),
db.addAddresses(pubkeys.addrs),
])
// Ensure hd accounts and addresses exist
await Promise.all([
walletInfo.ensureHdAccounts(),
walletInfo.ensureAddresses(),
])
// Force import of addresses associated to paynyms
// if dojo relies on a local index
if (keys.indexer.active != 'third_party_explorer')
await this._forceEnsureAddressesForActivePubkeys(active)
// Filter the addresses
await walletInfo.filterAddresses()
// Load wallet information
await Promise.all([
// Load the hd accounts,
walletInfo.loadHdAccountsInfo(),
// Load the utxos
await walletInfo.loadUtxos()
walletInfo.loadUtxos(),
// Load the addresses
await walletInfo.loadAddressesInfo()
walletInfo.loadAddressesInfo(),
// Load the most recent transactions
await walletInfo.loadTransactions(0, null, true)
walletInfo.loadTransactions(0, null, true),
// Load feerates
await walletInfo.loadFeesInfo()
walletInfo.loadFeesInfo(),
])
// Postprocessing
await walletInfo.postProcessAddresses()
await walletInfo.postProcessHdAccounts()
await Promise.all([
walletInfo.postProcessAddresses(),
walletInfo.postProcessHdAccounts(),
])
// Format the result
return this._formatGetFullWalletInfoResult(walletInfo)
@@ -280,13 +296,18 @@ class WalletService {
try {
// Filter the addresses
await walletInfo.filterAddresses()
await Promise.all([
// Load the number of transactions
await walletInfo.loadNbTransactions()
walletInfo.loadNbTransactions(),
// Load the requested page of transactions
await walletInfo.loadTransactions(page, count, false)
walletInfo.loadTransactions(page, count, false),
])
// Postprocessing
await walletInfo.postProcessAddresses()
await walletInfo.postProcessHdAccounts()
// Format the result
ret.n_tx = walletInfo.nTx
ret.txs = walletInfo.txs
@@ -344,10 +365,7 @@
*/
_mergeEntities(active, legacy, bip49, bip84, pubkeys) {
// Put all xpub into active.xpubs
active.xpubs = active.xpubs
.concat(legacy.xpubs)
.concat(bip49.xpubs)
.concat(bip84.xpubs)
active.xpubs = active.xpubs.concat(legacy.xpubs, bip49.xpubs, bip84.xpubs)
// Put addresses and pubkeys into active
// but avoid duplicates

5644
package-lock.json

File diff suppressed because it is too large

28
package.json

@@ -1,6 +1,6 @@
{
"name": "samourai-dojo",
"version": "1.9.0",
"version": "1.10.0",
"description": "Backend server for Samourai Wallet",
"main": "accounts/index.js",
"scripts": {
@@ -14,26 +14,28 @@
"license": "AGPL-3.0-only",
"homepage": "https://code.samourai.io/dojo/samourai-dojo",
"dependencies": {
"@tinyhttp/app": "1.3.3",
"async-sema": "2.1.2",
"axios": "0.20.0",
"axios": "0.21.1",
"bip39": "2.4.0",
"bitcoind-rpc-client": "0.3.1",
"bitcoinjs-lib": "5.1.4",
"bitcoinjs-lib": "5.2.0",
"bitcoinjs-message": "1.0.1",
"body-parser": "1.18.3",
"express": "4.16.3",
"express-jwt": "5.3.1",
"generic-pool": "3.4.2",
"body-parser": "1.19.0",
"helmet": "3.23.3",
"lodash": "4.17.19",
"jsonwebtoken": "8.5.1",
"lodash": "4.17.21",
"lru-cache": "4.0.2",
"minimist": "1.2.3",
"mysql": "2.16.0",
"passport": "0.4.0",
"make-concurrent": "5.3.0",
"minimist": "1.2.5",
"mysql": "2.18.1",
"passport": "0.4.1",
"passport-localapikey-update": "0.6.0",
"rpc-bitcoin": "2.0.0",
"sirv": "1.0.11",
"socks-proxy-agent": "4.0.1",
"validator": "10.8.0",
"websocket": "1.0.28",
"websocket": "1.0.34",
"workerpool": "6.1.4",
"zeromq": "4.2.0"
},
"devDependencies": {

4
pushtx/index-orchestrator.js

@@ -8,7 +8,7 @@
const Logger = require('../lib/logger')
const db = require('../lib/db/mysql-db-wrapper')
const RpcClient = require('../lib/bitcoind-rpc/rpc-client')
const { waitForBitcoindRpcApi } = require('../lib/bitcoind-rpc/rpc-client')
const network = require('../lib/bitcoin/network')
const keys = require('../keys')[network.key]
const Orchestrator = require('./orchestrator')
@@ -23,7 +23,7 @@
// Wait for Bitcoind RPC API
// being ready to process requests
await RpcClient.waitForBitcoindRpcApi()
await waitForBitcoindRpcApi()
// Initialize the db wrapper
const dbConfig = {

4
pushtx/index.js

@@ -8,7 +8,7 @@
const Logger = require('../lib/logger')
const db = require('../lib/db/mysql-db-wrapper')
const RpcClient = require('../lib/bitcoind-rpc/rpc-client')
const { waitForBitcoindRpcApi } = require('../lib/bitcoind-rpc/rpc-client')
const network = require('../lib/bitcoin/network')
const keys = require('../keys')[network.key]
const HttpServer = require('../lib/http-server/http-server')
@@ -24,7 +24,7 @@
// Wait for Bitcoind RPC API
// being ready to process requests
await RpcClient.waitForBitcoindRpcApi()
await waitForBitcoindRpcApi()
// Initialize the db wrapper
const dbConfig = {

10
pushtx/orchestrator.js

@@ -8,7 +8,7 @@ const zmq = require('zeromq')
const Sema = require('async-sema')
const Logger = require('../lib/logger')
const db = require('../lib/db/mysql-db-wrapper')
const RpcClient = require('../lib/bitcoind-rpc/rpc-client')
const { createRpcClient, isConnectionError } = require('../lib/bitcoind-rpc/rpc-client')
const network = require('../lib/bitcoin/network')
const keys = require('../keys')[network.key]
const pushTxProcessor = require('./pushtx-processor')
@@ -24,7 +24,7 @@ class Orchestrator {
*/
constructor() {
// RPC client
this.rpcClient = new RpcClient()
this.rpcClient = createRpcClient()
// ZeroMQ socket for bitcoind blocks messages
this.blkSock = null
// Initialize a semaphore protecting the onBlockHash() method
@@ -77,7 +77,7 @@
// Retrieve the block height
const blockHash = buf.toString('hex')
const header = await this.rpcClient.getblockheader(blockHash, true)
const header = await this.rpcClient.getblockheader({ blockhash: blockHash, verbose: true })
const height = header.height
Logger.info(`Orchestrator : Block ${height} ${blockHash}`)
@@ -100,7 +100,7 @@
// Check if previous transaction has been confirmed
if (hasParentTx) {
try {
parentTx = await this.rpcClient.getrawtransaction(tx.schParentTxid, true)
parentTx = await this.rpcClient.getrawtransaction({ txid: tx.schParentTxid, verbose: true })
} catch(e) {
Logger.error(e, 'Orchestrator : Transaction.getTransaction()')
}
@@ -116,7 +116,7 @@
Logger.error(e, `Orchestrator : Orchestrator.onBlockHash() : ${msg}`)
// Check if it's an issue with the connection to the RPC API
// (=> immediately stop the loop)
if (RpcClient.isConnectionError(e)) {
if (isConnectionError(e)) {
Logger.info('Orchestrator : Connection issue')
rpcConnOk = false
break

9
pushtx/pushtx-processor.js

@@ -9,7 +9,8 @@ const zmq = require('zeromq')
const Logger = require('../lib/logger')
const errors = require('../lib/errors')
const db = require('../lib/db/mysql-db-wrapper')
const RpcClient = require('../lib/bitcoind-rpc/rpc-client')
const { createRpcClient } = require('../lib/bitcoind-rpc/rpc-client')
const addrHelper = require('../lib/bitcoin/addresses-helper')
const network = require('../lib/bitcoin/network')
const activeNet = network.network
const keys = require('../keys')[network.key]
@@ -36,7 +37,7 @@ class PushTxProcessor {
this.notifSock = null
this.sources = new Sources()
// Initialize the rpc client
this.rpcClient = new RpcClient()
this.rpcClient = createRpcClient()
}
/**
@@ -68,7 +69,7 @@
if (vout >= tx.outs.length)
throw errors.txout.VOUT
const output = tx.outs[vout]
const address = bitcoin.address.fromOutputScript(output.script, activeNet)
const address = addrHelper.outputScript2Address(output.script)
const nbTxs = await db.getAddressNbTransactions(address)
if (nbTxs == null || nbTxs > 0)
faultyOutputs.push(vout)
@@ -108,7 +109,7 @@
// At this point, the raw hex parses as a legitimate transaction.
// Attempt to send via RPC to the bitcoind instance
try {
const txid = await this.rpcClient.sendrawtransaction(rawtx)
const txid = await this.rpcClient.sendrawtransaction({ hexstring: rawtx })
Logger.info('PushTx : Pushed!')
// Update the stats
status.updateStats(value)

13
pushtx/status.js

@@ -10,7 +10,7 @@ const Logger = require('../lib/logger')
const db = require('../lib/db/mysql-db-wrapper')
const network = require('../lib/bitcoin/network')
const keys = require('../keys')[network.key]
const RpcClient = require('../lib/bitcoind-rpc/rpc-client')
const { createRpcClient } = require('../lib/bitcoind-rpc/rpc-client')
/**
@@ -49,7 +49,7 @@ class Status {
amount: 0,
count: 0
}
this.rpcClient = new RpcClient()
this.rpcClient = createRpcClient()
}
/**
@@ -75,8 +75,7 @@
this.status.push.count = this.stats.count
try {
await this._refreshNetworkInfo()
await this._refreshBlockchainInfo()
await Promise.all([this._refreshNetworkInfo(), this._refreshBlockchainInfo()])
} catch (e) {
Logger.error(e, 'PushTx : Status.getCurrent() : Error')
} finally {
@@ -107,7 +106,7 @@
* Refresh network info
*/
async _refreshNetworkInfo() {
const info = await this.rpcClient.getNetworkInfo()
const info = await this.rpcClient.getnetworkinfo()
this.status.bitcoind.conn = info.connections
this.status.bitcoind.version = info.version
this.status.bitcoind.protocolversion = info.protocolversion
@@ -118,9 +117,9 @@
* Refresh blockchain info
*/
async _refreshBlockchainInfo() {
const info = await this.rpcClient.getBlockchainInfo()
const info = await this.rpcClient.getblockchaininfo()
this.status.bitcoind.blocks = info.blocks
this.status.bitcoind.testnet = (info.chain != 'main')
this.status.bitcoind.testnet = (info.chain !== 'main')
this.status.bitcoind.up = true
}

6
pushtx/transactions-scheduler.js

@@ -10,7 +10,7 @@ const errors = require('../lib/errors')
const db = require('../lib/db/mysql-db-wrapper')
const network = require('../lib/bitcoin/network')
const keys = require('../keys')[network.key]
const RpcClient = require('../lib/bitcoind-rpc/rpc-client')
const { createRpcClient } = require('../lib/bitcoind-rpc/rpc-client')
const pushTxProcessor = require('./pushtx-processor')
@@ -23,7 +23,7 @@ class TransactionsScheduler {
* Constructor
*/
constructor() {
this.rpcClient = new RpcClient()
this.rpcClient = createRpcClient()
}
/**
@@ -41,7 +41,7 @@
script.sort((a,b) => a.hop - b.hop || a.nlocktime - b.nlocktime)
// Get the height of last block seen
const info = await this.rpcClient.getBlockchainInfo()
const info = await this.rpcClient.getblockchaininfo()
const lastHeight = info.blocks
// Get the nLockTime associated to the first transaction

8
scripts/create-first-blocks.js

@@ -9,7 +9,7 @@ const Logger = require('../lib/logger')
const util = require('../lib/util')
const db = require('../lib/db/mysql-db-wrapper')
const network = require('../lib/bitcoin/network')
const RpcClient = require('../lib/bitcoind-rpc/rpc-client')
const { createRpcClient } = require('../lib/bitcoind-rpc/rpc-client')
const keys = require('../keys')[network.key]
@@ -19,7 +19,7 @@ const keys = require('../keys')[network.key]
*/
// RPC Client requests data from bitcoind
let client = new RpcClient()
let client = createRpcClient()
// Database id of the previous block
let prevID = null;
@@ -28,10 +28,10 @@ let prevID = null;
async function processBlock(height) {
Logger.info('Start processing block ' + height)
const blockHash = await client.getblockhash(height)
const blockHash = await client.getblockhash({ height })
if (blockHash) {
const header = await client.getblockheader(blockHash, true)
const header = await client.getblockheader({ blockhash: blockHash, verbose: true })
if (header) {
const dbBlock = {

17
static/admin/css/style.css

@@ -27,6 +27,12 @@ input[type="checkbox"]{
-webkit-transform: scale(1.3);
}
input:disabled, select:disabled {
border: 1px solid #8f8f8f;
color: #8f8f8f;
background-color: #3a3d3f;
}
a, a:visited {
color: #a1a1a1;
}
@@ -545,6 +551,17 @@ button {
display: inline-block;
}
#xpubs-export-actions span {
display: inline;
}
#xpubs-export-actions select {
width: 240px;
margin-left: 5px;
margin-right: 5px;
display: inline-block;
}
#xpubs-tool-details #xpub-value {
overflow: hidden;
}

4
static/admin/dmt/status/status.js

@@ -55,8 +55,10 @@ const statusScript = {
this.setStatusIndicator('#indexer-status-ind', 'ko')
}
const indexerType = apiStatus['indexer']['type']
if (indexerType)
if (indexerType) {
sessionStorage.setItem('indexerType', indexerType)
$('#indexer-type').text(indexerType.replace(/_/g, ' '))
}
const indexerUrl = apiStatus['indexer']['url']
if (indexerUrl)
$('#indexer-url').text(indexerUrl)

24
static/admin/dmt/xpubs-tools/xpubs-tools.html

@@ -59,16 +59,17 @@
<div id="xpubs-tool-actions" class="row box-main">
<div class="center">
<button id="btn-xpub-details-rescan" class="btn btn-success" type="button">RESCAN THIS XPUB</button>
<button id="btn-xpub-details-retype" class="btn btn-success" type="button">RETYPE THIS XPUB</button>
<button id="btn-xpub-details-delete" class="btn btn-success" type="button">DELETE THIS XPUB</button>
<button id="btn-xpub-details-rescan" class="btn btn-success" type="button">RESCAN</button>
<button id="btn-xpub-details-retype" class="btn btn-success" type="button">RETYPE</button>
<button id="btn-xpub-details-delete" class="btn btn-success" type="button">DELETE</button>
<button id="btn-xpub-details-export" class="btn btn-success" type="button">CSV EXPORT</button>
<button id="btn-xpub-details-reset" class="btn btn-success" type="button">SEARCH ANOTHER XPUB</button>
</div>
</div>
<div id="xpubs-rescans-actions" class="row box-main">
<div class="center">
<span>Rescan this xpub starting at index</span>
<span>Rescan this XPUB starting at index</span>
<input id="rescan-start-idx" type="text" value="0" placeholder="index">
<span> with a lookahead of </span>
<input id="rescan-lookahead" type="text" value="100" placeholder="#addresses">
@@ -80,12 +81,25 @@
<div id="xpubs-deletion-actions" class="row box-main">
<div class="center">
<span>Do you want to delete this xpub?</span>
<span>Do you want to delete this XPUB?</span>
<button id="btn-xpub-delete-go" class="btn btn-success" type="button">DELETE</button>
<button id="btn-xpub-delete-cancel" class="btn btn-success" type="button">CANCEL</button>
</div>
</div>
<div id="xpubs-export-actions" class="row box-main">
<div class="center">
<span>Do you want to export a list of </span>
<select id="export-type" type="select" value="full">
<option value="full" selected>all the transactions</option>
<option value="notNull">the transactions modifying the balance</option>
</select>
<span> of this XPUB?</span>
<button id="btn-xpub-export-go" class="btn btn-success" type="button">EXPORT</button>
<button id="btn-xpub-export-cancel" class="btn btn-success" type="button">CANCEL</button>
</div>
</div>
<div id="xpubs-tool-details-row1" class="row box-main">
<!-- GENERAL INFO -->
<div id="box-general" class="halfwidth-left box">

50
static/admin/dmt/xpubs-tools/xpubs-tools.js

@@ -12,10 +12,13 @@ const screenXpubsToolsScript = {
$('#btn-xpub-details-reset').click(() => {this.showSearchForm()})
$('#btn-xpub-details-rescan').click(() => {this.showRescanForm()})
$('#btn-xpub-details-delete').click(() => {this.showDeletionForm()})
$('#btn-xpub-details-export').click(() => {this.showExportForm()})
$('#btn-xpub-rescan-go').click(() => {this.rescanXpub()})
$('#btn-xpub-rescan-cancel').click(() => {this.hideRescanForm()})
$('#btn-xpub-delete-go').click(() => {this.deleteXpub()})
$('#btn-xpub-delete-cancel').click(() => {this.hideDeletionForm()})
$('#btn-xpub-export-go').click(() => {this.exportXpubHistory()})
$('#btn-xpub-export-cancel').click(() => {this.hideExportForm()})
$('#btn-xpub-import-go').click(() => {this.importXpub()})
$('#btn-xpub-details-retype').click(() => {this.showImportForm(true)})
$('#btn-xpub-import-cancel').click(() => {this.hideImportForm(this.isReimport)})
@@ -27,8 +30,15 @@
},
preparePage: function() {
// Disable custom lookahead if data source is a third party explorer
const isTPE = sessionStorage.getItem('indexerType') == 'third_party_explorer'
const isLRI = sessionStorage.getItem('indexerType') == 'local_rest_indexer'
const disableLookahead = isTPE || isLRI
$('#rescan-lookahead').prop('disabled', disableLookahead)
this.hideRescanForm()
this.hideDeletionForm()
this.hideExportForm()
this.showSearchForm()
$("#xpub").focus()
},
@@ -144,6 +154,35 @@
})
},
exportXpubHistory: function() {
lib_msg.displayMessage('Exporting the transactional history of this xpub. Please wait...')
const args = {
'active': this.currentXpub,
'page': 0,
'count': 1000000000
}
if ($('#export-type').val() == 'notNull')
args['excludeNullXfer'] = 1
return lib_api.getTransactions(args)
.then(result => {
if (result['txs'] && result['txs'].length > 0) {
let content = 'data:text/csv;charset=utf-8,'
content += 'height,txid,date,flow\n'
for (let tx of result['txs'])
content += `${tx['block_height']},${tx['hash']},${new Date(tx['time']*1000).toString()},${tx['result']/100000000}\n`
const encodedURI = encodeURI(content)
window.open(encodedURI)
}
this.hideExportForm()
lib_msg.displayInfo('Transactional history successfully exported.')
}).catch(e => {
lib_errors.processError(e)
})
},
checkRescanStatus: function(callback) {
this.rescanStatusTimerId = setTimeout(() => {
lib_api.getXpubRescanStatus(this.currentXpub)
@@ -308,6 +347,17 @@ const screenXpubsToolsScript = {
$('#xpubs-tool-actions').show()
},
showExportForm: function() {
$('#xpubs-tool-actions').hide()
$('#xpubs-export-actions').show()
lib_msg.cleanMessagesUi()
},
hideExportForm: function() {
$('#xpubs-export-actions').hide()
$('#xpubs-tool-actions').show()
},
}
screenScripts.set('#screen-xpubs-tools', screenXpubsToolsScript)

8
static/admin/lib/api-wrapper.js

@@ -156,6 +156,14 @@ const lib_api = {
)
},
/**
* Transactions
*/
getTransactions: function(arguments) {
let uri = this.baseUri + '/txs'
return this.sendGetUriEncoded(uri, arguments)
},
/**
* Rescans a range of blocks
*/

1
static/admin/lib/auth-utils.js

@@ -139,6 +139,7 @@ const lib_auth = {
this.setRefreshToken(null)
this.setAccessToken(null)
sessionStorage.setItem('activeTab', '')
sessionStorage.setItem('indexerType', '')
lib_cmn.goToHomePage()
}

50
tracker/abstract-processor.js

@@ -1,50 +0,0 @@
/*!
* tracker/abstract-processor.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const RpcClient = require('../lib/bitcoind-rpc/rpc-client')
/**
* An abstract class for tracker processors
*/
class AbstractProcessor {
/**
* Constructor
* @param {object} notifSock - ZMQ socket used for notifications
*/
constructor(notifSock) {
// RPC client
this.client = new RpcClient()
// ZeroMQ socket for notifications sent to others components
this.notifSock = notifSock
}
/**
* Notify a new transaction
* @param {object} tx - bitcoin transaction
*/
notifyTx(tx) {
// Real-time client updates for this transaction.
// Any address input or output present in transaction
// is a potential client to notify.
if (this.notifSock)
this.notifSock.send(['transaction', JSON.stringify(tx)])
}
/**
* Notify a new block
* @param {string} header - block header
*/
notifyBlock(header) {
// Notify clients of the block
if (this.notifSock)
this.notifSock.send(['block', JSON.stringify(header)])
}
}
module.exports = AbstractProcessor

173
tracker/block-worker.js

@@ -0,0 +1,173 @@
/*!
* tracker/block-worker.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const { isMainThread, parentPort } = require('worker_threads')
const network = require('../lib/bitcoin/network')
const keys = require('../keys')[network.key]
const db = require('../lib/db/mysql-db-wrapper')
const { createRpcClient } = require('../lib/bitcoind-rpc/rpc-client')
const Block = require('./block')
/**
* STATUS
*/
const IDLE = 0
module.exports.IDLE = IDLE
const INITIALIZED = 1
module.exports.INITIALIZED = INITIALIZED
const OUTPUTS_PROCESSED = 2
module.exports.OUTPUTS_PROCESSED = OUTPUTS_PROCESSED
const INPUTS_PROCESSED = 3
module.exports.INPUTS_PROCESSED = INPUTS_PROCESSED
const TXS_CONFIRMED = 4
module.exports.TXS_CONFIRMED = TXS_CONFIRMED
/**
* OPS
*/
const OP_INIT = 0
module.exports.OP_INIT = OP_INIT
const OP_PROCESS_OUTPUTS = 1
module.exports.OP_PROCESS_OUTPUTS = OP_PROCESS_OUTPUTS
const OP_PROCESS_INPUTS = 2
module.exports.OP_PROCESS_INPUTS = OP_PROCESS_INPUTS
const OP_CONFIRM = 3
module.exports.OP_CONFIRM = OP_CONFIRM
const OP_RESET = 4
module.exports.OP_RESET = OP_RESET
/**
* Process message received by the worker
* @param {object} msg - message received by the worker
*/
async function processMessage(msg) {
let res = null
let success = true
try {
switch(msg.op) {
case OP_INIT:
if (status != IDLE)
throw 'Operation not allowed'
res = await initBlock(msg.header)
break
case OP_PROCESS_OUTPUTS:
if (status != INITIALIZED)
throw 'Operation not allowed'
res = await processOutputs()
break
case OP_PROCESS_INPUTS:
if (status != OUTPUTS_PROCESSED)
throw 'Operation not allowed'
res = await processInputs()
break
case OP_CONFIRM:
if (status != INPUTS_PROCESSED)
throw 'Operation not allowed'
res = await confirmTransactions(msg.blockId)
break
case OP_RESET:
res = await reset()
break
default:
throw 'Invalid Operation'
}
} catch (e) {
success = false
res = e
} finally {
parentPort.postMessage({
'op': msg.op,
'status': success,
'res': res
})
}
}
/**
* Initialize the block
* @param {object} header - block header
*/
async function initBlock(header) {
status = INITIALIZED
const hex = await rpcClient.getblock({ blockhash: header.hash, verbosity: 0 })
block = new Block(hex, header)
return true
}
/**
* Process the transactions outputs
*/
async function processOutputs() {
status = OUTPUTS_PROCESSED
txsForBroadcast = await block.processOutputs()
return true
}
/**
* Process the transactions inputs
*/
async function processInputs() {
status = INPUTS_PROCESSED
const txs = await block.processInputs()
txsForBroadcast = txsForBroadcast.concat(txs)
return true
}
/**
* Confirm the transactions
* @param {integer} blockId - id of the block in db
*/
async function confirmTransactions(blockId) {
status = TXS_CONFIRMED
const aTxsForBroadcast = [...new Set(txsForBroadcast)]
await block.confirmTransactions(aTxsForBroadcast, blockId)
return aTxsForBroadcast
}
/**
* Reset
*/
function reset() {
status = IDLE
block = null
txsForBroadcast = []
return true
}
/**
* MAIN
*/
const rpcClient = createRpcClient()
let block = null
let txsForBroadcast = []
let status = IDLE
if (!isMainThread) {
db.connect({
connectionLimit: keys.db.connectionLimitTracker,
acquireTimeout: keys.db.acquireTimeout,
host: keys.db.host,
user: keys.db.user,
password: keys.db.pass,
database: keys.db.database
})
reset()
parentPort.on('message', processMessage)
}

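For context, a condensed sketch of the parent side of this op/status protocol (the real driver is blocks-processor.js below; the error handling here is a simplified assumption):

const { Worker } = require('worker_threads')
const blockWorker = require('./tracker/block-worker')

function sendOp(worker, msg) {
  // Each op gets exactly one reply; settle on its status flag
  return new Promise((resolve, reject) => {
    worker.once('message', reply => reply.status ? resolve(reply.res) : reject(reply.res))
    worker.postMessage(msg)
  })
}

async function processOneBlock(header, blockId) {
  const worker = new Worker('./tracker/block-worker.js')
  await sendOp(worker, { op: blockWorker.OP_INIT, header })
  await sendOp(worker, { op: blockWorker.OP_PROCESS_OUTPUTS })
  await sendOp(worker, { op: blockWorker.OP_PROCESS_INPUTS })
  const txs = await sendOp(worker, { op: blockWorker.OP_CONFIRM, blockId })
  await sendOp(worker, { op: blockWorker.OP_RESET })
  await worker.terminate()
  return txs
}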
121
tracker/block.js

@@ -26,44 +26,92 @@ class Block extends TransactionsBundle {
super()
this.hex = hex
this.header = header
try {
if (hex != null) {
const block = bitcoin.Block.fromHex(hex)
this.transactions = block.transactions
}
} catch (e) {
Logger.error(e, 'Tracker : Block()')
Logger.error(null, header)
return Promise.reject(e)
}
}
/**
* Register the block and transactions of interest in db
* @dev This method isn't used anymore.
* It has been replaced by a parallel processing of blocks.
* (see blocks-processor and block-worker)
* @returns {Promise - object[]} returns an array of transactions to be broadcast
*/
async checkBlock() {
async processBlock() {
Logger.info('Tracker : Beginning to process new block.')
let block
const txsForBroadcast = []
const t0 = Date.now()
try {
block = bitcoin.Block.fromHex(this.hex)
this.transactions = block.transactions
} catch (e) {
Logger.error(e, 'Tracker : Block.checkBlock()')
Logger.error(null, this.header)
return Promise.reject(e)
const txsForBroadcast = new Map()
const txsForBroadcast1 = await this.processOutputs()
txsForBroadcast1.map(tx => {txsForBroadcast.set(tx.getId(), tx)})
const txsForBroadcast2 = await this.processInputs()
txsForBroadcast2.map(tx => {txsForBroadcast.set(tx.getId(), tx)})
const aTxsForBroadcast = [...txsForBroadcast.values()]
const blockId = await this.registerBlock()
await this.confirmTransactions(aTxsForBroadcast, blockId)
// Logs and result returned
const ntx = this.transactions.length
const dt = ((Date.now()-t0)/1000).toFixed(1)
const per = ((Date.now()-t0)/ntx).toFixed(0)
Logger.info(`Tracker : Finished block ${this.header.height}, ${dt}s, ${ntx} tx, ${per}ms/tx`)
return aTxsForBroadcast
}
const t0 = Date.now()
let ntx = 0
// Filter transactions
const filteredTxs = await this.prefilterTransactions()
// Check filtered transactions
// and broadcast notifications
await util.seriesCall(filteredTxs, async tx => {
const filteredTx = new Transaction(tx)
const txCheck = await filteredTx.checkTransaction()
if (txCheck && txCheck.broadcast)
txsForBroadcast.push(txCheck.tx)
/**
* Process the transaction outputs
* @returns {Promise - object[]} returns an array of transactions to be broadcast
*/
async processOutputs() {
const txsForBroadcast = new Set()
const filteredTxs = await this.prefilterByOutputs()
await util.parallelCall(filteredTxs, async filteredTx => {
const tx = new Transaction(filteredTx)
await tx.processOutputs()
if (tx.doBroadcast)
txsForBroadcast.add(tx.tx)
})
return [...txsForBroadcast]
}
/**
* Process the transaction inputs
* @returns {Promise - object[]} returns an array of transactions to be broadcast
*/
async processInputs() {
const txsForBroadcast = new Set()
const filteredTxs = await this.prefilterByInputs()
await util.parallelCall(filteredTxs, async filteredTx => {
const tx = new Transaction(filteredTx)
await tx.processInputs()
if (tx.doBroadcast)
txsForBroadcast.add(tx.tx)
})
return [...txsForBroadcast]
}
// Retrieve the previous block
// and store the new block into the database
/**
* Store the block in db
* @returns {Promise - int} returns the id of the block
*/
async registerBlock() {
const prevBlock = await db.getBlockByHash(this.header.previousblockhash)
const prevID = (prevBlock && prevBlock.blockID) ? prevBlock.blockID : null
@@ -76,18 +124,19 @@
Logger.info(`Tracker : Added block ${this.header.height} (id=${blockId})`)
// Confirms the transactions
const txids = this.transactions.map(t => t.getId())
ntx = txids.length
const txidLists = util.splitList(txids, 100)
await util.seriesCall(txidLists, list => db.confirmTransactions(list, blockId))
// Logs and result returned
const dt = ((Date.now()-t0)/1000).toFixed(1)
const per = ((Date.now()-t0)/ntx).toFixed(0)
Logger.info(`Tracker : Finished block ${this.header.height}, ${dt}s, ${ntx} tx, ${per}ms/tx`)
return blockId
}
return txsForBroadcast
/**
* Confirm the transactions in db
* @param {Set} txs - set of transactions stored in db
* @param {int} blockId - id of the block
* @returns {Promise}
*/
async confirmTransactions(txs, blockId) {
const txids = txs.map(t => t.getId())
const txidLists = util.splitList(txids, 100)
return util.parallelCall(txidLists, list => db.confirmTransactions(list, blockId))
}
/**

97
tracker/blockchain-processor.js

@@ -11,29 +11,34 @@ const util = require('../lib/util')
const Logger = require('../lib/logger')
const db = require('../lib/db/mysql-db-wrapper')
const network = require('../lib/bitcoin/network')
const { createRpcClient } = require('../lib/bitcoind-rpc/rpc-client')
const keys = require('../keys')[network.key]
const AbstractProcessor = require('./abstract-processor')
const Block = require('./block')
const TransactionsBundle = require('./transactions-bundle')
const blocksProcessor = require('./blocks-processor')
/**
* A class allowing to process the blockchain
*/
class BlockchainProcessor extends AbstractProcessor {
class BlockchainProcessor {
/**
* Constructor
* @param {object} notifSock - ZMQ socket used for notifications
*/
constructor(notifSock) {
super(notifSock)
// RPC client
this.client = createRpcClient()
// ZeroMQ socket for bitcoind blocks messages
this.blkSock = null
// Initialize a semaphore protecting the onBlockHash() method
this._onBlockHashSemaphor = new Sema(1, { capacity: 50 })
// Array of worker threads used for parallel processing of blocks
this.blockWorkers = []
// Flag tracking Initial Block Download Mode
this.isIBD = true
// Initialize the blocks processor
blocksProcessor.init(notifSock)
}
/**
@@ -55,8 +60,7 @@ class BlockchainProcessor extends AbstractProcessor {
* @returns {Promise}
*/
async catchup() {
const highest = await db.getHighestBlock()
const info = await this.client.getblockchaininfo()
const [highest, info] = await Promise.all([db.getHighestBlock(), this.client.getblockchaininfo()])
const daemonNbHeaders = info.headers
// Consider that we are in IBD mode if Dojo is far in the past (> 13,000 blocks)
@@ -80,12 +84,11 @@
try {
Logger.info('Tracker : Tracker Startup (IBD mode)')
const info = await this.client.getblockchaininfo()
// Get highest block processed by the tracker
const [highest, info] = await Promise.all([db.getHighestBlock(), this.client.getblockchaininfo()])
const daemonNbBlocks = info.blocks
const daemonNbHeaders = info.headers
// Get highest block processed by the tracker
const highest = await db.getHighestBlock()
const dbMaxHeight = highest.blockHeight
let prevBlockId = highest.blockID
@@ -114,8 +117,8 @@
await util.seriesCall(blockRange, async height => {
try {
const blockHash = await this.client.getblockhash(height)
const header = await this.client.getblockheader(blockHash, true)
const blockHash = await this.client.getblockhash({ height })
const header = await this.client.getblockheader({ blockhash: blockHash, verbose: true })
prevBlockId = await this.processBlockHeader(header, prevBlockId)
} catch(e) {
Logger.error(e, 'Tracker : BlockchainProcessor.catchupIBDMode()')
@@ -151,30 +154,23 @@
try {
Logger.info('Tracker : Tracker Startup (normal mode)')
const info = await this.client.getblockchaininfo()
// Get highest block processed by the tracker
const [highest, info] = await Promise.all([db.getHighestBlock(), this.client.getblockchaininfo()])
const daemonNbBlocks = info.blocks
// Get highest block processed by the tracker
const highest = await db.getHighestBlock()
if (highest == null) return null
if (daemonNbBlocks == highest.blockHeight) return null
// Compute blocks range to be processed
const blockRange = _.range(highest.blockHeight, daemonNbBlocks + 1)
Logger.info(`Tracker : Sync ${blockRange.length} blocks`)
// Process the blocks
return util.seriesCall(blockRange, async height => {
try {
const hash = await this.client.getblockhash(height)
const header = await this.client.getblockheader(hash)
return this.processBlock(header)
return this.processBlockRange(blockRange)
} catch(e) {
Logger.error(e, 'Tracker : BlockchainProcessor.catchupNormalMode()')
process.exit()
}
}, 'Tracker syncing', true)
} catch(e) {
Logger.error(e, 'Tracker : BlockchainProcessor.catchupNormalMode()')
@@ -240,7 +236,7 @@
let headers = null
try {
const header = await this.client.getblockheader(blockHash, true)
const header = await this.client.getblockheader({ blockhash: blockHash, verbose: true })
Logger.info(`Tracker : Block #${header.height} ${blockHash}`)
// Grab all headers between this block and last known
headers = await this.chainBacktrace([header])
@@ -262,9 +258,7 @@
await this.rewind(knownHeight)
// Process the blocks
return await util.seriesCall(headers, header => {
return this.processBlock(header)
})
return await this.processBlocks(headers)
} catch(e) {
Logger.error(e, 'Tracker : BlockchainProcessor.onBlockHash()')
@@ -292,7 +286,7 @@ class BlockchainProcessor extends AbstractProcessor {
if (block == null) {
// Previous block does not exist in database. Grab from bitcoind
const header = await this.client.getblockheader(deepest.previousblockhash, true)
const header = await this.client.getblockheader({ blockhash: deepest.previousblockhash, verbose: true })
headers.push(header)
return this.chainBacktrace(headers)
} else {
@@ -318,8 +312,6 @@ class BlockchainProcessor extends AbstractProcessor {
await db.unconfirmTransactions(txids)
}
// TODO: get accounts and notify of deletion ?
await db.deleteBlocksAfterHeight(height)
}
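The two calls above are the whole rewind step for a reorg: unconfirm, then delete. A hedged sketch of that flow in isolation (db.getTransactionsConfirmedAfterHeight() is an assumed helper name used only for this illustration; the other two calls are the ones visible in the hunk):

// Illustrative rewind: detach everything above `height` from the chain.
async function rewindSketch(height) {
  const txs = await db.getTransactionsConfirmedAfterHeight(height) // assumed helper
  const txids = txs.map(t => t.txnTxid)
  if (txids.length > 0)
    await db.unconfirmTransactions(txids)   // shown in this hunk
  await db.deleteBlocksAfterHeight(height)  // shown in this hunk
}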
@@ -342,45 +334,40 @@ class BlockchainProcessor extends AbstractProcessor {
Logger.info(`Blocks Rescan : starting a rescan for ${blockRange.length} blocks`)
// Process the blocks
return util.seriesCall(blockRange, async height => {
try {
Logger.info(`Tracker : Rescanning block ${height}`)
const hash = await this.client.getblockhash(height)
const header = await this.client.getblockheader(hash)
return this.processBlock(header)
return this.processBlockRange(blockRange)
} catch(e) {
Logger.error(e, 'Tracker : BlockchainProcessor.rescan()')
throw e
}
}, 'Tracker rescan', true)
}
/**
* Process a block
* @param {object} header - block header
* @returns {Promise}
* Process a list of blocks
* @param {object[]} headers - array of block headers
*/
async processBlock(header) {
try {
// Get raw block hex string from bitcoind
const hex = await this.client.getblock(header.hash, false)
const block = new Block(hex, header)
async processBlocks(headers) {
const chunks = util.splitList(headers, blocksProcessor.nbWorkers)
const txsForBroadcast = await block.checkBlock()
// Send notifications
for (let tx of txsForBroadcast)
this.notifyTx(tx)
await util.seriesCall(chunks, async chunk => {
return blocksProcessor.processChunk(chunk)
})
}
this.notifyBlock(header)
/**
* Process a range of blocks
* @param {int[]} heights - a range of block heights
*/
async processBlockRange(heights) {
const chunks = util.splitList(heights, blocksProcessor.nbWorkers)
} catch(e) {
// The show must go on.
// TODO: further notification that this block did not check out
Logger.error(e, 'Tracker : BlockchainProcessor.processBlock()')
}
return util.seriesCall(chunks, async chunk => {
const headers = await util.parallelCall(chunk, async height => {
const hash = await this.client.getblockhash({ height })
return await this.client.getblockheader({ blockhash: hash })
})
return this.processBlocks(headers)
})
}
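processBlocks() and processBlockRange() both lean on util.splitList() to hand the worker pool one chunk at a time. For readers without lib/util.js open, an illustrative reimplementation consistent with the call sites in this diff (list, then chunk size); the real helper may differ in details:

// Illustrative only: split `list` into consecutive chunks of `chunkSize`.
function splitList(list, chunkSize) {
  const chunks = []
  for (let i = 0; i < list.length; i += chunkSize)
    chunks.push(list.slice(i, i + chunkSize))
  return chunks
}

// splitList([1, 2, 3, 4, 5], 2)  =>  [[1, 2], [3, 4], [5]]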
/**

222
tracker/blocks-processor.js

@@ -0,0 +1,222 @@
/*!
* tracker/blocks-processor.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const os = require('os')
const Sema = require('async-sema')
const { Worker } = require('worker_threads')
const Logger = require('../lib/logger')
const util = require('../lib/util')
const dbProcessor = require('../lib/db/mysql-db-wrapper')
const blockWorker = require('./block-worker')
let notifSock = null
let blockWorkers = []
let headersChunk = []
let txsForBroadcast = []
let t0 = null
let currentOp = null
let nbTasksEnqueued = 0
let nbTasksCompleted = 0
// Semaphore protecting the processChunk() method
const _processBlocksSemaphore = new Sema(1)
// Number of worker threads processing the blocks in parallel
const nbWorkers = os.cpus().length //- 1
module.exports.nbWorkers = nbWorkers
/**
* Initialize the processor
* @param {object} sock - ZMQ socket used for notifications
*/
function init(sock) {
notifSock = sock
for (let i = 0; i < nbWorkers; i++) {
const worker = new Worker(
`${__dirname}/block-worker.js`,
{ workerData: null }
)
worker.on('error', processWorkerError)
worker.on('message', processWorkerMessage)
blockWorkers.push(worker)
}
}
module.exports.init = init
/**
* Process a chunk of block headers
* @param {object[]} chunk - array of block headers
*/
async function processChunk(chunk) {
// Acquire the semaphore (wait for the previous chunk to finish)
await _processBlocksSemaphore.acquire()
t0 = Date.now()
const sBlockRange = `${chunk[0].height}-${chunk[chunk.length-1].height}`
Logger.info(`Tracker : Beginning to process blocks ${sBlockRange}`)
// Process the chunk
chunk.sort((a,b) => a.height - b.height)
headersChunk = chunk
txsForBroadcast = []
processTask(blockWorker.OP_INIT)
}
module.exports.processChunk = processChunk
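The one-permit semaphore is what serialises chunks: a new call to processChunk() parks on acquire() until the previous chunk's OP_RESET handler releases the permit. The usual shape of that pattern with async-sema looks like the sketch below (assuming the package's documented acquire()/release() API):

const { Sema } = require('async-sema')
const lock = new Sema(1) // one permit, so this acts as a mutex

async function withLock(work) {
  await lock.acquire()
  try {
    return await work()
  } finally {
    lock.release()
  }
}

Here the release cannot live in a finally block, because a chunk only finishes inside processWorkerMessage(), several worker round-trips after processChunk() returns.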
/**
* Process an error returned by a worker thread
* @param {object} e - error
*/
async function processWorkerError(e) {
return processWorkerMessage({
'op': currentOp,
'status': false,
'res': e
})
}
/**
* Process a message returned by a worker thread
* @param {object} msg - message sent by the worker thread
*/
async function processWorkerMessage(msg) {
nbTasksCompleted++
if (!msg.status) {
Logger.error(msg.res, 'Tracker : processWorkerMessage()')
} else if (msg.op == blockWorker.OP_CONFIRM) {
txsForBroadcast = txsForBroadcast.concat(msg.res)
}
if (nbTasksCompleted == nbTasksEnqueued) {
switch (msg.op) {
case blockWorker.OP_INIT:
// Process the transaction outputs
processTask(blockWorker.OP_PROCESS_OUTPUTS)
break
case blockWorker.OP_PROCESS_OUTPUTS:
// Process the transaction inputs
processTask(blockWorker.OP_PROCESS_INPUTS)
break
case blockWorker.OP_PROCESS_INPUTS:
// Store the blocks in db and get their id
const blockIds = await util.seriesCall(headersChunk, async header => {
return registerBlock(header)
})
// Confirm the transactions
processTask(blockWorker.OP_CONFIRM, blockIds)
break
case blockWorker.OP_CONFIRM:
// Notify new transactions and blocks
await Promise.all([
util.parallelCall(txsForBroadcast, async tx => {
notifyTx(tx)
}),
util.parallelCall(headersChunk, async header => {
notifyBlock(header)
})
])
// Process complete. Reset the workers
processTask(blockWorker.OP_RESET)
break
case blockWorker.OP_RESET:
const dt = ((Date.now()-t0)/1000).toFixed(1)
const per = ((Date.now()-t0)/headersChunk.length).toFixed(0)
const sBlockRange = `${headersChunk[0].height}-${headersChunk[headersChunk.length-1].height}`
Logger.info(`Tracker : Finished processing blocks ${sBlockRange}, ${dt}s, ${per}ms/block`)
// Release the semaphore
await _processBlocksSemaphore.release()
break
}
}
}
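processWorkerMessage() doubles as a completion barrier: processTask() counts every postMessage() into nbTasksEnqueued, every worker reply bumps nbTasksCompleted, and the switch only advances the pipeline (INIT, OUTPUTS, INPUTS, CONFIRM, RESET) once the counters meet. The same idea in isolation (illustrative, not this module's code):

// Illustrative barrier: fire `onAllDone` once every enqueued task has replied.
function makeBarrier(onAllDone) {
  let enqueued = 0
  let completed = 0
  return {
    enqueue() { enqueued++ },
    complete() { if (++completed === enqueued) onAllDone() }
  }
}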
/**
* Execute an operation processing a block
* @param {integer} op - operation
* @param {*} args
*/
function processTask(op, args) {
currentOp = op
nbTasksEnqueued = 0
nbTasksCompleted = 0
switch (op) {
case blockWorker.OP_INIT:
for (let i = 0; i < headersChunk.length; i++) {
blockWorkers[i].postMessage({
'op': op,
'header': headersChunk[i]
})
nbTasksEnqueued++
}
break
case blockWorker.OP_PROCESS_OUTPUTS:
case blockWorker.OP_PROCESS_INPUTS:
case blockWorker.OP_RESET:
for (let i = 0; i < headersChunk.length; i++) {
blockWorkers[i].postMessage({'op': op})
nbTasksEnqueued++
}
break
case blockWorker.OP_CONFIRM:
for (let i = 0; i < headersChunk.length; i++) {
blockWorkers[i].postMessage({
'op': op,
'blockId': args[i]
})
nbTasksEnqueued++
}
break
default:
Logger.error(null, 'Tracker : processTask() : Unknown operation')
}
}
/**
* Notify a new transaction
* @param {object} tx - bitcoin transaction
*/
function notifyTx(tx) {
// Real-time client updates for this transaction.
// Any address input or output present in transaction
// is a potential client to notify.
if (notifSock)
notifSock.send(['transaction', JSON.stringify(tx)])
}
/**
* Notify a new block
* @param {object} header - block header
*/
function notifyBlock(header) {
// Notify clients of the block
if (notifSock)
notifSock.send(['block', JSON.stringify(header)])
}
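Both notify helpers publish multipart ZeroMQ frames whose first frame acts as the topic. A client built against the same callback-style zeromq API could consume them like this (sketch only; the endpoint below is a placeholder, not a port defined in this commit):

const zmq = require('zeromq') // assumes the zeromq 5.x callback API

const sub = zmq.socket('sub')
sub.connect('tcp://127.0.0.1:9999') // placeholder endpoint
sub.subscribe('block') // or 'transaction'
sub.on('message', (topic, payload) => {
  console.log(topic.toString(), JSON.parse(payload.toString()))
})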
/**
* Store a block in db
* @param {object} header - block header
* @returns {Promise<number>} returns the id of the block
*/
async function registerBlock(header) {
const prevBlock = await dbProcessor.getBlockByHash(header.previousblockhash)
const prevID = (prevBlock && prevBlock.blockID) ? prevBlock.blockID : null
const blockId = await dbProcessor.addBlock({
blockHeight: header.height,
blockHash: header.hash,
blockTime: header.time,
blockParent: prevID
})
Logger.info(`Tracker : Added block ${header.height} (id=${blockId})`)
return blockId
}
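The worker side of this protocol lives in tracker/block-worker.js, which this view doesn't show. Inferred from the messages exchanged above, a skeleton of the counterpart might look like the following; the op-code values and the handle() dispatcher are assumptions for illustration, and only the {op, status, res} reply shape is taken from this file:

const { parentPort } = require('worker_threads')

// Op codes must match the constants exported by tracker/block-worker.js
const OP_INIT = 0, OP_PROCESS_OUTPUTS = 1, OP_PROCESS_INPUTS = 2,
      OP_CONFIRM = 3, OP_RESET = 4 // assumed values

parentPort.on('message', async msg => {
  try {
    const res = await handle(msg) // hypothetical dispatcher on msg.op
    parentPort.postMessage({ op: msg.op, status: true, res: res })
  } catch (e) {
    parentPort.postMessage({ op: msg.op, status: false, res: e.message })
  }
})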

4
tracker/index.js

@@ -6,7 +6,7 @@
'use strict'
const RpcClient = require('../lib/bitcoind-rpc/rpc-client')
const { waitForBitcoindRpcApi } = require('../lib/bitcoind-rpc/rpc-client')
const network = require('../lib/bitcoin/network')
const keys = require('../keys')[network.key]
const db = require('../lib/db/mysql-db-wrapper')
@@ -21,7 +21,7 @@
// Wait for Bitcoind RPC API
// being ready to process requests
await RpcClient.waitForBitcoindRpcApi()
await waitForBitcoindRpcApi()
// Initialize the db wrapper
const dbConfig = {

72
tracker/mempool-processor.js

@@ -11,8 +11,8 @@ const util = require('../lib/util')
const Logger = require('../lib/logger')
const db = require('../lib/db/mysql-db-wrapper')
const network = require('../lib/bitcoin/network')
const { createRpcClient } = require('../lib/bitcoind-rpc/rpc-client')
const keys = require('../keys')[network.key]
const AbstractProcessor = require('./abstract-processor')
const Transaction = require('./transaction')
const TransactionsBundle = require('./transactions-bundle')
@@ -20,14 +20,17 @@ const TransactionsBundle = require('./transactions-bundle')
/**
* A class managing a buffer for the mempool
*/
class MempoolProcessor extends AbstractProcessor {
class MempoolProcessor {
/**
* Constructor
* @param {object} notifSock - ZMQ socket used for notifications
*/
constructor(notifSock) {
super(notifSock)
// RPC client
this.client = createRpcClient()
// ZeroMQ socket for notifications sent to others components
this.notifSock = notifSock
// Mempool buffer
this.mempoolBuffer = new TransactionsBundle()
// ZeroMQ socket for bitcoind Txs messages
@@ -75,9 +78,11 @@ class MempoolProcessor extends AbstractProcessor {
clearInterval(this.processMempoolId)
//clearInterval(this.displayStatsId)
resolve(this.txSock.disconnect(keys.bitcoind.zmqTx).close())
resolve(this.pushTxSock.disconnect(keys.ports.notifpushtx).close())
resolve(this.orchestratorSock.disconnect(keys.ports.orchestrator).close())
this.txSock.disconnect(keys.bitcoind.zmqTx).close()
this.pushTxSock.disconnect(keys.ports.notifpushtx).close()
this.orchestratorSock.disconnect(keys.ports.orchestrator).close()
return Promise.resolve()
}
/**
@@ -150,14 +155,27 @@ class MempoolProcessor extends AbstractProcessor {
let currentMempool = new TransactionsBundle(this.mempoolBuffer.toArray())
this.mempoolBuffer.clear()
const filteredTxs = await currentMempool.prefilterTransactions()
const txsForBroadcast = new Map()
return util.seriesCall(filteredTxs, async filteredTx => {
let filteredTxs = await currentMempool.prefilterByOutputs()
await util.parallelCall(filteredTxs, async filteredTx => {
const tx = new Transaction(filteredTx)
const txCheck = await tx.checkTransaction()
if (txCheck && txCheck.broadcast)
this.notifyTx(txCheck.tx)
await tx.processOutputs()
if (tx.doBroadcast)
txsForBroadcast.set(tx.txid, tx.tx)
})
filteredTxs = await currentMempool.prefilterByInputs()
await util.parallelCall(filteredTxs, async filteredTx => {
const tx = new Transaction(filteredTx)
await tx.processInputs()
if (tx.doBroadcast)
txsForBroadcast.set(tx.txid, tx.tx)
})
// Send the notifications
for (let tx of txsForBroadcast.values())
this.notifyTx(tx)
}
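The .set() calls matter: bracket assignment on a Map creates a plain object property that values() never yields, so the notification loop above would silently send nothing. The Map also deduplicates transactions matched by both the output pass and the input pass, so each one is notified once. A two-line demonstration of the pitfall:

const m = new Map()
m['a'] = 1      // plain property, invisible to the Map API
m.set('b', 2)
console.log([...m.values()]) // [2] (the bracket entry is lost)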
/**
@@ -206,6 +224,29 @@
}
}
/**
* Notify a new transaction
* @param {object} tx - bitcoin transaction
*/
notifyTx(tx) {
// Real-time client updates for this transaction.
// Any address input or output present in transaction
// is a potential client to notify.
if (this.notifSock)
this.notifSock.send(['transaction', JSON.stringify(tx)])
}
/**
* Notify a new block
* @param {object} header - block header
*/
notifyBlock(header) {
// Notify clients of the block
if (this.notifSock)
this.notifSock.send(['block', JSON.stringify(header)])
}
/**
* Check unconfirmed transactions
* @returns {Promise}
@@ -218,9 +259,9 @@
const unconfirmedTxs = await db.getUnconfirmedTransactions()
if (unconfirmedTxs.length > 0) {
await util.seriesCall(unconfirmedTxs, tx => {
await util.parallelCall(unconfirmedTxs, tx => {
try {
return this.client.getrawtransaction(tx.txnTxid, true)
return this.client.getrawtransaction({ txid: tx.txnTxid, verbose: true })
.then(async rtx => {
if (!rtx.blockhash) return null
// Transaction is confirmed
@@ -255,11 +296,10 @@
*/
async _refreshActiveStatus() {
// Get highest header in the blockchain
const info = await this.client.getblockchaininfo()
// Get highest block processed by the tracker
const [highestBlock, info] = await Promise.all([db.getHighestBlock(), this.client.getblockchaininfo()])
const highestHeader = info.headers
// Get highest block processed by the tracker
const highestBlock = await db.getHighestBlock()
if (highestBlock == null || highestBlock.blockHeight == 0) {
this.isActive = false
return

5
tracker/tracker-rest-api.js

@@ -4,10 +4,7 @@
*/
'use strict'
const qs = require('querystring')
const validator = require('validator')
const bodyParser = require('body-parser')
const Logger = require('../lib/logger')
const errors = require('../lib/errors')
const authMgr = require('../lib/auth/authorizations-manager')
const HttpServer = require('../lib/http-server/http-server')
@@ -29,8 +26,6 @@ class TrackerRestApi {
this.httpServer = httpServer
this.tracker = tracker
const urlencodedParser = bodyParser.urlencoded({ extended: true })
// Establish routes. Proxy server strips /pushtx
this.httpServer.app.get(
`/${keys.prefixes.support}/rescan`,

13
tracker/transaction.js

@@ -8,6 +8,7 @@ const _ = require('lodash')
const bitcoin = require('bitcoinjs-lib')
const util = require('../lib/util')
const Logger = require('../lib/logger')
const addrHelper = require('../lib/bitcoin/addresses-helper')
const hdaHelper = require('../lib/bitcoin/hd-accounts-helper')
const db = require('../lib/db/mysql-db-wrapper')
const network = require('../lib/bitcoin/network')
@@ -46,10 +47,10 @@ class Transaction {
async checkTransaction() {
try {
// Process transaction inputs
await this._processInputs()
await this.processInputs()
// Process transaction outputs
await this._processOutputs()
await this.processOutputs()
// If this point reached with no errors,
// store the fact that this transaction was checked.
@@ -72,7 +73,7 @@
* Process transaction inputs
* @returns {Promise}
*/
async _processInputs() {
async processInputs() {
// Array of inputs spent
const spends = []
// Store input indices, keyed by `txid-outindex` for easy retrieval
@@ -150,7 +151,7 @@
* Process transaction outputs
* @returns {Promise}
*/
async _processOutputs() {
async processOutputs() {
// Store outputs, keyed by address. Values are arrays of outputs
const indexedOutputs = {}
@@ -159,7 +160,7 @@
for (let output of this.tx.outs) {
try {
const address = bitcoin.address.fromOutputScript(output.script, activeNet)
const address = addrHelper.outputScript2Address(output.script)
if (!indexedOutputs[address])
indexedOutputs[address] = []
@@ -255,7 +256,7 @@
const xpubList = _.keys(hdAccounts)
if (xpubList.length > 0) {
await util.seriesCall(xpubList, async xpub => {
await util.parallelCall(xpubList, async xpub => {
const usedNewAddresses = await this._deriveNewAddresses(
xpub,
hdAccounts[xpub],

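A recurring change in this commit is swapping util.seriesCall for util.parallelCall wherever iterations are independent (unconfirmed-tx checks, xpub derivation, prefiltering). For orientation, illustrative reimplementations consistent with the call sites in this diff; the real helpers in lib/util.js also take the extra logging arguments visible in a few hunks (e.g. 'Tracker syncing'):

// Illustrative: run `fn` over `list` one item at a time, keeping order.
async function seriesCall(list, fn) {
  const results = []
  for (const item of list)
    results.push(await fn(item))
  return results
}

// Illustrative: run `fn` over all items concurrently.
function parallelCall(list, fn) {
  return Promise.all(list.map(item => fn(item)))
}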
106
tracker/transactions-bundle.js

@@ -6,12 +6,9 @@
const _ = require('lodash')
const LRU = require('lru-cache')
const bitcoin = require('bitcoinjs-lib')
const util = require('../lib/util')
const db = require('../lib/db/mysql-db-wrapper')
const network = require('../lib/bitcoin/network')
const keys = require('../keys')[network.key]
const activeNet = network.network
const addrHelper = require('../lib/bitcoin/addresses-helper')
/**
@@ -63,64 +60,54 @@ class TransactionsBundle {
/**
* Find the transactions of interest
* based on their inputs
* @returns {object[]} returns an array of transaction objects
*/
async prefilterTransactions() {
async prefilterByInputs() {
// Process transactions by slices of 5000 transactions
const MAX_NB_TXS = 5000
const lists = util.splitList(this.transactions, MAX_NB_TXS)
const results = await util.seriesCall(lists, list => {
return this._prefilterTransactions(list)
})
const results = await util.parallelCall(lists, txs => this._prefilterByInputs(txs))
return _.flatten(results)
}
/**
* Find the transactions of interest (internal implementation)
* @params {object[]} transactions - array of transactions objects
* Find the transactions of interest
* based on their outputs
* @returns {object[]} returns an array of transaction objects
*/
async _prefilterTransactions(transactions) {
let inputs = []
let outputs = []
async prefilterByOutputs() {
// Process transactions by slices of 5000 transactions
const MAX_NB_TXS = 5000
const lists = util.splitList(this.transactions, MAX_NB_TXS)
const results = await util.parallelCall(lists, txs => this._prefilterByOutputs(txs))
return _.flatten(results)
}
// Store indices of txs to be processed
/**
* Find the transactions of interest
* based on their outputs (internal implementation)
* @param {object[]} txs - array of transaction objects
* @returns {object[]} returns an array of transaction objects
*/
async _prefilterByOutputs(txs) {
let addresses = []
let filteredIdxTxs = []
// Store txs indices, keyed by `txid-outindex`.
// Values are arrays of txs indices (for double spends)
let indexedInputs = {}
// Store txs indices, keyed by address.
// Values are arrays of txs indices
let indexedOutputs = {}
// Stores txs indices, keyed by txids
let indexedTxs = {}
//
// Prefilter against the outputs
//
// Index the transaction outputs
for (const i in transactions) {
const tx = transactions[i]
for (const i in txs) {
const tx = txs[i]
const txid = tx.getId()
indexedTxs[txid] = i
// If we already checked this tx
if (TransactionsBundle.cache.has(txid))
continue
for (const j in tx.outs) {
try {
const script = tx.outs[j].script
const address = bitcoin.address.fromOutputScript(script, activeNet)
outputs.push(address)
// Index the output
const address = addrHelper.outputScript2Address(script)
addresses.push(address)
if (!indexedOutputs[address])
indexedOutputs[address] = []
indexedOutputs[address].push(i)
@@ -129,8 +116,7 @@ }
}
// Prefilter
const outRes = await db.getUngroupedHDAccountsByAddresses(outputs)
const outRes = await db.getUngroupedHDAccountsByAddresses(addresses)
for (const i in outRes) {
const key = outRes[i].addrAddress
const idxTxs = indexedOutputs[key]
@@ -141,41 +127,43 @@ }
}
}
//
// Prefilter against the inputs
//
return filteredIdxTxs.map(x => txs[x])
}
/**
* Find the transactions of interest
* based on their inputs (internal implementation)
* @param {object[]} txs - array of transaction objects
* @returns {object[]} returns an array of transaction objects
*/
async _prefilterByInputs(txs) {
let inputs = []
let filteredIdxTxs = []
let indexedInputs = {}
// Index the transaction inputs
for (const i in transactions) {
const tx = transactions[i]
for (const i in txs) {
const tx = txs[i]
const txid = tx.getId()
// If we already checked this tx
if (TransactionsBundle.cache.has(txid))
continue
for (const j in tx.ins) {
const spendHash = tx.ins[j].hash
const spendTxid = Buffer.from(spendHash).reverse().toString('hex')
// Check if this input consumes an output
// generated by a transaction from this block
if (filteredIdxTxs.indexOf(indexedTxs[spendTxid]) > -1 && filteredIdxTxs.indexOf(i) == -1) {
filteredIdxTxs.push(i)
} else {
const spendIdx = tx.ins[j].index
inputs.push({txid: spendTxid, index: spendIdx})
// Index the input
const key = spendTxid + '-' + spendIdx
if (!indexedInputs[key])
indexedInputs[key] = []
indexedInputs[key].push(i)
}
}
}
// Prefilter
const inRes = await db.getOutputSpends(inputs)
const lists = util.splitList(inputs, 1000)
const results = await util.parallelCall(lists, list => db.getOutputSpends(list))
const inRes = _.flatten(results)
for (const i in inRes) {
const key = inRes[i].txnTxid + '-' + inRes[i].outIndex
const idxTxs = indexedInputs[key]
@@ -186,11 +174,7 @@ }
}
}
//
// Returns the matching transactions
//
filteredIdxTxs.sort((a, b) => a - b);
return filteredIdxTxs.map(x => transactions[x])
return filteredIdxTxs.map(x => txs[x])
}
}
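The input prefilter joins mempool spends against tracked outputs on a composite `txid-outindex` string key, batching the db lookups 1,000 inputs at a time. The join itself, shown in isolation (illustrative; the row field names txnTxid/outIndex are the ones used above):

// Illustrative: keep the indices of transactions whose inputs spend
// an output that the database already tracks.
function matchSpends(indexedInputs, dbRows) {
  const matched = new Set()
  for (const row of dbRows) {
    const key = `${row.txnTxid}-${row.outIndex}`
    for (const txIdx of (indexedInputs[key] || []))
      matched.add(txIdx)
  }
  return [...matched]
}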
