Browse Source

Initial commit

umbrel v1.0.0
kenshin-samourai 5 years ago
commit
b4cd156301
  1. 3
      .dockerignore
  2. 7
      .gitignore
  3. 616
      LICENSE.md
  4. 77
      README.md
  5. 160
      accounts/api-helper.js
  6. 55
      accounts/fees-rest-api.js
  7. 78
      accounts/headers-rest-api.js
  8. 37
      accounts/index-cluster.js
  9. 76
      accounts/index.js
  10. 136
      accounts/multiaddr-rest-api.js
  11. 83
      accounts/notifications-server.js
  12. 476
      accounts/notifications-service.js
  13. 56
      accounts/status-rest-api.js
  14. 52
      accounts/status.js
  15. 384
      accounts/support-rest-api.js
  16. 156
      accounts/transactions-rest-api.js
  17. 136
      accounts/unspent-rest-api.js
  18. 450
      accounts/xpub-rest-api.js
  19. 212
      db-scripts/1_db.sql
  20. 37
      doc/DELETE_xpub.md
  21. 196
      doc/DOCKER_setup.md
  22. 39
      doc/GET_fees.md
  23. 47
      doc/GET_header.md
  24. 117
      doc/GET_multiaddr.md
  25. 131
      doc/GET_tx.md
  26. 87
      doc/GET_txs.md
  27. 61
      doc/GET_unspent.md
  28. 45
      doc/GET_xpub.md
  29. 69
      doc/POST_auth_login.md
  30. 31
      doc/POST_auth_refresh.md
  31. 38
      doc/POST_pushtx.md
  32. 121
      doc/POST_pushtx_schedule.md
  33. 41
      doc/POST_xpub.md
  34. 39
      doc/POST_xpub_lock.md
  35. 29
      doc/README.md
  36. 52
      docker/my-dojo/.env
  37. 56
      docker/my-dojo/bitcoin/Dockerfile
  38. 22
      docker/my-dojo/bitcoin/bitcoin.conf
  39. 14
      docker/my-dojo/bitcoin/restart.sh
  40. 178
      docker/my-dojo/bitcoin/wait-for-it.sh
  41. 32
      docker/my-dojo/conf/docker-bitcoind.conf
  42. 15
      docker/my-dojo/conf/docker-mysql.conf
  43. 30
      docker/my-dojo/conf/docker-node.conf
  44. 130
      docker/my-dojo/docker-compose.yaml
  45. 222
      docker/my-dojo/dojo.sh
  46. 7
      docker/my-dojo/mysql/Dockerfile
  47. 2
      docker/my-dojo/mysql/mysql-dojo.cnf
  48. 18
      docker/my-dojo/nginx/Dockerfile
  49. 53
      docker/my-dojo/nginx/dojo.conf
  50. 44
      docker/my-dojo/nginx/nginx.conf
  51. 79
      docker/my-dojo/nginx/wait-for
  52. 43
      docker/my-dojo/node/Dockerfile
  53. 244
      docker/my-dojo/node/keys.index.js
  54. 13
      docker/my-dojo/node/restart.sh
  55. 178
      docker/my-dojo/node/wait-for-it.sh
  56. 55
      docker/my-dojo/tor/Dockerfile
  57. 49
      docker/my-dojo/tor/torrc
  58. 178
      docker/my-dojo/tor/wait-for-it.sh
  59. 349
      keys/index-example.js
  60. 106
      lib/auth/auth-rest-api.js
  61. 77
      lib/auth/authentication-manager.js
  62. 296
      lib/auth/authorizations-manager.js
  63. 62
      lib/auth/localapikey-strategy-configurator.js
  64. 106
      lib/bitcoin/addresses-helper.js
  65. 44
      lib/bitcoin/addresses-service.js
  66. 400
      lib/bitcoin/hd-accounts-helper.js
  67. 250
      lib/bitcoin/hd-accounts-service.js
  68. 44
      lib/bitcoin/network.js
  69. 92
      lib/bitcoin/parallel-address-derivation.js
  70. 71
      lib/bitcoind-rpc/fees.js
  71. 56
      lib/bitcoind-rpc/headers.js
  72. 69
      lib/bitcoind-rpc/latest-block.js
  73. 88
      lib/bitcoind-rpc/rpc-client.js
  74. 215
      lib/bitcoind-rpc/transactions.js
  75. 1974
      lib/db/mysql-db-wrapper.js
  76. 80
      lib/errors.js
  77. 85
      lib/fork-pool.js
  78. 242
      lib/http-server/http-server.js
  79. 67
      lib/logger.js
  80. 129
      lib/remote-importer/bitcoind-wrapper.js
  81. 122
      lib/remote-importer/btccom-wrapper.js
  82. 90
      lib/remote-importer/insight-wrapper.js
  83. 114
      lib/remote-importer/oxt-wrapper.js
  84. 436
      lib/remote-importer/remote-importer.js
  85. 112
      lib/remote-importer/sources-mainnet.js
  86. 153
      lib/remote-importer/sources-testnet.js
  87. 40
      lib/remote-importer/sources.js
  88. 47
      lib/remote-importer/wrapper.js
  89. 368
      lib/util.js
  90. 152
      lib/wallet/address-info.js
  91. 187
      lib/wallet/hd-account-info.js
  92. 88
      lib/wallet/wallet-entities.js
  93. 309
      lib/wallet/wallet-info.js
  94. 301
      lib/wallet/wallet-service.js
  95. 42
      package.json
  96. 49
      pushtx/index-orchestrator.js
  97. 57
      pushtx/index.js
  98. 182
      pushtx/orchestrator.js
  99. 77
      pushtx/pushtx-processor.js
  100. 223
      pushtx/pushtx-rest-api.js

3
.dockerignore

@@ -0,0 +1,3 @@
node_modules
.git
private-tests

7
.gitignore

@@ -0,0 +1,7 @@
db-scripts/updates/
keys/index.js
keys/sslcert/
node_modules/
private-tests/
*.log
package-lock.json

616
LICENSE.md

@@ -0,0 +1,616 @@
### GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007
Copyright (C) 2007 Free Software Foundation, Inc.
<https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies of this
license document, but changing it is not allowed.
### Preamble
The GNU Affero General Public License is a free, copyleft license for
software and other kinds of works, specifically designed to ensure
cooperation with the community in the case of network server software.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
our General Public Licenses are intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains
free software for all its users.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
Developers that use our General Public Licenses protect your rights
with two steps: (1) assert copyright on the software, and (2) offer
you this License which gives you legal permission to copy, distribute
and/or modify the software.
A secondary benefit of defending all users' freedom is that
improvements made in alternate versions of the program, if they
receive widespread use, become available for other developers to
incorporate. Many developers of free software are heartened and
encouraged by the resulting cooperation. However, in the case of
software used on network servers, this result may fail to come about.
The GNU General Public License permits making a modified version and
letting the public access it on a server without ever releasing its
source code to the public.
The GNU Affero General Public License is designed specifically to
ensure that, in such cases, the modified source code becomes available
to the community. It requires the operator of a network server to
provide the source code of the modified version running there to the
users of that server. Therefore, public use of a modified version, on
a publicly accessible server, gives the public access to the source
code of the modified version.
An older license, called the Affero General Public License and
published by Affero, was designed to accomplish similar goals. This is
a different license, not a version of the Affero GPL, but Affero has
released a new version of the Affero GPL which permits relicensing
under this license.
The precise terms and conditions for copying, distribution and
modification follow.
### TERMS AND CONDITIONS
#### 0. Definitions.
"This License" refers to version 3 of the GNU Affero General Public
License.
"Copyright" also means copyright-like laws that apply to other kinds
of works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of
an exact copy. The resulting work is called a "modified version" of
the earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user
through a computer network, with no transfer of a copy, is not
conveying.
An interactive user interface displays "Appropriate Legal Notices" to
the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
#### 1. Source Code.
The "source code" for a work means the preferred form of the work for
making modifications to it. "Object code" means any non-source form of
a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users can
regenerate automatically from other parts of the Corresponding Source.
The Corresponding Source for a work in source code form is that same
work.
#### 2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not convey,
without conditions so long as your license otherwise remains in force.
You may convey covered works to others for the sole purpose of having
them make modifications exclusively for you, or provide you with
facilities for running those works, provided that you comply with the
terms of this License in conveying all material for which you do not
control copyright. Those thus making or running the covered works for
you must do so exclusively on your behalf, under your direction and
control, on terms that prohibit them from making any copies of your
copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under the
conditions stated below. Sublicensing is not allowed; section 10 makes
it unnecessary.
#### 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such
circumvention is effected by exercising rights under this License with
respect to the covered work, and you disclaim any intention to limit
operation or modification of the work as a means of enforcing, against
the work's users, your or third parties' legal rights to forbid
circumvention of technological measures.
#### 4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
#### 5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these
conditions:
- a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
- b) The work must carry prominent notices stating that it is
released under this License and any conditions added under
section 7. This requirement modifies the requirement in section 4
to "keep intact all notices".
- c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
- d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
#### 6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms of
sections 4 and 5, provided that you also convey the machine-readable
Corresponding Source under the terms of this License, in one of these
ways:
- a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
- b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the Corresponding
Source from a network server at no charge.
- c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
- d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
- e) Convey the object code using peer-to-peer transmission,
provided you inform other peers where the object code and
Corresponding Source of the work are being offered to the general
public at no charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal,
family, or household purposes, or (2) anything designed or sold for
incorporation into a dwelling. In determining whether a product is a
consumer product, doubtful cases shall be resolved in favor of
coverage. For a particular product received by a particular user,
"normally used" refers to a typical or common use of that class of
product, regardless of the status of the particular user or of the way
in which the particular user actually uses, or expects or is expected
to use, the product. A product is a consumer product regardless of
whether the product has substantial commercial, industrial or
non-consumer uses, unless such uses represent the only significant
mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to
install and execute modified versions of a covered work in that User
Product from a modified version of its Corresponding Source. The
information must suffice to ensure that the continued functioning of
the modified object code is in no case prevented or interfered with
solely because modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or
updates for a work that has been modified or installed by the
recipient, or for the User Product in which it has been modified or
installed. Access to a network may be denied when the modification
itself materially and adversely affects the operation of the network
or violates the rules and protocols for communication across the
network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
#### 7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders
of that material) supplement the terms of this License with terms:
- a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
- b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
- c) Prohibiting misrepresentation of the origin of that material,
or requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
- d) Limiting the use for publicity purposes of names of licensors
or authors of the material; or
- e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
- f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions
of it) with contractual assumptions of liability to the recipient,
for any liability that these contractual assumptions directly
impose on those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions; the
above requirements apply either way.
#### 8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your license
from a particular copyright holder is reinstated (a) provisionally,
unless and until the copyright holder explicitly and finally
terminates your license, and (b) permanently, if the copyright holder
fails to notify you of the violation by some reasonable means prior to
60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
#### 9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or run
a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
#### 10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
#### 11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims owned
or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within the
scope of its coverage, prohibits the exercise of, or is conditioned on
the non-exercise of one or more of the rights that are specifically
granted under this License. You may not convey a covered work if you
are a party to an arrangement with a third party that is in the
business of distributing software, under which you make payment to the
third party based on the extent of your activity of conveying the
work, and under which the third party grants, to any of the parties
who would receive the covered work from you, a discriminatory patent
license (a) in connection with copies of the covered work conveyed by
you (or copies made from those copies), or (b) primarily for and in
connection with specific products or compilations that contain the
covered work, unless you entered into that arrangement, or that patent
license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
#### 12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under
this License and any other pertinent obligations, then as a
consequence you may not convey it at all. For example, if you agree to
terms that obligate you to collect a royalty for further conveying
from those to whom you convey the Program, the only way you could
satisfy both those terms and this License would be to refrain entirely
from conveying the Program.
#### 13. Remote Network Interaction; Use with the GNU General Public License.
Notwithstanding any other provision of this License, if you modify the
Program, your modified version must prominently offer all users
interacting with it remotely through a computer network (if your
version supports such interaction) an opportunity to receive the
Corresponding Source of your version by providing access to the
Corresponding Source from a network server at no charge, through some
standard or customary means of facilitating copying of software. This
Corresponding Source shall include the Corresponding Source for any
work covered by version 3 of the GNU General Public License that is
incorporated pursuant to the following paragraph.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the work with which it is combined will remain governed by version
3 of the GNU General Public License.
#### 14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions
of the GNU Affero General Public License from time to time. Such new
versions will be similar in spirit to the present version, but may
differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the Program
specifies that a certain numbered version of the GNU Affero General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU Affero General Public License, you may choose any version ever
published by the Free Software Foundation.
If the Program specifies that a proxy can decide which future versions
of the GNU Affero General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
#### 15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT
WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND
PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE
DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR
CORRECTION.
#### 16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR
CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES
ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT
NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR
LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM
TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER
PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
#### 17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
Copyright © 2019 – Katana Cryptographic Ltd. All Rights Reserved.

77
README.md

@ -0,0 +1,77 @@
# Samourai Dojo
Samourai Dojo is the backing server for Samourai Wallet. Provides HD account & loose addresses (BIP47) balances & transactions lists. Provides unspent output lists to the wallet. PushTX endpoint broadcasts transactions through the backing bitcoind node.
[View API documentation](../master/doc/README.md)
## Installation ##
### MyDojo (installation with Docker and Docker Compose)
This setup is recommended for Samourai users who feel comfortable with a few command lines.
It provides in a single command the setup of a full Samourai backend composed of:
* a bitcoin full node only accessible as an ephemeral Tor hidden service,
* the backend database,
* the backend modules with an API accessible as a static Tor hidden service,
* a maintenance tool accessible through a Tor web browser.
See [the documentation](./doc/DOCKER_setup.md) for detailed setup instructions.
### Manual installation (developers only)
A full manual setup isn't recommended if you don't intend to install a local development environment.
## Theory of Operation
Tracking wallet balances via `xpub` requires conforming to [BIP44](https://github.com/bitcoin/bips/blob/master/bip-0044.mediawiki), [BIP49](https://github.com/bitcoin/bips/blob/master/bip-0049.mediawiki) or [BIP84](https://github.com/bitcoin/bips/blob/master/bip-0084.mediawiki) address derivation scheme. Public keys received by Dojo correspond to single accounts and derive all addresses in the account and change chains. These addresses are at `M/0/x` and `M/1/y`, respectively.
Dojo relies on the backing bitcoind node to maintain privacy.
### Architecture
Dojo is composed of 3 modules:
* API (/account): web server providing a REST API and web sockets used by Samourai Wallet and Sentinel.
* PushTx (/pushtx): web server providing a REST API used to push transactions on the Bitcoin P2P network.
* Tracker (/tracker): process listening to the bitcoind node and indexing transactions of interest.
API and PushTx modules are able to operate behind a web server (e.g. nginx) or as frontend http servers (not recommended). Both support HTTP or HTTPS (if SSL has been properly configured in /keys/index.js). These modules can also operate as a Tor hidden service (recommended).
Authentication is enforced by an API key and Json Web Tokens.
### Implementation Notes
**Tracker**
* ZMQ notifications send raw transactions and block hashes. Keep track of txids with timestamps, clearing out old txids after a timeout
* On realtime transaction:
* Query database with all output addresses to see if an account has received a transaction. Notify client via WebSocket.
* Query database with all input txids to see if an account has sent coins. Make proper database entries and notify via WebSocket.
* On a block notification, query database for txids included and update confirmed height
* On a blockchain reorg (orphan block), previous block hash will not match last known block hash in the app. Need to mark transactions as unconfirmed and rescan blocks from new chain tip to last known hash. Note that many of the transactions from the orphaned block may be included in the new chain.
* When an input spending a known output is confirmed in a block, delete any other inputs referencing that output, since this would be a double-spend.
**Import of HD Accounts and data sources**
* First import of an unknown HD account relies on a data source (local bitcoind or OXT). After that, the tracker will keep everything current.
* Default option relies on the local bitcoind and makes you 100% independent of Samourai Wallet's infrastructure. This option is recommended for better privacy.
* Activation of bitcoind as the data source:
* Edit /keys/index.js and set "explorers.bitcoind" to "active". OXT API will be ignored.
* Activation of OXT as the data source (through socks5):
* Edit /keys/index.js and set "explorers.bitcoind" to "inactive".
* Main drawbacks of using your local bitcoind for these imports:
* It doesn't return the full transaction history associated with the HD account but only transactions having an unspent output controlled by the HD account.
* It's slightly slower than using the option relying on the OXT API.
* In some specific cases, the importer might miss the most recent unspent outputs. Higher values of gap.external and gap.internal in /keys/index.js should help to mitigate this issue. Another workaround is to request the endpoint /support/xpub/.../rescan provided by the REST API with the optional gap parameter.
* This option is considered as experimental.

160
accounts/api-helper.js

@ -0,0 +1,160 @@
/*!
* accounts/api-helper.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const bitcoin = require('bitcoinjs-lib')
const validator = require('validator')
const Logger = require('../lib/logger')
const errors = require('../lib/errors')
const WalletEntities = require('../lib/wallet/wallet-entities')
const network = require('../lib/bitcoin/network')
const activeNet = network.network
const hdaHelper = require('../lib/bitcoin/hd-accounts-helper')
const addrHelper = require('../lib/bitcoin/addresses-helper')
const HttpServer = require('../lib/http-server/http-server')
/**
 * A singleton providing util methods used by the API
 */
class ApiHelper {

  /**
   * Parse a string and extract (x|y|z|t|u|v)pubs, addresses and pubkeys
   * @param {string} str - list of entities separated by '|'
   * @returns {object} returns a WalletEntities object
   */
  parseEntities(str) {
    const ret = new WalletEntities()
    if (typeof str !== 'string')
      return ret
    for (let item of str.split('|')) {
      try {
        if (hdaHelper.isValid(item) && !ret.hasXPub(item)) {
          // Translate ypub/zpub wrappers into the underlying xpub
          const xpub = hdaHelper.xlatXPUB(item)
          if (hdaHelper.isYpub(item))
            ret.addHdAccount(xpub, item, false)
          else if (hdaHelper.isZpub(item))
            ret.addHdAccount(xpub, false, item)
          else
            ret.addHdAccount(item, false, false)
        } else if (addrHelper.isSupportedPubKey(item) && !ret.hasPubKey(item)) {
          // Derive pubkey as 3 addresses (P2PKH, P2WPKH/P2SH, BECH32)
          // (Buffer.from replaces the deprecated and unsafe `new Buffer`)
          const bufItem = Buffer.from(item, 'hex')
          const funcs = [
            addrHelper.p2pkhAddress,
            addrHelper.p2wpkhP2shAddress,
            addrHelper.p2wpkhAddress
          ]
          for (let f of funcs) {
            const addr = f(bufItem)
            if (ret.hasAddress(addr))
              ret.updatePubKey(addr, item)
            else
              ret.addAddress(addr, item)
          }
        } else if (bitcoin.address.toOutputScript(item, activeNet) && !ret.hasAddress(item)) {
          // Bech32 addresses are managed in lower case
          if (addrHelper.isBech32(item))
            item = item.toLowerCase()
          ret.addAddress(item, false)
        }
      } catch(e) {
        // Invalid entity: skip it (best-effort parsing of the list)
      }
    }
    return ret
  }

  /**
   * Check entities passed as url params
   * @param {object} params - request query or body object
   * @returns {boolean} return true if conditions are met, false otherwise
   */
  checkEntitiesParams(params) {
    // NOTE: returns the first truthy param value (truthy, not a strict
    // boolean); callers only use the result in boolean context
    return params.active
      || params.new
      || params.pubkey
      || params.bip49
      || params.bip84
  }

  /**
   * Parse the entities passed as arguments of an url
   * @param {object} params - request query or body object
   * @returns {object} return a mapping object
   *  {active:..., legacy:..., pubkey:..., bip49:..., bip84:...}
   */
  parseEntitiesParams(params) {
    return {
      active: this.parseEntities(params.active),
      legacy: this.parseEntities(params.new),
      pubkey: this.parseEntities(params.pubkey),
      bip49: this.parseEntities(params.bip49),
      bip84: this.parseEntities(params.bip84)
    }
  }

  /**
   * Express middleware validating if entities params are well formed
   * @param {object} req - http request object
   * @param {object} res - http response object
   * @param {function} next - next express middleware
   */
  validateEntitiesParams(req, res, next) {
    const params = this.checkEntitiesParams(req.query) ? req.query : req.body
    // Every provided list must pass validation.
    // (the original `isValid &= false` coerced the flag to a number;
    // a plain boolean is used instead)
    const isValid = ['active', 'new', 'pubkey', 'bip49', 'bip84'].every(key =>
      !params[key] || this.subValidateEntitiesParams(params[key])
    )
    if (isValid) {
      next()
    } else {
      HttpServer.sendError(res, errors.body.INVDATA)
      Logger.error(
        params,
        `ApiHelper.validateEntitiesParams() : Invalid arguments`
      )
    }
  }

  /**
   * Validate a request argument
   * @param {string} arg - request argument
   * @returns {boolean} true when every '|'-separated item is alphanumeric
   */
  subValidateEntitiesParams(arg) {
    return arg.split('|').every(item => validator.isAlphanumeric(item))
  }

}

module.exports = new ApiHelper()

55
accounts/fees-rest-api.js

@ -0,0 +1,55 @@
/*!
 * accounts/fees-rest-api.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const Logger = require('../lib/logger')
const rpcFees = require('../lib/bitcoind-rpc/fees')
const authMgr = require('../lib/auth/authorizations-manager')
const HttpServer = require('../lib/http-server/http-server')
const debugApi = !!(process.argv.indexOf('api-debug') > -1)
/**
 * Fees API endpoint
 * (registers the GET /fees route on the shared HTTP server)
 */
class FeesRestApi {
  /**
   * Constructor
   * @param {pushtx.HttpServer} httpServer - HTTP server
   */
  constructor(httpServer) {
    this.httpServer = httpServer
    // Establish routes
    // Middlewares run in order: authentication, handler, auth error fallback
    this.httpServer.app.get(
      '/fees',
      authMgr.checkAuthentication.bind(authMgr),
      this.getFees.bind(this),
      HttpServer.sendAuthError
    )
    // Refresh the network fees
    // NOTE(review): the result of refresh() is not awaited here
    // (fire-and-forget warm-up at construction time); presumably it
    // handles its own errors — confirm in lib/bitcoind-rpc/fees
    rpcFees.refresh()
  }
  /**
   * Refresh and return the current fees
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  async getFees(req, res) {
    try {
      const fees = await rpcFees.getFees()
      HttpServer.sendOkDataOnly(res, fees)
    } catch (e) {
      // Forward the error to the client in the standard error envelope
      HttpServer.sendError(res, e)
    } finally {
      // Only log when the 'api-debug' command-line flag is set
      debugApi && Logger.info(`Completed GET /fees`)
    }
  }
}
module.exports = FeesRestApi

78
accounts/headers-rest-api.js

@ -0,0 +1,78 @@
/*!
 * accounts/headers-rest-api.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const validator = require('validator')
const Logger = require('../lib/logger')
const errors = require('../lib/errors')
const rpcHeaders = require('../lib/bitcoind-rpc/headers')
const authMgr = require('../lib/auth/authorizations-manager')
const HttpServer = require('../lib/http-server/http-server')
const apiHelper = require('./api-helper')
const debugApi = !!(process.argv.indexOf('api-debug') > -1)
/**
 * Headers API endpoints
 * (registers the GET /header/:hash route on the shared HTTP server)
 */
class HeadersRestApi {
  /**
   * Constructor
   * @param {pushtx.HttpServer} httpServer - HTTP server
   */
  constructor(httpServer) {
    this.httpServer = httpServer
    // Establish routes
    // Middlewares run in order: authentication, argument validation,
    // handler, auth error fallback
    this.httpServer.app.get(
      '/header/:hash',
      authMgr.checkAuthentication.bind(authMgr),
      this.validateArgsGetHeader.bind(this),
      this.getHeader.bind(this),
      HttpServer.sendAuthError
    )
  }
  /**
   * Retrieve the block header for a given hash
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  async getHeader(req, res) {
    const blockHash = req.params.hash
    try {
      const header = await rpcHeaders.getHeader(blockHash)
      HttpServer.sendRawData(res, header)
    } catch(e) {
      HttpServer.sendError(res, e)
    } finally {
      // Only log when the 'api-debug' command-line flag is set
      debugApi && Logger.info(`Completed GET /header/${blockHash}`)
    }
  }
  /**
   * Validate request arguments
   * (the hash must be a sha256-shaped hex string)
   * @param {object} req - http request object
   * @param {object} res - http response object
   * @param {function} next - next express middleware
   */
  validateArgsGetHeader(req, res, next) {
    if (validator.isHash(req.params.hash, 'sha256')) {
      next()
      return
    }
    HttpServer.sendError(res, errors.body.INVDATA)
    Logger.error(
      req.params.hash,
      'HeadersRestApi.validateArgsGetHeader() : Invalid hash'
    )
  }
}
module.exports = HeadersRestApi

37
accounts/index-cluster.js

@ -0,0 +1,37 @@
/*!
* accounts/index-cluster.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const os = require('os')
const cluster = require('cluster')
const Logger = require('../lib/logger')
/**
 * Launch a cluster of Samourai API
 * (one worker process per available CPU core)
 */
// Fix: the original stored the array returned by os.cpus() in a
// variable named like a count; keep an actual count instead
const nbCPUS = os.cpus().length

if (cluster.isMaster) {
  // Master process: fork one API worker per CPU core
  for (let i = 0; i < nbCPUS; i++)
    cluster.fork()

  cluster.on('listening', function(worker) {
    Logger.info(`Cluster ${worker.process.pid} connected`)
  })

  cluster.on('disconnect', function(worker) {
    Logger.info(`Cluster ${worker.process.pid} disconnected`)
  })

  cluster.on('exit', function(worker) {
    Logger.info(`Cluster ${worker.process.pid} is dead`)
    // Ensuring a new cluster will start if an old one dies
    cluster.fork()
  })
} else {
  // Worker process: run a regular instance of the API
  require('./index.js')
}

76
accounts/index.js

@ -0,0 +1,76 @@
/*!
* accounts/index.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
(async () => {
  'use strict'
  // Module dependencies (loaded inside the IIFE so nothing runs
  // until the async bootstrap starts)
  const Logger = require('../lib/logger')
  const RpcClient = require('../lib/bitcoind-rpc/rpc-client')
  const network = require('../lib/bitcoin/network')
  const keys = require('../keys')[network.key]
  const db = require('../lib/db/mysql-db-wrapper')
  const hdaHelper = require('../lib/bitcoin/hd-accounts-helper')
  const HttpServer = require('../lib/http-server/http-server')
  const AuthRestApi = require('../lib/auth/auth-rest-api')
  const XPubRestApi = require('./xpub-rest-api')
  const FeesRestApi = require('./fees-rest-api')
  const HeadersRestApi = require('./headers-rest-api')
  const TransactionsRestApi = require('./transactions-rest-api')
  const StatusRestApi = require('./status-rest-api')
  const notifServer = require('./notifications-server')
  const MultiaddrRestApi = require('./multiaddr-rest-api')
  const UnspentRestApi = require('./unspent-rest-api')
  const SupportRestApi = require('./support-rest-api')
  /**
   * Samourai REST API
   * Bootstrap sequence: wait for bitcoind, connect the db,
   * start derivation, create the http server, register endpoints,
   * then start listening and attach the websocket notifier.
   */
  Logger.info('Process ID: ' + process.pid)
  Logger.info('Preparing the REST API')
  // Wait for Bitcoind RPC API
  // being ready to process requests
  await RpcClient.waitForBitcoindRpcApi()
  // Initialize the db wrapper
  const dbConfig = {
    connectionLimit: keys.db.connectionLimitApi,
    acquireTimeout: keys.db.acquireTimeout,
    host: keys.db.host,
    user: keys.db.user,
    password: keys.db.pass,
    database: keys.db.database
  }
  db.connect(dbConfig)
  // Activate addresses derivation
  // in an external process
  hdaHelper.activateExternalDerivation()
  // Initialize the http server
  const port = keys.ports.account
  const httpsOptions = keys.https.account
  const httpServer = new HttpServer(port, httpsOptions)
  // Initialize the rest api endpoints
  // NOTE(review): these instances are kept only for their constructor
  // side effects (each registers its routes on httpServer); the
  // bindings themselves are intentionally unused afterwards
  const authRestApi = new AuthRestApi(httpServer)
  const xpubRestApi = new XPubRestApi(httpServer)
  const feesRestApi = new FeesRestApi(httpServer)
  const headersRestApi = new HeadersRestApi(httpServer)
  const transactionsRestApi = new TransactionsRestApi(httpServer)
  const statusRestApi = new StatusRestApi(httpServer)
  const multiaddrRestApi = new MultiaddrRestApi(httpServer)
  const unspentRestApi = new UnspentRestApi(httpServer)
  const supportRestApi = new SupportRestApi(httpServer)
  // Start the http server
  httpServer.start()
  // Attach the web sockets server to the web server
  // (must happen after the http server exists so the ws server can
  // share its listening socket)
  notifServer.attach(httpServer)
})()

136
accounts/multiaddr-rest-api.js

@ -0,0 +1,136 @@
/*!
* accounts/multiaddr-rest-api.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const bodyParser = require('body-parser')
const Logger = require('../lib/logger')
const errors = require('../lib/errors')
const walletService = require('../lib/wallet/wallet-service')
const authMgr = require('../lib/auth/authorizations-manager')
const HttpServer = require('../lib/http-server/http-server')
const apiHelper = require('./api-helper')
const debugApi = !!(process.argv.indexOf('api-debug') > -1)
/**
 * Multiaddr API endpoints
 * (GET and POST /multiaddr share the same lookup logic; the shared
 * parts are factored into the private helpers below)
 */
class MultiaddrRestApi {

  /**
   * Constructor
   * @param {pushtx.HttpServer} httpServer - HTTP server
   */
  constructor(httpServer) {
    this.httpServer = httpServer
    // Establish routes
    const urlencodedParser = bodyParser.urlencoded({ extended: true })
    this.httpServer.app.get(
      '/multiaddr',
      authMgr.checkAuthentication.bind(authMgr),
      apiHelper.validateEntitiesParams.bind(apiHelper),
      this.getMultiaddr.bind(this),
      HttpServer.sendAuthError
    )
    this.httpServer.app.post(
      '/multiaddr',
      urlencodedParser,
      authMgr.checkAuthentication.bind(authMgr),
      apiHelper.validateEntitiesParams.bind(apiHelper),
      this.postMultiaddr.bind(this),
      HttpServer.sendAuthError
    )
  }

  /**
   * Handle multiaddr GET request
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  async getMultiaddr(req, res) {
    try {
      // Check request params
      if (!apiHelper.checkEntitiesParams(req.query))
        return HttpServer.sendError(res, errors.multiaddr.NOACT)
      const result = await this._getWalletInfo(req.query)
      // GET responses are sent pretty-printed as raw data
      // (historical behaviour of this endpoint)
      const ret = JSON.stringify(result, null, 2)
      HttpServer.sendRawData(res, ret)
    } catch(e) {
      HttpServer.sendError(res, e)
    } finally {
      this._logRequest('GET', req.query)
    }
  }

  /**
   * Handle multiaddr POST request
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  async postMultiaddr(req, res) {
    try {
      // Check request params
      if (!apiHelper.checkEntitiesParams(req.body))
        return HttpServer.sendError(res, errors.multiaddr.NOACT)
      const result = await this._getWalletInfo(req.body)
      HttpServer.sendOkDataOnly(res, result)
    } catch(e) {
      HttpServer.sendError(res, e)
    } finally {
      this._logRequest('POST', req.body)
    }
  }

  /**
   * Parse the entities carried by a request and retrieve the wallet info
   * (shared by the GET and POST handlers)
   * @param {object} params - request query or body object
   * @returns {Promise} resolves with the wallet info object
   */
  _getWalletInfo(params) {
    const entities = apiHelper.parseEntitiesParams(params)
    return walletService.getWalletInfo(
      entities.active,
      entities.legacy,
      entities.bip49,
      entities.bip84,
      entities.pubkey
    )
  }

  /**
   * Log a completed request when api debugging is active
   * @param {string} method - http verb ('GET' or 'POST')
   * @param {object} params - request query or body object
   */
  _logRequest(method, params) {
    if (debugApi) {
      // Whitespace of the debug line is normalized (single spaces)
      const strParams = [
        params.active ? params.active : '',
        params.new ? params.new : '',
        params.pubkey ? params.pubkey : '',
        params.bip49 ? params.bip49 : '',
        params.bip84 ? params.bip84 : ''
      ].join(' ')
      Logger.info(`Completed ${method} /multiaddr ${strParams}`)
    }
  }

}

module.exports = MultiaddrRestApi

83
accounts/notifications-server.js

@ -0,0 +1,83 @@
/*!
 * accounts/notifications-server.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const _ = require('lodash')
const zmq = require('zeromq')
const WebSocket = require('websocket')
const Logger = require('../lib/logger')
const network = require('../lib/bitcoin/network')
const keys = require('../keys')[network.key]
const status = require('./status')
const NotificationsService = require('./notifications-service')
/**
 * A singleton providing a notifications server over web sockets
 */
class NotificationsServer {

  /**
   * Constructor
   */
  constructor() {
    // Http server
    this.httpServer = null
    // Notifications service (created lazily by attach())
    this.notifService = null
    // Initialize the zmq socket for communications
    // with the tracker
    this._initTrackerSocket()
  }

  /**
   * Attach the web sockets server to the listening web server
   * @param {pushtx.HttpServer} httpServer - HTTP server
   */
  attach(httpServer) {
    this.httpServer = httpServer
    // Idempotent: only create the notifications service once
    if (this.notifService !== null) return
    this.notifService = new NotificationsService(httpServer.server)
  }

  /**
   * Initialize a zmq socket for notifications from the tracker
   */
  _initTrackerSocket() {
    this.sock = zmq.socket('sub')
    this.sock.connect(`tcp://127.0.0.1:${keys.ports.tracker}`)
    this.sock.subscribe('block')
    this.sock.subscribe('transaction')

    this.sock.on('message', (topic, message, sequence) => {
      // Guard: the zmq socket is created in the constructor, so
      // messages may arrive before attach() has created the
      // notifications service (the original crashed on null here)
      if (this.notifService === null) return
      switch(topic.toString()) {
        case 'block':
          try {
            const header = JSON.parse(message.toString())
            this.notifService.notifyBlock(header)
          } catch(e) {
            Logger.error(e, 'NotificationServer._initTrackerSocket() : Error in block message')
          }
          break
        case 'transaction':
          try {
            const tx = JSON.parse(message.toString())
            this.notifService.notifyTransaction(tx)
          } catch(e) {
            Logger.error(e, 'NotificationServer._initTrackerSocket() : Error in transaction message')
          }
          break
        default:
          Logger.info(`Unknown ZMQ message topic: "${topic}"`)
      }
    })
  }

}

module.exports = new NotificationsServer()

476
accounts/notifications-service.js

@ -0,0 +1,476 @@
/*!
 * accounts/notifications-service.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const _ = require('lodash')
const LRU = require('lru-cache')
const WebSocket = require('websocket')
const Logger = require('../lib/logger')
const network = require('../lib/bitcoin/network')
const keys = require('../keys')[network.key]
const apiHelper = require('./api-helper')
const status = require('./status')
const authMgr = require('../lib/auth/authorizations-manager')
const debug = !!(process.argv.indexOf('ws-debug') > -1)
/**
* A class providing a notifications server over web sockets
*/
class NotificationsService {
/**
 * Constructor
 * @param {object} server - listening instance of a http server
 */
constructor(server) {
// Web sockets server
this.ws = null
// Dictionary of connections, keyed by connection id
this.conn = {}
// Dictionary of subscriptions: topic -> array of connection ids
this.subs = {}
// Dictionary mapping addresses to pubkeys
this.cachePubKeys = {}
// Cache registering the most recent subscriptions received
// Used to filter multiple subscriptions sent by external apps.
// NOTE(review): LRU is invoked without `new` — presumably the
// installed lru-cache version exports a callable factory; confirm
// when upgrading the dependency
this.cacheSubs = LRU({
// Maximum number of subscriptions to store in cache
// Estimate: 1000 clients with an average of 5 subscriptions
max: 5000,
// Function used to compute length of item
// (each entry counts as 1 toward `max`)
length: (n, key) => 1,
// Maximum age for items in the cache (1mn)
maxAge: 60000
})
// Initialize the web socket server
this._initWSServer(server)
}
/**
* Initialize the web sockets server
* @param {object} server - listening instance of a http server
*/
_initWSServer(server) {
this.ws = new WebSocket.server({httpServer: server})
Logger.info('Created WebSocket server')
this.ws.on('request', req => {
try {
let conn = req.accept(null, req.origin)
conn.id = status.sessions++
conn.subs = []
debug && Logger.info(`Client ${conn.id} connected`)
conn.on('close', () => {
this._closeWSConnection(conn, false)
})
conn.on('error', err => {
Logger.error(err, `NotificationsService : Error on connection ${conn.id}`)
if (conn.connected)
this._closeWSConnection(conn, true)
})
conn.on('message', msg => {
if (msg.type == 'utf8')
this._handleWSMessage(msg.utf8Data, conn)
else
this._closeWSConnection(conn, true)
})
this.conn[conn.id] = conn
status.clients = status.clients + 1
status.maxConn = Math.max(status.maxConn, Object.keys(this.conn).length)
} catch(e) {
Logger.error(e, `NotificationsService._initWSServer() : Error during request accept`)
}
})
}
/**
* Close a web sockets connection
* @param {object} conn - web socket connection
* @param {boolean} forcedClose - true if close initiated by server
*/
_closeWSConnection(conn, forcedClose) {
try {
for (let topic of conn.subs) {
this._unsub(topic, conn.id)
// Close initiated by client, remove subscriptions from cache
if (!forcedClose && this.cacheSubs.has(topic))
this.cacheSubs.del(topic)
}
if (this.conn[conn.id]) {
delete this.conn[conn.id]
status.clients = status.clients - 1
}
// Close initiated by server, drop the connection
if (forcedClose && conn.connected)
conn.drop(1008, 'Get out of here!')
debug && Logger.info(`Client ${conn.id} disconnected`)
} catch(e) {
Logger.error(e, 'NotificationsService._closeWSConnection()')
}
}
/**
* Filter potential duplicate subscriptions
* @param {string} msg - subscription received
* @returns {boolean} returns false if it's a duplicate, true otherwise.
*/
_filterWSMessage(msg) {
if (this.cacheSubs.has(msg)) {
debug && Logger.info('Duplicate subscriptions detected')
return false
} else {
this.cacheSubs.set(msg, true)
return true
}
}
/**
* Handle messages received over the web sockets
* (subscriptions)
* @param {string} msg - subscription received
* @param {object} conn - connection
*/
_handleWSMessage(msg, conn) {
try {
debug && Logger.info(`Received from client ${conn.id}: ${msg}`)
const data = JSON.parse(msg)
// Check authentication (if needed)
if (authMgr.authActive && authMgr.isMandatory) {
try {
authMgr.isAuthenticated(msg.at)
} catch(e) {
this.notifyAuthError(e, conn.id)
return
}
}
switch(data.op) {
case 'ping':
conn.sendUTF('{"op": "pong"}')
break
case 'addr_sub':
if (data.addr) {
// Check for potential flood by clients
// subscribing for the same xpub again and again
if (this._filterWSMessage(data.addr))
this._entitysub(data.addr, conn)
else
this._closeWSConnection(conn, true)
}
break
case 'blocks_sub':
this._addsub('block', conn)
break
}
} catch(e) {
Logger.error(e, 'NotificationsService._handleWSMessage() : WebSocket message error')
}
}
/**
* Subscribe to a list of addresses/xpubs/pubkeys
* @param {string} topic - topic
* @param {object} conn - connection asking for subscription
*/
_entitysub(topic, conn) {
const valid = apiHelper.parseEntities(topic)
for (let a in valid.addrs) {
const address = valid.addrs[a]
this._addsub(address, conn)
if (valid.pubkeys[a]) {
this.cachePubKeys[address] = valid.pubkeys[a]
}
}
for (let xpub of valid.xpubs)
this._addsub(xpub, conn)
}
/**
* Subscribe to a topic
* @param {string} topic - topic
* @param {object} conn - connection asking for subscription
*/
_addsub(topic, conn) {
if (conn.subs.indexOf(topic) >= 0)
return false
conn.subs.push(topic)
if (!this.subs[topic])
this.subs[topic] = []
this.subs[topic].push(conn.id)
debug && Logger.info(`Client ${conn.id} subscribed to ${topic}`)
}
/**
* Unsubscribe from a topic
* @param {string} topic - topic
* @param {int} cid - client id
*/
_unsub(topic, cid) {
if (!this.subs[topic])
return false
const index = this.subs[topic].indexOf(cid)
if (index < 0)
return false
this.subs[topic].splice(index, 1)
if (this.subs[topic].length == 0) {
delete this.subs[topic]
if (this.cachePubKeys.hasOwnProperty(topic))
delete this.cachePubKeys[topic]
}
return true
}
/**
* Dispatch a notification to all clients
* who have subscribed to a topic
* @param {string} topic - topic
* @param {string} msg - content of the notification
*/
dispatch(topic, msg) {
if (!this.subs[topic])
return
for (let cid of this.subs[topic]) {
if (!this.conn[cid])
continue
try {
this.conn[cid].sendUTF(msg)
} catch(e) {
Logger.error(e, `NotificationsService.dispatch() : Error sending dispatch for ${topic} to client ${cid}`)
}
}
}
/**
* Dispatch notifications for a new block
* @param {string} header - block header
*/
notifyBlock(header) {
try {
const data = {
op: 'block',
x: header
}
this.dispatch('block', JSON.stringify(data))
} catch(e) {
Logger.error(e, `NotificationsService.notifyBlock()`)
}
}
/**
* Dispatch notifications for a transaction
*
* Transaction notification operates within these constraints:
* 1. Notify each client ONCE of a relevant transaction
* 2. Maintain privacy of other parties when transactions are between clients
*
* Clients subscribe to a list of xpubs and addresses. Transactions identify
* address and xpub if available on inputs and outputs, omitting inputs and
* outputs for untracked addresses.
*
* Example:
* tx
* inputs
* addr1
* xpub2
* outputs
* xpub1
* xpub2
* addr2
* xpub3
*
* subs
* addr1: client1, client2
* addr2: client1
* xpub1: client1
* xpub2: client2
* xpub4: client3
*
* client1: addr1, addr2, xpub1
* client2: addr1, xpub2
* client3: xpub4
*
* tx -> client1
* inputs
* addr1
* outputs
* xpub1
* addr2
*
* tx -> client2
* inputs
* addr1
* xpub2
* outputs
* xpub2
*
* @param {object} tx - transaction
*
* @note Synchronous processing done by this method
* may become a bottleneck in the future if under heavy load.
* Split in multiple async calls might make sense.
*/
notifyTransaction(tx) {
try {
// Topics extracted from the transaction
const topics = {}
// Client subscriptions: {[cid]: [topic1, topic2, ...]}
const clients = {}
// Extract topics from the inputs
for (let i in tx.inputs) {
let input = tx.inputs[i]
let topic = null
if (input.prev_out) {
// Topic is either xpub or addr. Should it be both?
if (input.prev_out.xpub) {
topic = input.prev_out.xpub.m
} else if (input.prev_out.addr) {
topic = input.prev_out.addr
}
}
if (this.subs[topic]) {
topics[topic] = true
// Add topic information to the input
input.topic = topic
}
}
// Extract topics from the outputs
for (let o in tx.out) {
let output = tx.out[o]
let topic = null
if (output.xpub) {
topic = output.xpub.m
} else if (output.addr) {
topic = output.addr
}
if (this.subs[topic]) {
topics[topic] = true
// Add topic information to the output
output.topic = topic
}
}
for (let topic in topics) {
for (let cid of this.subs[topic]) {
if (!clients[cid])
clients[cid] = []
if (clients[cid].indexOf(topic) == -1)
clients[cid].push(topic)
}
}
// Tailor a transaction for each client
for (let cid in clients) {
const ctx = _.cloneDeep(tx)
ctx.inputs = []
ctx.out = []
// List of topics relevant to this client
const clientTopics = clients[cid]
// Check for topic information on inputs & outputs (added above)
for (let input of tx.inputs) {
const topic = input.topic
if (topic && clientTopics.indexOf(topic) > -1) {
const cin = _.cloneDeep(input)
delete cin.topic
if (this.cachePubKeys.hasOwnProperty(topic))
cin.pubkey = this.cachePubKeys[topic]
ctx.inputs.push(cin)
}
}
for (let output of tx.out) {
const topic = output.topic
if (topic && clientTopics.indexOf(topic) > -1) {
const cout = _.cloneDeep(output)
delete cout.topic
if (this.cachePubKeys.hasOwnProperty(topic))
cout.pubkey = this.cachePubKeys[topic]
ctx.out.push(cout)
}
}
// Move on if the custom transaction has no inputs or outputs
if (ctx.inputs.length == 0 && ctx.out.length == 0)
continue
// Send custom transaction to client
const data = {
op: 'utx',
x: ctx
}
try {
this.conn[cid].sendUTF(JSON.stringify(data))
debug && Logger.error(`Sent ctx ${ctx.hash} to client ${cid}`)
} catch(e) {
Logger.error(e, `NotificationsService.notifyTransaction() : Trouble sending ctx to client ${cid}`)
}
}
} catch(e) {
Logger.error(e, `NotificationsService.notifyTransaction()`)
}
}
/**
* Dispatch notification for an authentication error
* @param {string} err - error
* @param {integer} cid - connection id
*/
notifyAuthError(err, cid) {
const data = {
op: 'error',
msg: err
}
try {
this.conn[cid].sendUTF(JSON.stringify(data))
debug && Logger.error(`Sent authentication error to client ${cid}`)
} catch(e) {
Logger.error(e, `NotificationsService.notifyAuthError() : Trouble sending authentication error to client ${cid}`)
}
}
}
module.exports = NotificationsService

56
accounts/status-rest-api.js

@ -0,0 +1,56 @@
/*!
* accounts/status-rest-api.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const Logger = require('../lib/logger')
const network = require('../lib/bitcoin/network')
const keys = require('../keys')[network.key]
const authMgr = require('../lib/auth/authorizations-manager')
const HttpServer = require('../lib/http-server/http-server')
const status = require('./status')
const debugApi = !!(process.argv.indexOf('api-debug') > -1)
/**
 * REST API exposing the status of the accounts endpoints
 */
class StatusRestApi {

  /**
   * Constructor
   * @param {pushtx.HttpServer} httpServer - HTTP server
   */
  constructor(httpServer) {
    this.httpServer = httpServer
    // Register the status route (restricted to the admin profile)
    const app = this.httpServer.app
    app.get(
      `/${keys.prefixes.status}/`,
      authMgr.checkHasAdminProfile.bind(authMgr),
      this.getStatus.bind(this),
      HttpServer.sendAuthError
    )
  }

  /**
   * Return information about the api as prettified JSON
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  async getStatus(req, res) {
    try {
      const currentStatus = await status.getCurrent()
      const payload = JSON.stringify(currentStatus, null, 2)
      HttpServer.sendRawData(res, payload)
    } catch(e) {
      HttpServer.sendError(res, e)
    } finally {
      debugApi && Logger.info(`Completed GET /status`)
    }
  }

}
module.exports = StatusRestApi

52
accounts/status.js

@ -0,0 +1,52 @@
/*!
* accounts/status.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const util = require('../lib/util')
const db = require('../lib/db/mysql-db-wrapper')
/**
 * Singleton providing information about the accounts endpoints
 */
class Status {

  /**
   * Constructor
   */
  constructor() {
    // Startup timestamp (ms since epoch), used to compute the uptime
    this.t0 = Date.now()
    // Websocket counters maintained by the notifications server
    this.clients = 0
    this.sessions = 0
    this.maxConn = 0
  }

  /**
   * Get current status
   * @returns {Promise<object>} status object
   */
  async getCurrent() {
    const elapsedSeconds = (Date.now() - this.t0) / 1000
    const uptime = util.timePeriod(elapsedSeconds, false)
    const memory = `${util.toMb(process.memoryUsage().rss)} MiB`
    // Get highest block processed by the tracker
    const highest = await db.getHighestBlock()
    return {
      uptime: uptime,
      memory: memory,
      ws: {
        clients: this.clients,
        sessions: this.sessions,
        max: this.maxConn
      },
      blocks: highest.blockHeight
    }
  }

}
module.exports = new Status()

384
accounts/support-rest-api.js

@ -0,0 +1,384 @@
/*!
* accounts/support-rest-api.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const heapdump = require('heapdump')
const validator = require('validator')
const bodyParser = require('body-parser')
const errors = require('../lib/errors')
const Logger = require('../lib/logger')
const authMgr = require('../lib/auth/authorizations-manager')
const HttpServer = require('../lib/http-server/http-server')
const network = require('../lib/bitcoin/network')
const hdaService = require('../lib/bitcoin/hd-accounts-service')
const addrService = require('../lib/bitcoin/addresses-service')
const HdAccountInfo = require('../lib/wallet/hd-account-info')
const AddressInfo = require('../lib/wallet/address-info')
const apiHelper = require('./api-helper')
const keys = require('../keys')[network.key]
const debugApi = !!(process.argv.indexOf('api-debug') > -1)
/**
 * Support API endpoints
 * (admin-only diagnostic and maintenance routes)
 */
class SupportRestApi {

  /**
   * Constructor
   * @param {pushtx.HttpServer} httpServer - HTTP server
   */
  constructor(httpServer) {
    this.httpServer = httpServer

    // Establish routes
    // (all routes are GET requests restricted to the admin profile)
    this.httpServer.app.get(
      `/${keys.prefixes.support}/address/:addr/info`,
      authMgr.checkHasAdminProfile.bind(authMgr),
      this.validateAddress.bind(this),
      this.getAddressInfo.bind(this),
      HttpServer.sendAuthError
    )

    this.httpServer.app.get(
      `/${keys.prefixes.support}/address/:addr/rescan`,
      authMgr.checkHasAdminProfile.bind(authMgr),
      this.validateAddress.bind(this),
      this.getAddressRescan.bind(this),
      HttpServer.sendAuthError
    )

    this.httpServer.app.get(
      `/${keys.prefixes.support}/xpub/:xpub/info`,
      authMgr.checkHasAdminProfile.bind(authMgr),
      this.validateArgsGetXpubInfo.bind(this),
      this.getXpubInfo.bind(this),
      HttpServer.sendAuthError
    )

    this.httpServer.app.get(
      `/${keys.prefixes.support}/xpub/:xpub/rescan`,
      authMgr.checkHasAdminProfile.bind(authMgr),
      this.validateArgsGetXpubRescan.bind(this),
      this.getXpubRescan.bind(this),
      HttpServer.sendAuthError
    )

    this.httpServer.app.get(
      `/${keys.prefixes.support}/dump/heap`,
      authMgr.checkHasAdminProfile.bind(authMgr),
      this.getHeapDump.bind(this),
      HttpServer.sendAuthError
    )

    this.httpServer.app.get(
      `/${keys.prefixes.support}/pairing`,
      authMgr.checkHasAdminProfile.bind(authMgr),
      this.getPairing.bind(this),
      HttpServer.sendAuthError
    )
  }

  /**
   * Retrieve information for a given address
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  async getAddressInfo(req, res) {
    try {
      // Parse the entities passed as url params
      const entities = apiHelper.parseEntities(req.params.addr).addrs
      if (entities.length == 0)
        return HttpServer.sendError(res, errors.address.INVALID)

      const address = entities[0]
      const info = new AddressInfo(address)
      await info.loadInfoExtended()
      await info.loadTransactions()
      await info.loadUtxos()
      const ret = this._formatAddressInfoResult(info)
      HttpServer.sendRawData(res, ret)
    } catch(e) {
      HttpServer.sendError(res, errors.generic.GEN)
    } finally {
      debugApi && Logger.info(`Completed GET /support/address/${req.params.addr}/info`)
    }
  }

  /**
   * Format response to be returned
   * for calls to getAddressInfo
   * @param {AddressInfo} info
   * @returns {string} return the json to be sent as a response
   */
  _formatAddressInfoResult(info) {
    const res = info.toPojoExtended()
    /*res._endpoints = []

    if (info.tracked) {
      res._endpoints.push({
        task: 'Rescan this address from remote sources',
        url: `/${keys.prefixes.support}/address/${info.address}/rescan`
      })
    }

    if (info.xpub != null) {
      res._endpoints.push({
        task: 'Get information about the HD account that owns this address',
        url: `/${keys.prefixes.support}/xpub/${info.xpub}/info`
      })

      res._endpoints.push({
        task: 'Rescan the whole HD account that owns this address',
        url: `/${keys.prefixes.support}/xpub/${info.xpub}/rescan`
      })
    }*/

    return JSON.stringify(res, null, 2)
  }

  /**
   * Rescan the blockchain for a given address
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  async getAddressRescan(req, res) {
    try {
      // Parse the entities passed as url params
      const entities = apiHelper.parseEntities(req.params.addr).addrs
      if (entities.length == 0)
        return HttpServer.sendError(res, errors.address.INVALID)

      const address = entities[0]

      const ret = {
        status: 'Rescan complete',
        /*_endpoints: [{
          task: 'Get updated information about this address',
          url: `/${keys.prefixes.support}/address/${address}/info`
        }]*/
      }

      await addrService.rescan(address)
      HttpServer.sendRawData(res, JSON.stringify(ret, null, 2))
    } catch(e) {
      HttpServer.sendError(res, errors.generic.GEN)
    } finally {
      debugApi && Logger.info(`Completed GET /support/address/${req.params.addr}/rescan`)
    }
  }

  /**
   * Retrieve information for a given hd account
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  async getXpubInfo(req, res) {
    try {
      // Parse the entities passed as url params
      const entities = apiHelper.parseEntities(req.params.xpub).xpubs
      if (entities.length == 0)
        return HttpServer.sendError(res, errors.xpub.INVALID)

      const xpub = entities[0]
      let info

      try {
        info = new HdAccountInfo(xpub)
        await info.loadInfo()
        const ret = this._formatXpubInfoResult(info)
        HttpServer.sendRawData(res, ret)
      } catch(e) {
        if(e == errors.db.ERROR_NO_HD_ACCOUNT) {
          // Account is unknown to the db: return the default (untracked) info
          const ret = this._formatXpubInfoResult(info)
          HttpServer.sendRawData(res, ret)
        } else {
          HttpServer.sendError(res, errors.generic.GEN)
        }
      }
    } catch(e) {
      HttpServer.sendError(res, errors.generic.GEN)
    } finally {
      debugApi && Logger.info(`Completed GET /support/xpub/${req.params.xpub}/info`)
    }
  }

  /**
   * Format response to be returned
   * for calls to getXpubInfo
   * @param {HdAccountInfo} info
   * @returns {string} return the json to be sent as a response
   */
  _formatXpubInfoResult(info) {
    const res = info.toPojoExtended()
    /*res._endpoints = [{
      task: 'Rescan the whole HD account from remote sources',
      url: `/${keys.prefixes.support}/xpub/${info.xpub}/rescan`
    }]*/
    return JSON.stringify(res, null, 2)
  }

  /**
   * Rescan the blockchain for a given hd account
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  async getXpubRescan(req, res) {
    try {
      // Parse the entities passed as url params
      const entities = apiHelper.parseEntities(req.params.xpub).xpubs
      if (entities.length == 0)
        return HttpServer.sendError(res, errors.xpub.INVALID)

      const xpub = entities[0]

      const ret = {
        status: 'Rescan complete',
        /*_endpoints: [{
          task: 'Get updated information about this HD account',
          url: `/${keys.prefixes.support}/xpub/${xpub}/info`
        }]*/
      }

      // Optional query params controlling the rescan window
      const gapLimit = req.query.gap != null ? parseInt(req.query.gap, 10) : 0
      const startIndex = req.query.startidx != null ? parseInt(req.query.startidx, 10) : 0

      try {
        await hdaService.rescan(xpub, gapLimit, startIndex)
        HttpServer.sendRawData(res, JSON.stringify(ret, null, 2))
      } catch(e) {
        if (e == errors.db.ERROR_NO_HD_ACCOUNT) {
          ret.status = 'Error: Not tracking xpub'
          HttpServer.sendRawData(res, JSON.stringify(ret, null, 2))
        } else if (e == errors.xpub.OVERLAP) {
          ret.status = 'Error: Rescan in progress'
          HttpServer.sendRawData(res, JSON.stringify(ret, null, 2))
        } else {
          ret.status = 'Rescan Error'
          Logger.error(e, 'SupportRestApi.getXpubRescan() : Support rescan error')
          HttpServer.sendError(res, JSON.stringify(ret, null, 2))
        }
      }
    } catch(e) {
      HttpServer.sendError(res, errors.generic.GEN)
    } finally {
      debugApi && Logger.info(`Completed GET /support/xpub/${req.params.xpub}/rescan`)
    }
  }

  /**
   * Get a dump of the heap
   * and store it on the filesystem
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  async getHeapDump(req, res) {
    try {
      // The snapshot is written asynchronously; the response is sent
      // immediately without waiting for the dump to complete
      heapdump.writeSnapshot(function(err, filename) {
        if (err)
          // Fixed: callback error was previously ignored
          Logger.error(err, 'SupportRestApi.getHeapDump() : Error while writing the snapshot')
        else
          // Fixed: template previously interpolated nothing (`$(unknown)`)
          Logger.info(`Dump written to ${filename}`)
      })
      HttpServer.sendOk(res)
    } catch(e) {
      const ret = {
        status: 'error'
      }
      Logger.error(e, 'SupportRestApi.getHeapDump() : Support heap dump error')
      HttpServer.sendError(res, JSON.stringify(ret, null, 2))
    } finally {
      debugApi && Logger.info(`Completed GET /dump/heap`)
    }
  }

  /**
   * Get pairing info
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  async getPairing(req, res) {
    try {
      const ret = {
        'pairing': {
          'type': 'dojo.api',
          'version': keys.dojoVersion,
          'apikey': keys.auth.strategies.localApiKey.apiKeys[0]
        }
      }
      HttpServer.sendRawData(res, JSON.stringify(ret, null, 2))
    } catch(e) {
      const ret = {
        status: 'error'
      }
      Logger.error(e, 'SupportRestApi.getPairing() : Support pairing error')
      HttpServer.sendError(res, JSON.stringify(ret, null, 2))
    } finally {
      debugApi && Logger.info(`Completed GET /pairing`)
    }
  }

  /**
   * Validate arguments related to GET xpub info requests
   * @param {object} req - http request object
   * @param {object} res - http response object
   * @param {function} next - next express middleware
   */
  validateArgsGetXpubInfo(req, res, next) {
    const isValidXpub = validator.isAlphanumeric(req.params.xpub)

    if (!isValidXpub) {
      HttpServer.sendError(res, errors.body.INVDATA)
      Logger.error(null, `SupportRestApi.validateArgsGetXpubInfo() : Invalid xpub ${req.params.xpub}`)
    } else {
      next()
    }
  }

  /**
   * Validate arguments related to GET xpub rescan requests
   * @param {object} req - http request object
   * @param {object} res - http response object
   * @param {function} next - next express middleware
   */
  validateArgsGetXpubRescan(req, res, next) {
    const isValidXpub = validator.isAlphanumeric(req.params.xpub)
    const isValidGap = !req.query.gap || validator.isInt(req.query.gap)

    if (!(isValidXpub && isValidGap)) {
      HttpServer.sendError(res, errors.body.INVDATA)
      Logger.error(null, 'SupportRestApi.validateArgsGetXpubRescan() : Invalid arguments')
    } else {
      next()
    }
  }

  /**
   * Validate arguments related to addresses requests
   * @param {object} req - http request object
   * @param {object} res - http response object
   * @param {function} next - next express middleware
   */
  validateAddress(req, res, next) {
    const isValidAddress = validator.isAlphanumeric(req.params.addr)

    if (!isValidAddress) {
      HttpServer.sendError(res, errors.body.INVDATA)
      Logger.error(null, `SupportRestApi.validateAddress() : Invalid address ${req.params.addr}`)
    } else {
      next()
    }
  }

}
module.exports = SupportRestApi

156
accounts/transactions-rest-api.js

@ -0,0 +1,156 @@
/*!
* accounts/transactions-fees-rest-api.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const validator = require('validator')
const Logger = require('../lib/logger')
const errors = require('../lib/errors')
const rpcTxns = require('../lib/bitcoind-rpc/transactions')
const authMgr = require('../lib/auth/authorizations-manager')
const HttpServer = require('../lib/http-server/http-server')
const walletService = require('../lib/wallet/wallet-service')
const network = require('../lib/bitcoin/network')
const apiHelper = require('./api-helper')
const keys = require('../keys')[network.key]
const debugApi = !!(process.argv.indexOf('api-debug') > -1)
/**
 * Transactions API endpoints
 */
class TransactionsRestApi {

  /**
   * Constructor
   * @param {pushtx.HttpServer} httpServer - HTTP server
   */
  constructor(httpServer) {
    this.httpServer = httpServer

    // Establish routes
    this.httpServer.app.get(
      '/tx/:txid',
      authMgr.checkAuthentication.bind(authMgr),
      this.validateArgsGetTransaction.bind(this),
      this.getTransaction.bind(this),
      HttpServer.sendAuthError
    )

    this.httpServer.app.get(
      '/txs',
      authMgr.checkAuthentication.bind(authMgr),
      apiHelper.validateEntitiesParams.bind(apiHelper),
      this.validateArgsGetTransactions.bind(this),
      this.getTransactions.bind(this),
      HttpServer.sendAuthError
    )
  }

  /**
   * Retrieve the transaction for a given txid
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  async getTransaction(req, res) {
    try {
      const tx = await rpcTxns.getTransaction(req.params.txid, req.query.fees)
      const ret = JSON.stringify(tx, null, 2)
      HttpServer.sendRawData(res, ret)
    } catch(e) {
      HttpServer.sendError(res, e)
    } finally {
      const strParams = `${req.query.fees ? req.query.fees : ''}`
      debugApi && Logger.info(`Completed GET /tx/${req.params.txid} ${strParams}`)
    }
  }

  /**
   * Retrieve a page of transactions related to a wallet
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  async getTransactions(req, res) {
    try {
      // Check request params
      if (!apiHelper.checkEntitiesParams(req.query))
        return HttpServer.sendError(res, errors.multiaddr.NOACT)

      // Parse params (radix 10 made explicit)
      const active = apiHelper.parseEntities(req.query.active)
      const page = req.query.page != null ? parseInt(req.query.page, 10) : 0
      const count = req.query.count != null ? parseInt(req.query.count, 10) : keys.multiaddr.transactions

      const result = await walletService.getWalletTransactions(active, page, count)
      const ret = JSON.stringify(result, null, 2)
      HttpServer.sendRawData(res, ret)
    } catch(e) {
      HttpServer.sendError(res, e)
    } finally {
      const strParams =
        `${req.query.active} \
${req.query.page ? req.query.page : ''} \
${req.query.count ? req.query.count : ''}`
      debugApi && Logger.info(`Completed GET /txs ${strParams}`)
    }
  }

  /**
   * Validate arguments of /tx requests
   * @param {object} req - http request object
   * @param {object} res - http response object
   * @param {function} next - next express middleware
   */
  validateArgsGetTransaction(req, res, next) {
    const isValidTxid = validator.isHash(req.params.txid, 'sha256')

    const isValidFees =
      !req.query.fees
      || validator.isAlphanumeric(req.query.fees)

    if (!(isValidTxid && isValidFees)) {
      HttpServer.sendError(res, errors.body.INVDATA)
      // Fixed: log message previously referenced HeadersRestApi
      Logger.error(
        req.params,
        'TransactionsRestApi.validateArgsGetTransaction() : Invalid arguments'
      )
      Logger.error(req.query, '')
    } else {
      next()
    }
  }

  /**
   * Validate arguments of /txs requests
   * @param {object} req - http request object
   * @param {object} res - http response object
   * @param {function} next - next express middleware
   */
  validateArgsGetTransactions(req, res, next) {
    const isValidPage =
      !req.query.page
      || validator.isInt(req.query.page)

    const isValidCount =
      !req.query.count
      || validator.isInt(req.query.count)

    if (!(isValidPage && isValidCount)) {
      HttpServer.sendError(res, errors.body.INVDATA)
      // Fixed: log message previously referenced HeadersRestApi
      Logger.error(
        req.query,
        'TransactionsRestApi.validateArgsGetTransactions() : Invalid arguments'
      )
    } else {
      next()
    }
  }

}
module.exports = TransactionsRestApi

136
accounts/unspent-rest-api.js

@ -0,0 +1,136 @@
/*!
* accounts/unspent-rest-api.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const bodyParser = require('body-parser')
const Logger = require('../lib/logger')
const errors = require('../lib/errors')
const walletService = require('../lib/wallet/wallet-service')
const authMgr = require('../lib/auth/authorizations-manager')
const HttpServer = require('../lib/http-server/http-server')
const apiHelper = require('./api-helper')
const debugApi = !!(process.argv.indexOf('api-debug') > -1)
/**
 * Unspent API endpoints
 */
class UnspentRestApi {

  /**
   * Constructor
   * @param {pushtx.HttpServer} httpServer - HTTP server
   */
  constructor(httpServer) {
    this.httpServer = httpServer

    // Body parser used by the POST route
    const urlencodedParser = bodyParser.urlencoded({ extended: true })

    // Establish routes
    this.httpServer.app.get(
      '/unspent',
      authMgr.checkAuthentication.bind(authMgr),
      apiHelper.validateEntitiesParams.bind(apiHelper),
      this.getUnspent.bind(this),
      HttpServer.sendAuthError
    )

    this.httpServer.app.post(
      '/unspent',
      urlencodedParser,
      authMgr.checkAuthentication.bind(authMgr),
      apiHelper.validateEntitiesParams.bind(apiHelper),
      this.postUnspent.bind(this),
      HttpServer.sendAuthError
    )
  }

  /**
   * Handle unspent GET request
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  async getUnspent(req, res) {
    // Check request params
    if (!apiHelper.checkEntitiesParams(req.query))
      return HttpServer.sendError(res, errors.multiaddr.NOACT)

    // Parse params
    const parsed = apiHelper.parseEntitiesParams(req.query)

    try {
      const utxos = await walletService.getWalletUtxos(
        parsed.active,
        parsed.legacy,
        parsed.bip49,
        parsed.bip84,
        parsed.pubkey
      )
      HttpServer.sendRawData(res, JSON.stringify(utxos, null, 2))
    } catch(e) {
      HttpServer.sendError(res, e)
    } finally {
      if (debugApi) {
        const strParams =
          `${req.query.active ? req.query.active : ''} \
${req.query.new ? req.query.new : ''} \
${req.query.pubkey ? req.query.pubkey : ''} \
${req.query.bip49 ? req.query.bip49 : ''} \
${req.query.bip84 ? req.query.bip84 : ''}`
        Logger.info(`Completed GET /unspent ${strParams}`)
      }
    }
  }

  /**
   * Handle unspent POST request
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  async postUnspent(req, res) {
    // Check request params
    if (!apiHelper.checkEntitiesParams(req.body))
      return HttpServer.sendError(res, errors.multiaddr.NOACT)

    // Parse params
    const parsed = apiHelper.parseEntitiesParams(req.body)

    try {
      const utxos = await walletService.getWalletUtxos(
        parsed.active,
        parsed.legacy,
        parsed.bip49,
        parsed.bip84,
        parsed.pubkey
      )
      HttpServer.sendOkDataOnly(res, utxos)
    } catch(e) {
      HttpServer.sendError(res, e)
    } finally {
      if (debugApi) {
        const strParams =
          `${req.body.active ? req.body.active : ''} \
${req.body.new ? req.body.new : ''} \
${req.body.pubkey ? req.body.pubkey : ''} \
${req.body.bip49 ? req.body.bip49 : ''} \
${req.body.bip84 ? req.body.bip84 : ''}`
        Logger.info(`Completed POST /unspent ${strParams}`)
      }
    }
  }

}
module.exports = UnspentRestApi

450
accounts/xpub-rest-api.js

@ -0,0 +1,450 @@
/*!
* accounts/xpub-rest-api.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const validator = require('validator')
const bodyParser = require('body-parser')
const errors = require('../lib/errors')
const network = require('../lib/bitcoin/network')
const Logger = require('../lib/logger')
const db = require('../lib/db/mysql-db-wrapper')
const hdaHelper = require('../lib/bitcoin/hd-accounts-helper')
const hdaService = require('../lib/bitcoin/hd-accounts-service')
const RpcClient = require('../lib/bitcoind-rpc/rpc-client')
const HdAccountInfo = require('../lib/wallet/hd-account-info')
const authMgr = require('../lib/auth/authorizations-manager')
const HttpServer = require('../lib/http-server/http-server')
const debugApi = !!(process.argv.indexOf('api-debug') > -1)
const gap = require('../keys/')[network.key].gap
/**
 * XPub API endpoints
 */
class XPubRestApi {

  /**
   * Constructor
   * @param {pushtx.HttpServer} httpServer - HTTP server
   */
  constructor(httpServer) {
    this.httpServer = httpServer

    // Initialize the rpc client
    this.rpcClient = new RpcClient()

    // Establish routes
    const urlencodedParser = bodyParser.urlencoded({ extended: true })

    this.httpServer.app.post(
      '/xpub/',
      urlencodedParser,
      authMgr.checkAuthentication.bind(authMgr),
      this.validateArgsPostXpub.bind(this),
      this.postXpub.bind(this),
      HttpServer.sendAuthError
    )

    this.httpServer.app.get(
      '/xpub/:xpub',
      authMgr.checkAuthentication.bind(authMgr),
      this.validateArgsGetXpub.bind(this),
      this.getXpub.bind(this),
      HttpServer.sendAuthError
    )

    this.httpServer.app.post(
      '/xpub/:xpub/lock',
      urlencodedParser,
      authMgr.checkAuthentication.bind(authMgr),
      this.validateArgsPostLockXpub.bind(this),
      this.postLockXpub.bind(this),
      HttpServer.sendAuthError
    )

    this.httpServer.app.delete(
      '/xpub/:xpub',
      urlencodedParser,
      authMgr.checkAuthentication.bind(authMgr),
      this.validateArgsDeleteXpub.bind(this),
      this.deleteXpub.bind(this),
      HttpServer.sendAuthError
    )
  }

  /**
   * Handle xPub POST request
   * (import of a new hd account or restore of an existing one)
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  async postXpub(req, res) {
    try {
      let xpub

      // Check request arguments
      if (!req.body)
        return HttpServer.sendError(res, errors.body.NODATA)

      if (!req.body.xpub)
        return HttpServer.sendError(res, errors.body.NOXPUB)

      if (!req.body.type)
        return HttpServer.sendError(res, errors.body.NOTYPE)

      // Extracts arguments
      const argXpub = req.body.xpub
      const argSegwit = req.body.segwit
      const argAction = req.body.type
      const argForceOverride = req.body.force

      // Translate xpub if needed
      try {
        const ret = this.xlatHdAccount(argXpub, true)
        xpub = ret.xpub
      } catch(e) {
        return HttpServer.sendError(res, e)
      }

      // Define the derivation scheme
      let scheme = hdaHelper.BIP44

      if (argSegwit) {
        const segwit = argSegwit.toLowerCase()
        if (segwit == 'bip49')
          scheme = hdaHelper.BIP49
        else if (segwit == 'bip84')
          scheme = hdaHelper.BIP84
        else
          return HttpServer.sendError(res, errors.xpub.SEGWIT)
      }

      // Define default forceOverride if needed
      const forceOverride = argForceOverride ? argForceOverride : false

      // Process action
      if (argAction == 'new') {
        // New hd account
        try {
          await hdaService.createHdAccount(xpub, scheme)
          HttpServer.sendOk(res)
        } catch(e) {
          HttpServer.sendError(res, e)
        }
      } else if (argAction == 'restore') {
        // Restore hd account
        try {
          await hdaService.restoreHdAccount(xpub, scheme, forceOverride)
          HttpServer.sendOk(res)
        } catch(e) {
          HttpServer.sendError(res, e)
        }
      } else {
        // Unknown action
        return HttpServer.sendError(res, errors.body.INVTYPE)
      }
    } catch(e) {
      return HttpServer.sendError(res, errors.generic.GEN)
    } finally {
      debugApi && Logger.info(`Completed POST /xpub ${req.body.xpub}`)
    }
  }

  /**
   * Handle xPub GET request
   * (balance, unused indices and derivation info of an hd account)
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  async getXpub(req, res) {
    try {
      let xpub

      // Extracts arguments
      const argXpub = req.params.xpub

      // Translate xpub if needed
      try {
        const ret = this.xlatHdAccount(argXpub)
        xpub = ret.xpub
      } catch(e) {
        return HttpServer.sendError(res, e)
      }

      const hdaInfo = new HdAccountInfo(xpub)
      const info = await hdaInfo.loadInfo()

      if (!info)
        return Promise.reject()

      const ret = {
        balance: hdaInfo.finalBalance,
        unused: {
          external: hdaInfo.accountIndex,
          internal: hdaInfo.changeIndex,
        },
        derivation: hdaInfo.derivation,
        created: hdaInfo.created
      }

      HttpServer.sendOkData(res, ret)
    } catch(e) {
      Logger.error(e, 'XpubRestApi.getXpub()')
      HttpServer.sendError(res, e)
    } finally {
      debugApi && Logger.info(`Completed GET /xpub/${req.params.xpub}`)
    }
  }

  /**
   * Handle Lock XPub POST request
   * (locks/unlocks the derivation scheme of an hd account,
   * authenticated by a signed message)
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  async postLockXpub(req, res) {
    try {
      let xpub, scheme

      // Check request arguments
      if (!req.body)
        return HttpServer.sendError(res, errors.body.NODATA)

      if (!req.body.address)
        return HttpServer.sendError(res, errors.body.NOADDR)

      if (!req.body.signature)
        return HttpServer.sendError(res, errors.body.NOSIG)

      if (!req.body.message)
        return HttpServer.sendError(res, errors.body.NOMSG)

      if (!(req.body.message == 'lock' || req.body.message == 'unlock'))
        return HttpServer.sendError(res, errors.sig.INVMSG)

      // Extract arguments
      const argXpub = req.params.xpub
      const argAddr = req.body.address
      const argSig = req.body.signature
      const argMsg = req.body.message

      // Translate xpub if needed
      try {
        const ret = this.xlatHdAccount(argXpub)
        xpub = ret.xpub
        scheme = ret.scheme
      } catch(e) {
        return HttpServer.sendError(res, e)
      }

      try {
        // Check the signature and process the request
        await hdaService.verifyXpubSignature(xpub, argAddr, argSig, argMsg, scheme)
        // 'unlock' disables the lock, 'lock' (the only other accepted message) enables it
        const lock = (argMsg == 'unlock') ? false : true
        const ret = await hdaService.lockHdAccount(xpub, lock)
        HttpServer.sendOkData(res, {derivation: ret})
      } catch(e) {
        HttpServer.sendError(res, errors.generic.GEN)
      }
    } catch(e) {
      // Fixed: outer catch was missing (try/finally only), so an unexpected
      // error escaped as an unhandled rejection with no HTTP response.
      // Now consistent with deleteXpub.
      HttpServer.sendError(res, errors.generic.GEN)
    } finally {
      debugApi && Logger.info(`Completed POST /xpub/${req.params.xpub}/lock`)
    }
  }

  /**
   * Handle XPub DELETE request
   * (deletion is authenticated by a signed message)
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  async deleteXpub(req, res) {
    try {
      let xpub, scheme

      // Check request arguments
      if (!req.body)
        return HttpServer.sendError(res, errors.body.NODATA)

      if (!req.body.address)
        return HttpServer.sendError(res, errors.body.NOADDR)

      if (!req.body.signature)
        return HttpServer.sendError(res, errors.body.NOSIG)

      // Extract arguments
      const argXpub = req.params.xpub
      const argAddr = req.body.address
      const argSig = req.body.signature

      // Translate xpub if needed
      try {
        const ret = this.xlatHdAccount(argXpub)
        xpub = ret.xpub
        scheme = ret.scheme
      } catch(e) {
        return HttpServer.sendError(res, e)
      }

      try {
        // Check the signature and process the request
        // (the signed message is the xpub itself)
        await hdaService.verifyXpubSignature(xpub, argAddr, argSig, argXpub, scheme)
        await hdaService.deleteHdAccount(xpub)
        HttpServer.sendOk(res)
      } catch(e) {
        HttpServer.sendError(res, e)
      }
    } catch(e) {
      HttpServer.sendError(res, errors.generic.GEN)
    } finally {
      debugApi && Logger.info(`Completed DELETE /xpub/${req.params.xpub}`)
    }
  }

  /**
   * Translate a ypub/zpub into a xpub
   * @param {string} origXpub - original hd account
   * @param {boolean} trace - flag indicating if we should trace the conversion in our logs
   * @returns {object} returns an object {xpub: <translated_xpub>, scheme: <derivation_scheme>}
   *  or raises an exception
   */
  xlatHdAccount(origXpub, trace) {
    try {
      // Translate xpub if needed
      let xpub = origXpub
      let scheme = hdaHelper.BIP44

      const isYpub = hdaHelper.isYpub(origXpub)
      const isZpub = hdaHelper.isZpub(origXpub)

      if (isYpub || isZpub) {
        xpub = hdaHelper.xlatXPUB(origXpub)
        scheme = isYpub ? hdaHelper.BIP49 : hdaHelper.BIP84
        if (trace) {
          Logger.info('Converted: ' + origXpub)
          Logger.info('Resulting xpub: ' + xpub)
        }
      }

      if (!hdaHelper.isValid(xpub))
        throw errors.xpub.INVALID

      return {
        xpub: xpub,
        scheme: scheme
      }
    } catch(e) {
      // Preserve the PRIVKEY error, collapse everything else to INVALID
      const err = (e == errors.xpub.PRIVKEY) ? e : errors.xpub.INVALID
      throw err
    }
  }

  /**
   * Validate arguments of postXpub requests
   * @param {object} req - http request object
   * @param {object} res - http response object
   * @param {function} next - next express middleware
   */
  validateArgsPostXpub(req, res, next) {
    const isValidXpub = validator.isAlphanumeric(req.body.xpub)

    const isValidSegwit =
      !req.body.segwit
      || validator.isAlphanumeric(req.body.segwit)

    const isValidType =
      !req.body.type
      || validator.isAlphanumeric(req.body.type)

    const isValidForce =
      !req.body.force
      || validator.isAlphanumeric(req.body.force)

    if (!(isValidXpub && isValidSegwit && isValidType && isValidForce)) {
      HttpServer.sendError(res, errors.body.INVDATA)
      Logger.error(
        req.body,
        'XpubRestApi.validateArgsPostXpub() : Invalid arguments'
      )
    } else {
      next()
    }
  }

  /**
   * Validate arguments of getXpub requests
   * @param {object} req - http request object
   * @param {object} res - http response object
   * @param {function} next - next express middleware
   */
  validateArgsGetXpub(req, res, next) {
    const isValidXpub = validator.isAlphanumeric(req.params.xpub)

    if (!isValidXpub) {
      HttpServer.sendError(res, errors.body.INVDATA)
      Logger.error(
        req.params.xpub,
        'XpubRestApi.validateArgsGetXpub() : Invalid arguments'
      )
    } else {
      next()
    }
  }

  /**
   * Validate arguments of postLockXpub requests
   * @param {object} req - http request object
   * @param {object} res - http response object
   * @param {function} next - next express middleware
   */
  validateArgsPostLockXpub(req, res, next) {
    const isValidXpub = validator.isAlphanumeric(req.params.xpub)
    const isValidAddr = validator.isAlphanumeric(req.body.address)
    const isValidSig = validator.isBase64(req.body.signature)
    const isValidMsg = validator.isAlphanumeric(req.body.message)

    if (!(isValidXpub && isValidAddr && isValidSig && isValidMsg)) {
      HttpServer.sendError(res, errors.body.INVDATA)
      Logger.error(
        req.params,
        'XpubRestApi.validateArgsPostLockXpub() : Invalid arguments'
      )
      Logger.error(req.body, '')
    } else {
      next()
    }
  }

  /**
   * Validate arguments of deleteXpub requests
   * @param {object} req - http request object
   * @param {object} res - http response object
   * @param {function} next - next express middleware
   */
  validateArgsDeleteXpub(req, res, next) {
    const isValidXpub = validator.isAlphanumeric(req.params.xpub)
    const isValidAddr = validator.isAlphanumeric(req.body.address)
    const isValidSig = validator.isBase64(req.body.signature)

    if (!(isValidXpub && isValidAddr && isValidSig)) {
      HttpServer.sendError(res, errors.body.INVDATA)
      Logger.error(
        req.params,
        'XpubRestApi.validateArgsDeleteXpub() : Invalid arguments'
      )
      Logger.error(req.body, '')
    } else {
      next()
    }
  }

}
module.exports = XPubRestApi

212
db-scripts/1_db.sql

@ -0,0 +1,212 @@
# Database tables
# Copyright © 2019 – Katana Cryptographic Ltd. All Rights Reserved.
# Naming conventions
# 1. Table names are lowercase plural
# 2. Join table names are snake_case plural
# 3. Column names have a table prefix
# 4. Foreign key names match primary key of foreign table
# The /*!NNNNN ... */ constructs below are MySQL "conditional comments":
# servers of version >= NNNNN execute them, other servers treat them as
# plain comments. This preamble saves the current session settings and
# disables unique/foreign-key checks so the tables can be created in any
# order; the matching block at the end of the script restores everything.
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
--
-- Table structure for table `addresses`
--
-- One row per distinct address known to the server, deduplicated by the
-- unique key on addrAddress. Other tables reference rows here via addrID.
-- varchar(74) is presumably sized for long address formats (e.g. bech32)
-- -- TODO confirm against the address import code.
DROP TABLE IF EXISTS `addresses`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `addresses` (
`addrID` int(10) unsigned NOT NULL AUTO_INCREMENT,
`addrAddress` varchar(74) DEFAULT NULL,
PRIMARY KEY (`addrID`),
UNIQUE KEY `addrAddress` (`addrAddress`)
) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `banned_addresses`
--
-- Addresses stored here are presumably excluded from server processing
-- -- TODO confirm semantics against the code that queries this table.
-- NOTE(review): addrAddress is varchar(35) here but varchar(74) in
-- `addresses`; confirm longer (bech32) addresses never need banning,
-- or align the column widths.
DROP TABLE IF EXISTS `banned_addresses`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `banned_addresses` (
`bannedAddressId` int(11) NOT NULL AUTO_INCREMENT,
`addrAddress` varchar(35) NOT NULL,
PRIMARY KEY (`bannedAddressId`),
UNIQUE KEY `banned_addresses_addresses` (`addrAddress`)
) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `blocks`
--
-- Block headers, chained via the self-referencing blockParent foreign
-- key (each row points at its parent block's blockID). ON DELETE SET
-- NULL lets a parent row be removed without cascading to children --
-- presumably used during chain reorganizations; TODO confirm.
DROP TABLE IF EXISTS `blocks`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `blocks` (
`blockID` int(10) unsigned NOT NULL AUTO_INCREMENT,
`blockHash` char(64) NOT NULL DEFAULT '',
`blockParent` int(10) unsigned DEFAULT NULL,
`blockHeight` int(10) unsigned NOT NULL DEFAULT '0',
`blockTime` int(10) unsigned NOT NULL DEFAULT '0',
PRIMARY KEY (`blockID`),
UNIQUE KEY `blockHash` (`blockHash`),
KEY `blockParent` (`blockParent`),
KEY `blockHeight` (`blockHeight`),
CONSTRAINT `blocks_ibfk_1` FOREIGN KEY (`blockParent`) REFERENCES `blocks` (`blockID`) ON DELETE SET NULL ON UPDATE NO ACTION
) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `hd`
--
-- HD accounts, one row per extended public key (unique hdXpub).
-- hdCreated is a unix timestamp (matches the int(10) unsigned pattern
-- used for times elsewhere in this schema). hdType presumably encodes
-- the derivation scheme (BIP44/49/84) and/or a locked flag -- TODO
-- confirm against the application code.
DROP TABLE IF EXISTS `hd`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `hd` (
`hdID` int(10) unsigned NOT NULL AUTO_INCREMENT,
`hdXpub` char(112) DEFAULT NULL,
`hdCreated` int(10) unsigned NOT NULL DEFAULT '0',
`hdType` smallint(5) unsigned NOT NULL DEFAULT '0',
PRIMARY KEY (`hdID`),
UNIQUE KEY `hdXpub` (`hdXpub`),
KEY `hdCreated` (`hdCreated`)
) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `hd_addresses`
--
-- Join table linking HD accounts (`hd`) to their derived addresses
-- (`addresses`). hdAddrChain/hdAddrIndex record the derivation position
-- (chain, index) of the address within the account. Cascading deletes
-- on both foreign keys: removing an account or address removes the link.
-- Unique (hdID, addrID) forbids duplicate links for the same pair.
DROP TABLE IF EXISTS `hd_addresses`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `hd_addresses` (
`hdAddrID` int(10) unsigned NOT NULL AUTO_INCREMENT,
`hdID` int(10) unsigned NOT NULL DEFAULT '0',
`addrID` int(10) unsigned NOT NULL DEFAULT '0',
`hdAddrChain` smallint(5) unsigned NOT NULL DEFAULT '0',
`hdAddrIndex` int(10) unsigned NOT NULL DEFAULT '0',
PRIMARY KEY (`hdAddrID`),
UNIQUE KEY `hdID_2` (`hdID`,`addrID`),
KEY `hdID` (`hdID`),
KEY `addrID` (`addrID`),
CONSTRAINT `hd_addresses_ibfk_1` FOREIGN KEY (`hdID`) REFERENCES `hd` (`hdID`) ON DELETE CASCADE ON UPDATE CASCADE,
CONSTRAINT `hd_addresses_ibfk_2` FOREIGN KEY (`addrID`) REFERENCES `addresses` (`addrID`) ON DELETE CASCADE ON UPDATE CASCADE
) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `inputs`
--
-- Transaction inputs. Each input belongs to a transaction (txnID) and
-- spends a previously seen output (outID). inIndex is the input's
-- position within the transaction -- unique per (txnID, inIndex).
-- Deleting a transaction or the spent output cascades to its inputs.
DROP TABLE IF EXISTS `inputs`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `inputs` (
`inID` int(10) unsigned NOT NULL AUTO_INCREMENT,
`outID` int(10) unsigned NOT NULL DEFAULT '0',
`txnID` int(10) unsigned NOT NULL DEFAULT '0',
`inIndex` int(10) unsigned NOT NULL DEFAULT '0',
`inSequence` int(10) unsigned NOT NULL DEFAULT '0',
PRIMARY KEY (`inID`),
UNIQUE KEY `txnID_2` (`txnID`,`inIndex`),
KEY `outID` (`outID`),
KEY `txnID` (`txnID`),
CONSTRAINT `inputs_ibfk_1` FOREIGN KEY (`txnID`) REFERENCES `transactions` (`txnID`) ON DELETE CASCADE ON UPDATE CASCADE,
CONSTRAINT `inputs_ibfk_2` FOREIGN KEY (`outID`) REFERENCES `outputs` (`outID`) ON DELETE CASCADE ON UPDATE CASCADE
) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `outputs`
--
-- Transaction outputs. Each row links a transaction to an address with
-- the output's position (outIndex), amount in satoshi (outAmount) and
-- raw script hex (outScript). NOTE(review): the unique key includes
-- addrID, so one on-chain output paying multiple tracked addresses
-- (e.g. bare multisig) would presumably be stored as several rows with
-- the same (txnID, outIndex) -- confirm against the tracker code.
DROP TABLE IF EXISTS `outputs`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `outputs` (
`outID` int(10) unsigned NOT NULL AUTO_INCREMENT,
`txnID` int(10) unsigned NOT NULL DEFAULT '0',
`addrID` int(10) unsigned NOT NULL DEFAULT '0',
`outIndex` int(10) unsigned NOT NULL DEFAULT '0',
`outAmount` bigint(20) unsigned NOT NULL DEFAULT '0',
`outScript` varchar(20000) NOT NULL DEFAULT '',
PRIMARY KEY (`outID`),
UNIQUE KEY `txnID_2` (`txnID`,`addrID`,`outIndex`),
KEY `txnID` (`txnID`),
KEY `addrID` (`addrID`),
CONSTRAINT `outputs_ibfk_1` FOREIGN KEY (`txnID`) REFERENCES `transactions` (`txnID`) ON DELETE CASCADE ON UPDATE CASCADE,
CONSTRAINT `outputs_ibfk_2` FOREIGN KEY (`addrID`) REFERENCES `addresses` (`addrID`) ON DELETE CASCADE ON UPDATE CASCADE
) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `transactions`
--
-- Transactions, unique by txid. blockID is nullable and is set to NULL
-- when the referenced block row is deleted (ON DELETE SET NULL); a NULL
-- blockID presumably denotes an unconfirmed/mempool transaction --
-- TODO confirm against the tracker code.
DROP TABLE IF EXISTS `transactions`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `transactions` (
`txnID` int(10) unsigned NOT NULL AUTO_INCREMENT,
`txnTxid` char(64) DEFAULT NULL,
`txnCreated` int(10) unsigned NOT NULL DEFAULT '0',
`txnVersion` int(10) unsigned NOT NULL DEFAULT '0',
`txnLocktime` int(10) unsigned NOT NULL DEFAULT '0',
`blockID` int(10) unsigned DEFAULT NULL,
PRIMARY KEY (`txnID`),
UNIQUE KEY `txnTxid` (`txnTxid`),
KEY `txnCreated` (`txnCreated`),
KEY `blockID` (`blockID`),
CONSTRAINT `transactions_ibfk_1` FOREIGN KEY (`blockID`) REFERENCES `blocks` (`blockID`) ON DELETE SET NULL ON UPDATE NO ACTION
) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `scheduled_transactions`
--
-- Transactions scheduled for later broadcast (pushtx schedule feature).
-- schRaw holds the raw transaction hex; schParentID/schParentTxid chain
-- a transaction to the one it must follow (self-referencing FK).
-- schDelay and schTrigger are presumably a relative delay and the
-- absolute trigger condition (e.g. block height or timestamp) -- TODO
-- confirm against the pushtx orchestrator code.
DROP TABLE IF EXISTS `scheduled_transactions`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `scheduled_transactions` (
`schID` int(10) unsigned NOT NULL AUTO_INCREMENT,
`schTxid` char(64) NOT NULL DEFAULT '',
`schCreated` int(10) unsigned NOT NULL DEFAULT '0',
`schRaw` varchar(50000) NOT NULL DEFAULT '',
`schParentID` int(10) unsigned DEFAULT NULL,
`schParentTxid` char(64) DEFAULT '',
`schDelay` int(10) unsigned NOT NULL DEFAULT '0',
`schTrigger` int(10) unsigned NOT NULL DEFAULT '0',
PRIMARY KEY (`schID`),
UNIQUE KEY `schTxid` (`schTxid`),
KEY `schParentID` (`schParentID`),
CONSTRAINT `scheduled_transactions_ibfk_1` FOREIGN KEY (`schParentID`) REFERENCES `scheduled_transactions` (`schID`) ON DELETE SET NULL ON UPDATE NO ACTION
) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
-- Restore the session settings saved by the preamble at the top of this
-- script (time zone, SQL mode, key checks, character sets, notes).
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;

37
doc/DELETE_xpub.md

@ -0,0 +1,37 @@
# Delete HD Account
Remove an HD account from the server. All addresses and transactions associated with the HD account will be removed. Transactions that are also associated with another `xpub` will remain.
Note: this endpoint uses the HTTP `DELETE` verb.
```
DELETE /xpub/:xpub
```
## Parameters
* **address** - `string` - The first address of the internal chain for this `xpub`, derivation path `M/1/0`. Use compressed P2PKH address regardless of HD derivation scheme.
* **signature** - `string` - The base64-encoded signature of the double SHA256 hash of `[varuint length of xpub string, xpub string]`. Signature scheme follows [bitcoinjs-message](https://github.com/bitcoinjs/bitcoinjs-message/blob/master/index.js) with a message prefix matching the [coin type](https://github.com/bitcoinjs/bitcoinjs-lib/blob/v3.1.1/src/networks.js). Use the ECPair associated with the `M/1/0` address to sign.
* **at** - `string` (optional) - Access Token (json web token). Required if authentication is activated.
### Example
```
DELETE /xpub/xpub0123456789?address=1address&signature=Base64X==
```
#### Success
Status code 200 with JSON response:
```json
{
"status": "ok"
}
```
#### Failure
Status code 400 with JSON response:
```json
{
"status": "error",
"error": "<error message>"
}
```

196
doc/DOCKER_setup.md

@ -0,0 +1,196 @@
# Installation of Dojo with Docker and Docker Compose
MyDojo is a set of Docker containers providing a full Samourai backend composed of:
* a bitcoin full node accessible as an ephemeral Tor hidden service,
* the backend database,
* the backend modules with an API accessible as a static Tor hidden service,
* a maintenance tool accessible through a Tor web browser.
## Architecture ##
------------------- ------------------- --------------------
| Samourai Wallet | | Sentinel | | Bitcoin full nodes |
------------------- ------------------- --------------------
|_______________________|_______________________|
|
------------
Tor network
------------
|
Host machine | (Tor - port 80)
______________________________ | _____________________________
| | |
| ------------------- |
| | Tor Container | |
| ------------------- |
| | | |
| ------------------- | |
| | Nginx Container | | dmznet |
| ------------------- | |
|- - - - - - - - - - - | - - - - - - - | - - - - - - - - - - - |
| -------------------- -------------------- |
| | Nodejs Container | ------ | Bitcoind Container | |
| -------------------- -------------------- |
| | |
| ------------------- |
| | MySQL Container | dojonet |
| ------------------- |
|______________________________________________________________|
## Requirements ##
* A dedicated computer (host machine) connected 24/7 to internet
* OS: Linux is recommended
* Disk: 500GB (minimal) / 1TB (recommended) - SSD is recommended
* RAM: 4GB (minimal)
* Docker and Docker Compose installed on the host machine (be sure to run a recent version supporting v3.2 of docker-compose files, i.e. Docker Engine v17.04.0+)
* Check that the clock of your computer is properly set (required for Tor)
* Tor Browser installed on the host machine (or on another machine if your host is a headless server)
## Setup ##
* Install [Docker and Docker Compose](https://docs.docker.com/compose/install/) on the host machine and check that your installation is working.
* Install [Tor Browser](https://www.torproject.org/projects/torbrowser.html.en) on the host machine.
* Download the most recent version of Dojo from [Github](https://github.com/Samourai-Wallet/samourai-dojo/archive/master.zip)
* Uncompress the archive on the host machine in a temporary directory of your choice (named /tmp_dir in this doc)
* Create a directory for Dojo (named /dojo_dir in this doc)
* Copy the content of the "/tmp_dir/samourai-dojo-master" directory into the "/dojo_dir" directory
* Customize the configuration of your Dojo
* Go to the "/dojo_dir/docker/my-dojo/conf" directory
* Edit docker-bitcoind.conf and provide a new value for the following parameters:
* BITCOIND_RPC_USER = login protecting the access to the RPC API of your full node,
* BITCOIND_RPC_PASSWORD = password protecting the access to the RPC API of your full node.
* If your machine has a lot of RAM, it's recommended that you increase the value of BITCOIND_DB_CACHE for a faster Initial Block Download.
* Edit docker-mysql.conf and provide a new value for the following parameters:
* MYSQL_ROOT_PASSWORD = password protecting the root account of MySQL,
* MYSQL_USER = login of the account used to access the database of your Dojo,
* MYSQL_PASSWORD = password of the account used to access the database of your Dojo.
* Edit docker-node.conf and provide a new value for the following parameters:
* NODE_API_KEY = API key which will be required from your Samourai Wallet / Sentinel for its interactions with the API of your Dojo,
* NODE_ADMIN_KEY = API key which will be required from the maintenance tool for accessing a set of advanced features provided by the API of your Dojo,
* NODE_JWT_SECRET = secret used by your Dojo for the initialization of a cryptographic key signing Json Web Tokens.
These parameters will protect the access to your Dojo. Be sure to provide alphanumeric values with enough entropy.
* Open the docker quickstart terminal or a terminal console and go to the "/dojo_dir/docker/my-dojo" directory. This directory contains a script named dojo.sh which will be your entrypoint for all operations related to the management of your Dojo.
* Launch the installation of your Dojo with
```
./dojo.sh install
```
Docker and Docker Compose are going to build the images and containers of your Dojo. This operation will take a few minutes (download and setup of all required software components). After completion, your Dojo will be launched and will begin the initialization of the full node (Bitcoin Initial Block Download and syncing of the database). This step will take several hours/days according to the specs of your machine. Be patient. Use CTRL+C to stop the display of the full logs.
* Monitor the progress made for the initialization of the database with this command displaying the logs of the tracker
```
./dojo.sh logs tracker
```
Exit the logs with CTRL+C when the syncing of the database has completed.
* Retrieve the Tor onion addresses (v2 and v3) of the API of your Dojo
```
./dojo.sh onion
```
* Restrict the access to your host machine as much as possible by configuring its firewall.
## Dojo shell script ##
dojo.sh is a multifeature tool allowing to interact with your Dojo.
```
Usage: ./dojo.sh command [module] [options]
Available commands:
help Display the help message.
bitcoin-cli Launch a bitcoin-cli console for interacting with bitcoind RPC API.
install Install your Dojo.
logs [module] [options] Display the logs of your Dojo. Use CTRL+C to stop the logs.
Available modules:
dojo.sh logs : display the logs of all containers
dojo.sh logs bitcoind : display the logs of bitcoind
dojo.sh logs db : display the logs of the MySQL database
dojo.sh logs tor : display the logs of tor
dojo.sh logs api : display the logs of the REST API (nodejs)
dojo.sh logs tracker : display the logs of the Tracker (nodejs)
dojo.sh logs pushtx : display the logs of the pushTx API (nodejs)
dojo.sh logs pushtx-orchest : display the logs of the Orchestrator (nodejs)
Available options (for api, tracker, pushtx and pushtx-orchest modules):
-d [VALUE] : select the type of log to be displayed.
VALUE can be output (default) or error.
-n [VALUE] : display the last VALUE lines
onion Display the Tor onion address allowing your wallet to access your Dojo.
restart Restart your Dojo.
start Start your Dojo.
stop Stop your Dojo.
uninstall Delete your Dojo. Be careful! This command will also remove all data.
```
## Dojo maintenance tool ##
A maintenance tool is accessible through your Tor browser at the url: <v2_onion_address>/admin
The maintenance tool requires that you allow javascript for the site.
Sign in with the value entered for NODE_ADMIN_KEY.
## Pairing ##
Once the database has finished syncing, you can pair your Samourai Wallet with your Dojo in 2 steps:
* Open the maintenance tool in a Tor browser and sign in with your admin key.
* Get your smartphone and launch the Samourai Wallet app. Scan the QRCode displayed in the "Pairing" tab of the maintenance tool.
## Network connections ##
The API of your Dojo is accessed as a Tor hidden service (static onion address).
If OXT is selected as the default source for imports, OXT clearnet API is accessed through the Tor local proxy.
The maintenance tool is accessed as a Tor hidden service (static onion address).
The Bitcoin node only allows incoming connections from Tor (dynamic onion address).
The Bitcoin node attempts outgoing connections to both Tor and clearnet nodes (through the Tor local proxy).

39
doc/GET_fees.md

@ -0,0 +1,39 @@
# Get Fees
Returns `bitcoind`'s estimated fee rates for inclusion in blocks at various delays. Fee rates are in Satoshi/byte.
```
GET /fees
```
## Parameters
* **at** - `string` (optional) - Access Token (json web token). Required if authentication is activated.
### Examples
```
GET /fees
```
#### Success
Status code 200 with JSON response:
```json
{
"2": 181,
"4": 150,
"6": 150,
"12": 111,
"24": 62
}
```
#### Failure
Status code 400 with JSON response:
```json
{
"status": "error",
"error": "<error message>"
}
```

47
doc/GET_header.md

@ -0,0 +1,47 @@
# Get Block Header
Request the header for a given block.
```
GET /header/:hash
```
## Parameters
* **hash** - `string` - The block hash
* **at** - `string` (optional) - Access Token (json web token). Required if authentication is activated.
### Examples
```
GET /header/000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
```
#### Success
Status code 200 with JSON response:
```json
{
"hash": "000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f",
"confirmations": 475000,
"height": 0,
"version": 1,
"versionHex": "00000001",
"merkleroot": "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b",
"time": 1231006505,
"mediantime": 1231006505,
"nonce": 2083236893,
"bits": "1d00ffff",
"difficulty": 1,
"chainwork": "0000000000000000000000000000000000000000000000000000000100010001",
"nextblockhash": "00000000839a8e6886ab5951d76f411475428afc90947ee320161bbf18eb6048"
}
```
#### Failure
Status code 400 with JSON response:
```json
{
"status": "error",
"error": "<error message>"
}
```

117
doc/GET_multiaddr.md

@ -0,0 +1,117 @@
# Get Multiaddr
Request details about a collection of HD accounts and/or loose addresses and/or public keys. If accounts do not exist, they will be created with a relayed call to the [POST /xpub](./POST_xpub.md) mechanics if new or will be imported from external data sources. Instruct the server that entities are new with `?new=xpub1|addr2|addr3` in the query parameters, and the server will skip importing for those entities. SegWit support via BIP49 is activated for new xpubs with `?bip49=xpub3|xpub4`. SegWit support via BIP84 is activated for new xpubs with `?bip84=xpub3|xpub4`. Pass xpubs to `?bip49` or `?bip84` only for newly-created accounts. Support of BIP47 (with addresses derived in 3 formats (P2PKH, P2WPKH/P2SH, P2WPKH Bech32)) is activated for new pubkeys with `?pubkey=pubkey1|pubkey2`.
Note that loose addresses that are also part of one of the HD accounts requested will be ignored. Their balances and transactions are listed as part of the HD account result.
The `POST` version of multiaddr is identical, except the `active` and `new` parameters are in the POST body.
```
GET /multiaddr?active=...[&new=...][&bip49=...][&bip84=...][&pubkey=...]
```
## Parameters
* **active** - `string` - A pipe-separated list of extended public keys and/or loose addresses and/or pubkeys (`xpub1|address1|address2|pubkey1|...`)
* **new** - `string` - A pipe-separated list of extended public keys and/or loose addresses that need no import from external services
* **bip49** - `string` - A pipe-separated list of new extended public keys to be derived via [BIP49](https://github.com/bitcoin/bips/blob/master/bip-0049.mediawiki)
* **bip84** - `string` - A pipe-separated list of new extended public keys to be derived via [BIP84](https://github.com/bitcoin/bips/blob/master/bip-0084.mediawiki)
* **pubkey** - `string` - A pipe-separated list of public keys to be derived as P2PKH, P2WPKH/P2SH, P2WPKH Bech32 addresses.
* **at** - `string` (optional) - Access Token (json web token). Required if authentication is activated.
### Examples
```
GET /multiaddr?active=xpub0123456789&new=address2|address3&pubkey=pubkey4
GET /multiaddr?active=xpub0123456789|address1|address2
GET /multiaddr?bip49=xpub0123456789
GET /multiaddr?bip84=xpub0123456789
GET /multiaddr?pubkey=0312345678901
```
#### Success
Status code 200 with JSON response:
```json
{
"wallet": {
"final_balance": 100000000
},
"info": {
"latest_block": {
"height": 100000,
"hash": "abcdef",
"time": 1000000000
}
},
"addresses": [
{
"address": "xpubABCDEF -or- 1xAddress",
"pubkey": "04Pubkey -or- nonexistent attribute",
"final_balance": 100000000,
"account_index": 0,
"change_index": 0,
"n_tx": 0
}
],
"txs": [
{
"block_height": 100000,
"hash": "abcdef",
"version": 1,
"locktime": 0,
"result": -10000,
"balance": 90000,
"time": 1400000000,
"inputs": [
{
"vin": 1,
"prev_out": {
"txid": "abcdef",
"vout": 2,
"value": 20000,
"xpub": {
"m": "xpubABCDEF",
"path": "M/0/3"
},
"addr": "1xAddress",
"pubkey": "04Pubkey"
},
"sequence": 4294967295
}
],
"out": [
{
"n": 2,
"value": 10000,
"addr": "1xAddress",
"pubkey": "03Pubkey",
"xpub": {
"m": "xpubABCDEF",
"path": "M/1/5"
}
}
]
}
]
}
```
**Notes**
* The transaction `inputs` and `out` arrays are for known addresses only and do not reflect the full input and output list of the transaction on the blockchain
* `result.addresses[i].n_tx` used by BIP47 logic to determine unused index
* `result.txs[i].block_height` should not be present for unconfirmed transactions
* `result.txs[i].result` is the change in value for the "wallet" as defined by all entries on the `active` query parameter
* `result.txs[i].inputs[j].prev_out.addr` should be present for BIP47-related addresses but may be `null` if the previous output address is unknown
* `result.txs[i].out[j].addr` should be present for BIP47-related addresses
#### Failure
Status code 400 with JSON response:
```json
{
"status": "error",
"error": "<error message>"
}
```
## Notes
Multiaddr response is consumed by the wallet in the [APIFactory](https://github.com/Samourai-Wallet/samourai-wallet-android/blob/master/app/src/main/java/com/samourai/wallet/api/APIFactory.java)

131
doc/GET_tx.md

@ -0,0 +1,131 @@
# Get Transaction
Request details about a single Bitcoin transaction. Pass `?fees=1` to scan the previous outputs and compute the fees paid in this transaction.
```
GET /tx/:txid
GET /tx/:txid?fees=1
```
## Parameters
* **txid** - `string` - The transaction ID
* **fees** - `string` - (optional) Scan previous outputs to compute fees
* **at** - `string` (optional) - Access Token (json web token). Required if authentication is activated.
### Examples
```
GET /tx/abcdef
GET /tx/abcdef?fees=1
```
#### Success
Status code 200 with JSON response:
```json
{
"txid": "abcdef",
"size": 250,
"vsize": 125,
"version": 1,
"locktime": 0,
"block": {
"height": 100000,
"hash": "abcdef",
"time": 1400000000
},
"inputs": [
{
"n": 0,
"outpoint": {
"txid": "abcdef",
"vout": 2
},
"sig": "0a1b2c3d4e5f",
"seq": 4294967295
},
{
"n": 1,
"outpoint": {
"txid": "abcdef",
"vout": 3
},
"sig": "",
"seq": 4294967295,
"witness": [
"aabbccddeeff",
"00112233"
]
}
],
"outputs": [
{
"n": 0,
"value": 10000,
"scriptpubkey": "0a1b2c3d4e5f",
"type": "pubkeyhash",
"address": "1xAddress"
},
{
"n": 1,
"value": 0,
"scriptpubkey": "0a1b2c3d4e5f",
"type": "nulldata"
},
{
"n": 2,
"value": 10000,
"scriptpubkey": "0a1b2c3d4e5f",
"type": "multisig",
"addresses": [
"1xAddress",
"1yAddress"
]
},
{
"n": 3,
"value": 10000,
"scriptpubkey": "000a1b2c3d4e5f",
"type": "witness_v0_scripthash"
},
{
"n": 4,
"value": 10000,
"scriptpubkey": "000b1b2c3d4e5f",
"type": "witness_v0_keyhash"
}
]
}
```
Additional fields with `?fees=1`:
```json
{
"fees": 10000,
"feerate": 50,
"vfeerate": 75,
"inputs": [
{
"outpoint": {
"value": 20000,
"scriptpubkey": "0a1b2c3d4e5f"
}
}
],
"outputs": ["..."]
}
```
**Notes**
* `block` details will be missing for unconfirmed transactions
* Input `sig` is the raw hex, not ASM of script signature
* `feerate` has units of Satoshi/byte
* `vsize` and `vfeerate` are the virtual size and virtual fee rate and are different than `size` and `feerate` for SegWit transactions
#### Failure
Status code 400 with JSON response:
```json
{
"status": "error",
"error": "<error message>"
}
```

87
doc/GET_txs.md

@ -0,0 +1,87 @@
# Get Transactions
Request a paginated list of transactions related to a collection of HD accounts and/or loose addresses and/or public keys.
Note that loose addresses that are also part of one of the HD accounts requested will be ignored. Their transactions are listed as part of the HD account result.
```
GET /txs?active=...
```
## Parameters
* **active** - `string` - A pipe-separated list of extended public keys and/or loose addresses and/or pubkeys (`xpub1|address1|address2|pubkey1|...`)
* **page** - `integer` - Index of the requested page (first page is index 0)
* **count** - `integer` - Number of transactions returned per page
* **at** - `string` (optional) - Access Token (json web token). Required if authentication is activated.
### Examples
```
GET /txs?active=xpub0123456789
GET /txs?active=xpub0123456789|address1|address2|pubkey1
```
#### Success
Status code 200 with JSON response:
```json
{
"n_tx": 153,
"page": 2,
"n_tx_page": 50,
"txs": [
{
"block_height": 100000,
"hash": "abcdef",
"version": 1,
"locktime": 0,
"result": -10000,
"time": 1400000000,
"inputs": [
{
"vin": 1,
"prev_out": {
"txid": "abcdef",
"vout": 2,
"value": 20000,
"xpub": {
"m": "xpubABCDEF",
"path": "M/0/3"
},
"addr": "1xAddress",
"pubkey": "04Pubkey"
},
"sequence": 4294967295
}
],
"out": [
{
"n": 2,
"value": 10000,
"addr": "1xAddress",
"pubkey": "03Pubkey",
"xpub": {
"m": "xpubABCDEF",
"path": "M/1/5"
}
}
]
}
]
}
```
**Notes**
* The transaction `inputs` and `out` arrays are for known addresses only and do not reflect the full input and output list of the transaction on the blockchain
* `result.txs[i].block_height` should not be present for unconfirmed transactions
* `result.txs[i].result` is the change in value for the "wallet" as defined by all entries on the `active` query parameter
* `result.txs[i].inputs[j].prev_out.addr` should be present for BIP47-related addresses but may be `null` if the previous output address is unknown
* `result.txs[i].out[j].addr` should be present for BIP47-related addresses
#### Failure
Status code 400 with JSON response:
```json
{
"status": "error",
"error": "<error message>"
}
```

61
doc/GET_unspent.md

@ -0,0 +1,61 @@
# Get Unspent
Request a list of unspent transaction outputs from a collection of HD accounts and/or loose addresses and/or public keys. If accounts do not exist, they will be created with a relayed call to the [POST /xpub](./POST_xpub.md) mechanics if new or will be imported from external data sources. Instruct the server that entities are new with `?new=xpub1|addr2|addr3` in the query parameters. SegWit support via BIP49 is activated for new xpubs with `?bip49=xpub3|xpub4`. SegWit support via BIP84 is activated for new xpubs with `?bip84=xpub3|xpub4`. Pass xpubs to `?bip49` or `?bip84` only for newly-created accounts. Support of BIP47 (with addresses derived in 3 formats (P2PKH, P2WPKH/P2SH, P2WPKH Bech32)) is activated for new pubkeys with `?pubkey=pubkey1|pubkey2`.
The `POST` version of unspent is identical, except the parameters are in the POST body.
```
GET /unspent?active=...&new=...&bip49=...&bip84=...&pubkey=...
```
## Parameters
* **active** - `string` - A pipe-separated list of extended public keys and/or loose addresses (`xpub1|address1|address2|...`)
* **new** - `string` - A pipe-separated list of extended public keys and/or loose addresses that need no import from external services
* **bip49** - `string` - A pipe-separated list of new extended public keys to be derived via [BIP49](https://github.com/bitcoin/bips/blob/master/bip-0049.mediawiki)
* **bip84** - `string` - A pipe-separated list of new extended public keys to be derived via [BIP84](https://github.com/bitcoin/bips/blob/master/bip-0084.mediawiki)
* **pubkey** - `string` - A pipe-separated list of public keys to be derived as P2PKH, P2WPKH/P2SH, P2WPKH Bech32 addresses.
* **at** - `string` (optional) - Access Token (json web token). Required if authentication is activated.
### Examples
```
GET /unspent?active=xpub0123456789&new=address2|address3&pubkey=pubkey4
GET /unspent?active=xpub0123456789|address1|address2|pubkey4
```
#### Success
Status code 200 with JSON response:
```json
{
"unspent_outputs": [
{
"tx_hash": "abcdef",
"tx_output_n": 2,
"tx_version": 1,
"tx_locktime": 0,
"value": 100000000,
"script": "abcdef",
"addr": "1xAddress",
"pubkey": "04Pubkey -or- nonexistent attribute",
"confirmations": 10000,
"xpub": {
"m": "xpub0123456789",
"path": "M/0/5"
}
}
]
}
```
#### Failure
Status code 400 with JSON response:
```json
{
"status": "error",
"error": "<error message>"
}
```
## Notes
Unspent response is consumed by the wallet in the [APIFactory](https://github.com/Samourai-Wallet/samourai-wallet-android/blob/master/app/src/main/java/com/samourai/wallet/api/APIFactory.java)

45
doc/GET_xpub.md

@ -0,0 +1,45 @@
# Get HD Account
Request details about an HD account. If account does not exist, it must be created with [POST /xpub](./POST_xpub.md), and this call will return an error.
Data returned includes the unspent `balance`, the next `unused` address indices for external and internal chains, the `derivation` path of addresses, and the `created` timestamp when the server first saw this HD account.
```
GET /xpub/:xpub
```
## Parameters
* **:xpub** - `string` - The extended public key for the HD Account
* **at** - `string` (optional) - Access Token (json web token). Required if authentication is activated.
### Example
```
GET /xpub/xpub0123456789
```
#### Success
Status code 200 with JSON response:
```json
{
"status": "ok",
"data": {
"balance": 100000000,
"unused": {
"external": 2,
"internal": 1
},
"derivation": "BIP44|BIP49",
"created": 1500000000
}
}
```
#### Failure
Status code 400 with JSON response:
```json
{
"status": "error",
"error": "<error message>"
}
```

69
doc/POST_auth_login.md

@ -0,0 +1,69 @@
# Authentication
Authenticate to the backend by providing the API key expected by the server. If authentication succeeds, the endpoint returns a json embedding an access token and a refresh token (JSON Web Tokens). The access token must be passed as an argument for all later calls to the backend (account & pushtx REST API + websockets). The refresh token must be passed as an argument for later calls to /auth/refresh allowing to generate a new access token.
Authentication is activated in the /keys/index.js configuration file
```
auth: {
// Name of the authentication strategy used
// Available values:
// null : No authentication
// 'localApiKey' : authentication with a shared local api key
activeStrategy: 'localApiKey',
// List of available authentication strategies
strategies: {
// Authentication with a shared local api key
localApiKey: {
// API key (alphanumeric characters)
apiKey: 'myApiKey',
// DO NOT MODIFY
configurator: 'localapikey-strategy-configurator'
}
},
// Configuration of Json Web Tokens
// used for the management of authorizations
jwt: {
// Secret passphrase used by the server to sign the jwt
// (alphanumeric characters)
secret: 'myJwtSecret',
accessToken: {
// Number of seconds after which the jwt expires
expires: 900
},
refreshToken: {
// Number of seconds after which the jwt expires
expires: 7200
}
}
},
```
```
POST /auth/login
```
## Parameters
* **apikey** - `string` - The API key securing access to the backend
### Example
```
POST /auth/login?apikey=myAPIKey
```
#### Success
Status code 200 with JSON response:
```json
{
"authorizations": {
"access_token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJTYW1vdXJhaSBXYWxsZXQgYmFja2VuZCIsInR5cGUiOiJhY2Nlc3MtdG9rZW4iLCJpYXQiOjE1NDQxMDM5MjksImV4cCI6MTU0NDEwNDUyOX0.DDzz0EUEQS8vqdhfUwi_MFhjnSLKZ9nY-P55Yoi0wlI",
"refresh_token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJTYW1vdXJhaSBXYWxsZXQgYmFja2VuZCIsInR5cGUiOiJyZWZyZXNoLXRva2VuIiwiaWF0IjoxNTQ0MTAzOTI5LCJleHAiOjE1NDQxMTExMjl9.6gykKq31WL4Jq7hfmoTwi1fpmBTtAeFb4KjfmSO6l00"
}
}
```
#### Failure
Status code 401

31
doc/POST_auth_refresh.md

@ -0,0 +1,31 @@
# Refresh the access token
Request a new access token from the backend. A valid refresh token must be passed as an argument.
```
POST /auth/refresh
```
## Parameters
* **rt** - `string` - A valid refresh token
### Example
```
POST /auth/refresh?rt=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJTYW1vdXJhaSBXYWxsZXQgYmFja2VuZCIsInR5cGUiOiJyZWZyZXNoLXRva2VuIiwiaWF0IjoxNTQ0MTAzOTI5LCJleHAiOjE1NDQxMTExMjl9.6gykKq31WL4Jq7hfmoTwi1fpmBTtAeFb4KjfmSO6l00
```
#### Success
Status code 200 with JSON response:
```json
{
"authorizations": {
"access_token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJTYW1vdXJhaSBXYWxsZXQgYmFja2VuZCIsInR5cGUiOiJhY2Nlc3MtdG9rZW4iLCJpYXQiOjE1NDQxMDM5MjksImV4cCI6MTU0NDEwNDUyOX0.DDzz0EUEQS8vqdhfUwi_MFhjnSLKZ9nY-P55Yoi0wlI"
}
}
```
#### Failure
Status code 401

38
doc/POST_pushtx.md

@ -0,0 +1,38 @@
# PushTX
Push a transaction to the network.
```
POST /pushtx/
```
## Parameters
* **tx** - `hex string` - The raw transaction hex
* **at** - `string` (optional) - Access Token (json web token). Required if authentication is activated.
### Example
```
POST /pushtx/?tx=abcdef0123456789
```
#### Success
Status code 200 with JSON response:
```json
{
"status": "ok",
"data": "<txid>"
}
```
#### Failure
Status code 400 with JSON response:
```json
{
"status": "error",
"error": {
"message": "<error message>",
"code": "<error code>"
}
}
```

121
doc/POST_pushtx_schedule.md

@ -0,0 +1,121 @@
# Scheduled PushTX
Schedule the delayed push of an ordered list of transactions (used for programmable Ricochet).
```
POST /pushtx/schedule
```
## Parameters
* **script** - `ScriptStep[]` - An array of ScriptStep objects defining the script.
## ScriptStep structure
* **hop** - `integer` - Index of this step in the script.
Transactions are pushed by ascending order of **hop** values.
* **nlocktime** - `integer` - Height of the block after which the transaction should be pushed to the network.
This value shouldn't be set too far in the future (default tolerance is currently the height of current tip + 18 blocks).
If step A has a **hop** value higher than step B, then step A MUST have a **nlocktime** greater than or equal to the **nlocktime** of step B.
If step A and step B have the same **hop** value, then they MAY HAVE different **nlocktime** values.
* **tx** - `string` - The raw transaction hex for the transaction to be pushed during this step.
The transaction MUST HAVE its nLockTime field filled with the height of a block.
The height of the block MUST BE equal to the value of the **nlocktime** field of the ScriptStep object.
### Examples
Ricochet-like script
```
tx0 -- tx1 -- tx2 -- tx3 -- tx4
POST /pushtx/schedule
Request Body (JSON-encoded)
{
"script": [{
"hop": 0,
"nlocktime": 549817,
"tx": "<tx0_raw_hex>"
}, {
"hop": 1,
"nlocktime": 549818,
"tx": "<tx1_raw_hex>"
}, {
"hop": 2,
"nlocktime": 549820,
"tx": "<tx2_raw_hex>"
}, {
"hop": 3,
"nlocktime": 549823,
"tx": "<tx3_raw_hex>"
}, {
"hop": 4,
"nlocktime": 549824,
"tx": "<tx4_raw_hex>"
}]
}
```
Serialized script with 2 parallel branches
```
-- tx1 -- tx3 ---------
tx0 --| |-- tx5
-- tx2 --------- tx4 --
POST /pushtx/schedule
Request Body (JSON-encoded)
{
"script": [{
"hop": 0,
"nlocktime": 549817,
"tx": "<tx0_raw_hex>"
}, {
"hop": 1,
"nlocktime": 549818,
"tx": "<tx1_raw_hex>"
}, {
"hop": 1,
"nlocktime": 549818,
"tx": "<tx2_raw_hex>"
}, {
"hop": 2,
"nlocktime": 549819,
"tx": "<tx3_raw_hex>"
}, {
"hop": 2,
"nlocktime": 549820,
"tx": "<tx4_raw_hex>"
}, {
"hop": 3,
"nlocktime": 549821,
"tx": "<tx5_raw_hex>"
}]
}
```
#### Success
Status code 200 with JSON response:
```json
{
"status": "ok"
}
```
#### Failure
Status code 400 with JSON response:
```json
{
"status": "error",
"error": "<error message>"
}
```

41
doc/POST_xpub.md

@ -0,0 +1,41 @@
# Add HD Account
Notify the server of the new HD account for tracking. When new accounts are sent, there is no need to rescan the addresses for existing transaction activity. SegWit support is provided via [BIP49](https://github.com/bitcoin/bips/blob/master/bip-0049.mediawiki) or [BIP84](https://github.com/bitcoin/bips/blob/master/bip-0084.mediawiki).
Response time for restored accounts might be long if there is significant previous activity.
```
POST /xpub
```
## Parameters
* **xpub** - `string` - The extended public key for the HD Account
* **type** - `string` - Whether this is a newly-created account or one being restored. Recognized values are `'new'` and `'restore'`.
* **segwit** - `string` (optional) - What type of SegWit support for this xpub, if any. Valid values: `'bip49'` and `'bip84'`
* **force** - `boolean` (optional) - Force an override of derivation scheme even if xpub is locked. Used for `'restore'` operation.
* **at** - `string` (optional) - Access Token (json web token). Required if authentication is activated.
### Example
```
POST /xpub?xpub=xpub0123456789&type=restore
POST /xpub?xpub=xpub0123456789&type=new&segwit=bip49
POST /xpub?xpub=xpub0123456789&type=restore&segwit=bip84
```
#### Success
Status code 200 with JSON response:
```json
{
"status": "ok"
}
```
#### Failure
Status code 400 with JSON response:
```json
{
"status": "error",
"error": "<error message>"
}
```

39
doc/POST_xpub_lock.md

@ -0,0 +1,39 @@
# Lock an HD Account Type
To avoid errors related to `POST xpub` and SegWit derivation type, this endpoint allows locking of the type of an xpub in the database.
```
POST /xpub/:xpub/lock
```
## Parameters
* **address** - `string` - The first address of the internal chain for this `xpub`, derivation path `M/1/0`. Use the compressed P2PKH address regardless of HD derivation scheme.
* **message** - `string` - Either `"lock"` or `"unlock"`
* **signature** - `string` - The base64-encoded signature of the double SHA256 hash of `[varuint length of message string, message string]`. Signature scheme follows [bitcoinjs-message](https://github.com/bitcoinjs/bitcoinjs-message/blob/master/index.js) with a message prefix matching the [coin type](https://github.com/bitcoinjs/bitcoinjs-lib/blob/v3.1.1/src/networks.js). Use the ECPair associated with the `M/1/0` address to sign.
* **at** - `string` (optional) - Access Token (json web token). Required if authentication is activated.
### Example
```
POST /xpub/xpub0123456789/lock?address=1address&message=lock&signature=Base64X==
```
#### Success
Status code 200 with JSON response:
```json
{
"status": "ok",
"data": {
"derivation": "LOCKED BIP49, etc"
}
}
```
#### Failure
Status code 400 with JSON response:
```json
{
"status": "error",
"error": "<error message>"
}
```

29
doc/README.md

@ -0,0 +1,29 @@
# Installation
# Endpoint documentation
Endpoint documentation is split into separate files and presented here under specific servers.
## Accounts Server
Keeps track of HD account balances, represented by `xpub` extended public keys.
### Endpoints
* [POST auth/login](./POST_auth_login.md)
* [GET multiaddr](./GET_multiaddr.md)
* [GET unspent](./GET_unspent.md)
* [GET xpub](./GET_xpub.md)
* [POST xpub](./POST_xpub.md)
* [POST xpub/lock](./POST_xpub_lock.md)
* [GET tx](./GET_tx.md)
* [GET txs](./GET_txs.md)
* [GET header](./GET_header.md)
* [GET fees](./GET_fees.md)
## PushTX Server
A simple server that relays transactions from the wallet to the full node.
### Endpoints
* [POST pushtx](./POST_pushtx.md)

52
docker/my-dojo/.env

@ -0,0 +1,52 @@
#########################################
# SYSTEM ENVIRONMENT VARIABLES
# DO NOT MODIFY
#########################################
#########################################
# GLOBAL
#########################################
COMPOSE_CONVERT_WINDOWS_PATHS=1
DOJO_VERSION_TAG=1.0.0
#########################################
# MYSQL
#########################################
MYSQL_DATABASE=samourai-main
#########################################
# BITCOIND
#########################################
BITCOIND_DNSSEED=0
BITCOIND_DNS=0
#########################################
# NODEJS
#########################################
NODE_GAP_EXTERNAL=100
NODE_GAP_INTERNAL=100
NODE_ADDR_FILTER_THRESHOLD=1000
NODE_URL_OXT_API=https://api.oxt.me
NODE_ADDR_DERIVATION_MIN_CHILD=2
NODE_ADDR_DERIVATION_MAX_CHILD=2
NODE_ADDR_DERIVATION_THRESHOLD=10
NODE_TXS_SCHED_MAX_ENTRIES=10
NODE_TXS_SCHED_MAX_DELTA_HEIGHT=18
NODE_JWT_ACCESS_EXPIRES=900
NODE_JWT_REFRESH_EXPIRES=7200
NODE_PREFIX_STATUS=status
NODE_PREFIX_SUPPORT=support
NODE_PREFIX_STATUS_PUSHTX=status
NODE_TRACKER_MEMPOOL_PERIOD=10000
NODE_TRACKER_UNCONF_TXS_PERIOD=300000

56
docker/my-dojo/bitcoin/Dockerfile

@ -0,0 +1,56 @@
FROM debian:stretch
#################################################################
# INSTALL BITCOIN
#################################################################
ENV BITCOIN_HOME /home/bitcoin
ENV BITCOIN_VERSION 0.18.0
ENV BITCOIN_URL https://bitcoincore.org/bin/bitcoin-core-0.18.0/bitcoin-0.18.0-x86_64-linux-gnu.tar.gz
ENV BITCOIN_SHA256 5146ac5310133fbb01439666131588006543ab5364435b748ddfc95a8cb8d63f
ENV BITCOIN_ASC_URL https://bitcoincore.org/bin/bitcoin-core-0.18.0/SHA256SUMS.asc
ENV BITCOIN_PGP_KEY 01EA5486DE18A882D4C2684590C8019E36C2E964
RUN set -ex && \
apt-get update && \
apt-get install -qq --no-install-recommends ca-certificates dirmngr gosu gpg wget && \
rm -rf /var/lib/apt/lists/*
# Build and install bitcoin binaries
RUN set -ex && \
cd /tmp && \
wget -qO bitcoin.tar.gz "$BITCOIN_URL" && \
echo "$BITCOIN_SHA256 bitcoin.tar.gz" | sha256sum -c - && \
gpg --batch --keyserver keyserver.ubuntu.com --recv-keys "$BITCOIN_PGP_KEY" && \
wget -qO bitcoin.asc "$BITCOIN_ASC_URL" && \
gpg --batch --verify bitcoin.asc && \
tar -xzvf bitcoin.tar.gz -C /usr/local --strip-components=1 --exclude=*-qt && \
rm -rf /tmp/*
# Create group & user bitcoin
RUN addgroup --system -gid 1108 bitcoin && \
adduser --system --ingroup bitcoin -uid 1105 bitcoin
# Create data directory
RUN mkdir "$BITCOIN_HOME/.bitcoin" && \
chown -h bitcoin:bitcoin "$BITCOIN_HOME/.bitcoin"
# Copy bitcoin config file
COPY ./bitcoin.conf "$BITCOIN_HOME/.bitcoin/bitcoin.conf"
RUN chown bitcoin:bitcoin "$BITCOIN_HOME/.bitcoin/bitcoin.conf"
# Copy restart script
COPY ./restart.sh /restart.sh
RUN chown bitcoin:bitcoin /restart.sh && \
chmod 777 /restart.sh
# Copy wait-for-it script
COPY ./wait-for-it.sh /wait-for-it.sh
RUN chown bitcoin:bitcoin /wait-for-it.sh && \
chmod u+x /wait-for-it.sh && \
chmod g+x /wait-for-it.sh
EXPOSE 8333 9501 9502 28256
USER bitcoin

22
docker/my-dojo/bitcoin/bitcoin.conf

@ -0,0 +1,22 @@
# Bitcoin Configuration
server=1
listen=1
bind=127.0.0.1
# Tor proxy through dojonet
proxy=172.28.1.4:9050
# Non-default RPC Port
rpcport=28256
rpcallowip=::/0
rpcbind=bitcoind
# Store transaction information for fully-spent txns
txindex=1
# No wallet
disablewallet=1
# ZeroMQ Notification Settings
zmqpubhashblock=tcp://0.0.0.0:9502
zmqpubrawtx=tcp://0.0.0.0:9501

14
docker/my-dojo/bitcoin/restart.sh

@ -0,0 +1,14 @@
#!/bin/bash
# Launch bitcoind inside the container, with its runtime settings taken
# from environment variables (populated from .env / docker-bitcoind.conf).
set -e

echo "## Start bitcoind #############################"

# Quote every expansion: an empty or whitespace-containing value would
# otherwise be word-split into stray (or missing) bitcoind arguments.
bitcoind -datadir=/home/bitcoin/.bitcoin \
  -dbcache="$BITCOIND_DB_CACHE" \
  -dnsseed="$BITCOIND_DNSSEED" \
  -dns="$BITCOIND_DNS" \
  -rpcuser="$BITCOIND_RPC_USER" \
  -rpcpassword="$BITCOIND_RPC_PASSWORD" \
  -maxconnections="$BITCOIND_MAX_CONNECTIONS" \
  -maxmempool="$BITCOIND_MAX_MEMPOOL" \
  -mempoolexpiry="$BITCOIND_MEMPOOL_EXPIRY" \
  -minrelaytxfee="$BITCOIND_MIN_RELAY_TX_FEE"

178
docker/my-dojo/bitcoin/wait-for-it.sh

@ -0,0 +1,178 @@
#!/usr/bin/env bash
# Use this script to test if a given TCP host/port are available
WAITFORIT_cmdname=${0##*/}
echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }
usage()
{
cat << USAGE >&2
Usage:
$WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
-h HOST | --host=HOST Host or IP under test
-p PORT | --port=PORT TCP port under test
Alternatively, you specify the host and port as host:port
-s | --strict Only execute subcommand if the test succeeds
-q | --quiet Don't output any status messages
-t TIMEOUT | --timeout=TIMEOUT
Timeout in seconds, zero for no timeout
-- COMMAND ARGS Execute command with args after the test finishes
USAGE
exit 1
}
# Poll $WAITFORIT_HOST:$WAITFORIT_PORT once per second until a TCP
# connection succeeds. The timeout value is only reported here; the
# actual time limit is enforced by the caller (wait_for_wrapper via
# the `timeout` command). Returns the result of the last probe.
wait_for()
{
    if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
        echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
    else
        echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
    fi
    WAITFORIT_start_ts=$(date +%s)
    while :
    do
        if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
            # busybox environment: probe with nc
            nc -z $WAITFORIT_HOST $WAITFORIT_PORT
            WAITFORIT_result=$?
        else
            # plain bash: probe with the built-in /dev/tcp pseudo-device
            # (no external dependency needed)
            (echo > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
            WAITFORIT_result=$?
        fi
        if [[ $WAITFORIT_result -eq 0 ]]; then
            WAITFORIT_end_ts=$(date +%s)
            echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
            break
        fi
        sleep 1
    done
    return $WAITFORIT_result
}
# Bound the wait with the `timeout` command by re-invoking this script
# as a --child in the background, then waiting on it. Running the child
# under `wait` (rather than directly under `timeout`) keeps SIGINT
# working during the countdown: http://unix.stackexchange.com/a/57692
wait_for_wrapper()
{
    if [[ $WAITFORIT_QUIET -eq 1 ]]; then
        timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
    else
        timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
    fi
    WAITFORIT_PID=$!
    # Forward Ctrl-C to the child's whole process group
    trap "kill -INT -$WAITFORIT_PID" INT
    wait $WAITFORIT_PID
    WAITFORIT_RESULT=$?
    if [[ $WAITFORIT_RESULT -ne 0 ]]; then
        echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
    fi
    return $WAITFORIT_RESULT
}
# process arguments
while [[ $# -gt 0 ]]
do
case "$1" in
*:* )
WAITFORIT_hostport=(${1//:/ })
WAITFORIT_HOST=${WAITFORIT_hostport[0]}
WAITFORIT_PORT=${WAITFORIT_hostport[1]}
shift 1
;;
--child)
WAITFORIT_CHILD=1
shift 1
;;
-q | --quiet)
WAITFORIT_QUIET=1
shift 1
;;
-s | --strict)
WAITFORIT_STRICT=1
shift 1
;;
-h)
WAITFORIT_HOST="$2"
if [[ $WAITFORIT_HOST == "" ]]; then break; fi
shift 2
;;
--host=*)
WAITFORIT_HOST="${1#*=}"
shift 1
;;
-p)
WAITFORIT_PORT="$2"
if [[ $WAITFORIT_PORT == "" ]]; then break; fi
shift 2
;;
--port=*)
WAITFORIT_PORT="${1#*=}"
shift 1
;;
-t)
WAITFORIT_TIMEOUT="$2"
if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
shift 2
;;
--timeout=*)
WAITFORIT_TIMEOUT="${1#*=}"
shift 1
;;
--)
shift
WAITFORIT_CLI=("$@")
break
;;
--help)
usage
;;
*)
echoerr "Unknown argument: $1"
usage
;;
esac
done
if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
echoerr "Error: you need to provide a host and port to test."
usage
fi
WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}
# check to see if timeout is from busybox?
WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
WAITFORIT_ISBUSY=1
WAITFORIT_BUSYTIMEFLAG="-t"
else
WAITFORIT_ISBUSY=0
WAITFORIT_BUSYTIMEFLAG=""
fi
if [[ $WAITFORIT_CHILD -gt 0 ]]; then
wait_for
WAITFORIT_RESULT=$?
exit $WAITFORIT_RESULT
else
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
wait_for_wrapper
WAITFORIT_RESULT=$?
else
wait_for
WAITFORIT_RESULT=$?
fi
fi
if [[ $WAITFORIT_CLI != "" ]]; then
if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
exit $WAITFORIT_RESULT
fi
exec "${WAITFORIT_CLI[@]}"
else
exit $WAITFORIT_RESULT
fi

32
docker/my-dojo/conf/docker-bitcoind.conf

@ -0,0 +1,32 @@
#########################################
# CONFIGURATION OF BITCOIND CONTAINER
#########################################
# User account used for rpc access to bitcoind
# Type: alphanumeric
BITCOIND_RPC_USER=dojorpc
# Password of user account used for rpc access to bitcoind
# Type: alphanumeric
BITCOIND_RPC_PASSWORD=dojorpcpassword
# Max number of connections to network peers
# Type: integer
BITCOIND_MAX_CONNECTIONS=16
# Mempool maximum size in MB
# Type: integer
BITCOIND_MAX_MEMPOOL=1024
# Db cache size in MB
# Type: integer
BITCOIND_DB_CACHE=1024
# Mempool expiry in hours
# Defines how long transactions stay in your local mempool before expiring
# Type: integer
BITCOIND_MEMPOOL_EXPIRY=72
# Min relay tx fee in BTC
# Type: numeric
BITCOIND_MIN_RELAY_TX_FEE=0.00001

15
docker/my-dojo/conf/docker-mysql.conf

@ -0,0 +1,15 @@
#########################################
# CONFIGURATION OF MYSQL CONTAINER
#########################################
# Password of MySql root account
# Type: alphanumeric
MYSQL_ROOT_PASSWORD=rootpassword
# User account used for db access
# Type: alphanumeric
MYSQL_USER=samourai
# Password of user account
# Type: alphanumeric
MYSQL_PASSWORD=password

30
docker/my-dojo/conf/docker-node.conf

@ -0,0 +1,30 @@
#########################################
# CONFIGURATION OF NODE JS CONTAINER
#########################################
# API key required for accessing the services provided by the server
# Keep this API key secret!
# Provide a value with a high entropy!
# Type: alphanumeric
NODE_API_KEY=myApiKey
# API key required for accessing the admin/maintenance services provided by the server
# Keep this Admin key secret!
# Provide a value with a high entropy!
# Type: alphanumeric
NODE_ADMIN_KEY=myAdminKey
# Secret used by the server for signing Json Web Token
# Keep this value secret!
# Provide a value with a high entropy!
# Type: alphanumeric
NODE_JWT_SECRET=myJwtSecret
# Activate the local bitcoind as the data source used for imports and rescans (if inactive, OXT is used)
# Note: support of local bitcoind is an experimental feature
# Values: active | inactive
NODE_IMPORT_FROM_BITCOIND=active
# FEE TYPE USED FOR FEES ESTIMATIONS BY BITCOIND
# Allowed values are ECONOMICAL or CONSERVATIVE
NODE_FEE_TYPE=ECONOMICAL

130
docker/my-dojo/docker-compose.yaml

@ -0,0 +1,130 @@
version: "3.2"
services:
db:
image: "samouraiwallet/dojo-db:1.0.0"
container_name: db
build:
context: ./../..
dockerfile: ./docker/my-dojo/mysql/Dockerfile
env_file:
- ./.env
- ./conf/docker-mysql.conf
restart: on-failure
expose:
- "3306"
volumes:
- data-mysql:/var/lib/mysql
networks:
dojonet:
ipv4_address: 172.28.1.1
bitcoind:
image: "samouraiwallet/dojo-bitcoind:1.0.0"
container_name: bitcoind
build:
context: ./bitcoin
env_file:
- ./.env
- ./conf/docker-bitcoind.conf
restart: on-failure
command: "/wait-for-it.sh tor:9050 --timeout=360 --strict -- /restart.sh"
expose:
- "28256"
- "9501"
- "9502"
volumes:
- data-bitcoind:/home/bitcoin/.bitcoin
depends_on:
- db
- tor
networks:
dojonet:
ipv4_address: 172.28.1.5
node:
image: "samouraiwallet/dojo-nodejs:1.0.0"
container_name: nodejs
build:
context: ./../..
dockerfile: ./docker/my-dojo/node/Dockerfile
env_file:
- ./.env
- ./conf/docker-mysql.conf
- ./conf/docker-bitcoind.conf
- ./conf/docker-node.conf
restart: on-failure
command: "/home/node/app/wait-for-it.sh db:3306 --timeout=360 --strict -- /home/node/app/restart.sh"
expose:
- "8080"
- "8081"
volumes:
- data-nodejs:/data
depends_on:
- bitcoind
- db
networks:
dojonet:
ipv4_address: 172.28.1.2
nginx:
image: "samouraiwallet/dojo-nginx:1.0.0"
container_name: nginx
build:
context: ./nginx
env_file:
- ./.env
restart: on-failure
command: "/wait-for node:8080 --timeout=360 -- nginx"
expose:
- "80"
volumes:
- data-nginx:/data
depends_on:
- node
networks:
dmznet:
ipv4_address: 172.29.1.3
dojonet:
ipv4_address: 172.28.1.3
tor:
image: "samouraiwallet/dojo-tor:1.0.0"
container_name: tor
build:
context: ./tor
env_file:
- ./.env
restart: on-failure
command: tor
ports:
- "80:80"
volumes:
- data-tor:/var/lib/tor
networks:
dmznet:
ipv4_address: 172.29.1.4
dojonet:
ipv4_address: 172.28.1.4
networks:
dojonet:
driver: bridge
ipam:
driver: default
config:
- subnet: 172.28.0.0/16
dmznet:
driver: bridge
ipam:
driver: default
config:
- subnet: 172.29.0.0/16
volumes:
data-mysql:
data-bitcoind:
data-bitcoind-tor:
data-nodejs:
data-nginx:
data-tor:

222
docker/my-dojo/dojo.sh

@ -0,0 +1,222 @@
#!/bin/bash
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
source "$DIR/conf/docker-bitcoind.conf"
# Start
start() {
docker-compose up --remove-orphans -d
}
# Stop
# Stop the dojo: ask bitcoind for a graceful shutdown through its RPC
# API so the chainstate is flushed cleanly, then take the containers down.
stop() {
  docker exec -it bitcoind bitcoin-cli \
    -rpcconnect=bitcoind \
    --rpcport=28256 \
    --rpcuser="$BITCOIND_RPC_USER" \
    --rpcpassword="$BITCOIND_RPC_PASSWORD" \
    stop
  echo "Preparing shutdown of dojo. Please wait."
  # Give bitcoind time to terminate before stopping the containers
  sleep 15s
  docker-compose down
}
# Restart dojo
# Restart the dojo.
# A restart is exactly a clean stop (graceful bitcoind RPC shutdown,
# wait, docker-compose down) followed by bringing the stack back up,
# so reuse stop() instead of duplicating its shutdown sequence.
restart() {
  stop
  docker-compose up -d
}
# Install
install() {
docker-compose up -d --remove-orphans
docker-compose logs --tail=0 --follow
}
# Delete everything
uninstall() {
docker-compose rm
docker-compose down
docker image rm samouraiwallet/dojo-db:1.0.0
docker image rm samouraiwallet/dojo-bitcoind:1.0.0
docker image rm samouraiwallet/dojo-nodejs:1.0.0
docker image rm samouraiwallet/dojo-nginx:1.0.0
docker image rm samouraiwallet/dojo-tor:1.0.0
docker volume prune
}
# Display the onion address
onion() {
V2_ADDR=$( docker exec -it tor cat /var/lib/tor/hsv2dojo/hostname )
V3_ADDR=$( docker exec -it tor cat /var/lib/tor/hsv3dojo/hostname )
echo "API Hidden Service address (v3) = $V3_ADDR"
echo "API Hidden Service address (v2) = $V2_ADDR"
}
# Display logs
# Display the logs of a nodejs module.
#   $1 = module name (api | pushtx | pushtx-orchest | tracker)
#   $2 = log type (output | error)
#   $3 = number of lines to display (0 or empty = follow the log)
logs_node() {
  # Default the line count to 0 when it is missing: a bare
  # `[ $3 -eq 0 ]` is a syntax error when $3 expands to nothing.
  if [ "${3:-0}" -eq 0 ]; then
    docker exec -ti nodejs tail -f "/data/logs/$1-$2.log"
  else
    docker exec -ti nodejs tail -n "$3" "/data/logs/$1-$2.log"
  fi
}
logs() {
case $1 in
db )
docker-compose logs --tail=50 --follow db
;;
bitcoind )
docker exec -ti bitcoind tail -f /home/bitcoin/.bitcoin/debug.log
;;
tor )
docker-compose logs --tail=50 --follow tor
;;
api | pushtx | pushtx-orchest | tracker )
logs_node $1 $2 $3
;;
* )
docker-compose logs --tail=0 --follow
;;
esac
}
# Display the help
help() {
echo "Usage: dojo.sh command [module] [options]"
echo "Interact with your dojo."
echo " "
echo "Available commands:"
echo " "
echo " help Display this help message."
echo " "
echo " bitcoin-cli Launch a bitcoin-cli console allowing to interact with your full node through its RPC API."
echo " "
echo " install Install your dojo."
echo " "
echo " logs [module] [options] Display the logs of your dojo. Use CTRL+C to stop the logs."
echo " "
echo " Available modules:"
echo " dojo.sh logs : display the logs of all the Docker containers"
echo " dojo.sh logs bitcoind : display the logs of bitcoind"
echo " dojo.sh logs db : display the logs of the MySQL database"
echo " dojo.sh logs tor : display the logs of tor"
echo " dojo.sh logs api : display the logs of the REST API (nodejs)"
echo " dojo.sh logs tracker : display the logs of the Tracker (nodejs)"
echo " dojo.sh logs pushtx : display the logs of the pushTx API (nodejs)"
echo " dojo.sh logs pushtx-orchest : display the logs of the pushTx Orchestrator (nodejs)"
echo " "
echo " Available options (only available for api, tracker, pushtx and pushtx-orchest modules):"
echo " -d [VALUE] : select the type of log to be displayed."
echo " VALUE can be output (default) or error."
echo " -n [VALUE] : display the last VALUE lines"
echo " "
echo " onion Display the Tor onion address allowing your wallet to access your dojo."
echo " "
echo " restart Restart your dojo."
echo " "
echo " start Start your dojo."
echo " "
echo " stop Stop your dojo."
echo " "
echo " uninstall Delete your dojo. Be careful! This command will also remove all data."
}
#
# Parse options to the dojo command
#
while getopts ":h" opt; do
case ${opt} in
h )
help
exit 0
;;
\? )
echo "Invalid Option: -$OPTARG" 1>&2
exit 1
;;
esac
done
shift $((OPTIND -1))
subcommand=$1; shift
case "$subcommand" in
bitcoin-cli )
docker exec -it bitcoind bitcoin-cli \
-rpcconnect=bitcoind \
--rpcport=28256 \
--rpcuser="$BITCOIND_RPC_USER" \
--rpcpassword="$BITCOIND_RPC_PASSWORD" \
$1 $2 $3 $4 $5
;;
help )
help
;;
install )
install
;;
logs )
module=$1; shift
display="output"
numlines=0
# Process package options
while getopts ":d:n:" opt; do
case ${opt} in
d )
display=$OPTARG
;;
n )
numlines=$OPTARG
;;
\? )
echo "Invalid Option: -$OPTARG" 1>&2
exit 1
;;
: )
echo "Invalid Option: -$OPTARG requires an argument" 1>&2
exit 1
;;
esac
done
shift $((OPTIND -1))
logs $module $display $numlines
;;
onion )
onion
;;
restart )
restart
;;
start )
start
;;
stop )
stop
;;
uninstall )
uninstall
;;
esac

7
docker/my-dojo/mysql/Dockerfile

@ -0,0 +1,7 @@
FROM mysql:5.7.25
# Copy mysql config
COPY ./docker/my-dojo/mysql/mysql-dojo.cnf /etc/mysql/conf.d/mysql-dojo.cnf
# Copy content of mysql scripts into /docker-entrypoint-initdb.d
COPY ./db-scripts/ /docker-entrypoint-initdb.d

2
docker/my-dojo/mysql/mysql-dojo.cnf

@ -0,0 +1,2 @@
[mysqld]
sql_mode="NO_ENGINE_SUBSTITUTION"

18
docker/my-dojo/nginx/Dockerfile

@ -0,0 +1,18 @@
FROM nginx:1.15.10-alpine
# Create data directory
ENV LOGS_DIR /data/logs
RUN mkdir -p "$LOGS_DIR" && \
chown -R nginx:nginx "$LOGS_DIR"
# Copy configuration files
COPY ./nginx.conf /etc/nginx/nginx.conf
COPY ./dojo.conf /etc/nginx/sites-enabled/dojo.conf
# Copy wait-for script
COPY ./wait-for /wait-for
RUN chmod u+x /wait-for && \
chmod g+x /wait-for

53
docker/my-dojo/nginx/dojo.conf

@ -0,0 +1,53 @@
# Proxy WebSockets
# https://www.nginx.com/blog/websocket-nginx/
map $http_upgrade $connection_upgrade {
default upgrade;
'' close;
}
# WebSocket server listening here
upstream websocket {
server node:8080;
}
# Site Configuration
server {
listen 80;
server_name _;
# Set proxy timeouts for the application
proxy_connect_timeout 600;
proxy_read_timeout 600;
proxy_send_timeout 600;
send_timeout 600;
# Proxy WebSocket connections first
location /v2/inv {
proxy_pass http://websocket;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
}
# PushTX server is separate, so proxy first
location /v2/pushtx/ {
proxy_pass http://node:8081/;
}
# Proxy requests to maintenance tool
location /admin/ {
proxy_pass http://node:8080/static/admin/;
}
# Proxy all other v2 requests to the accounts server
location /v2/ {
proxy_pass http://node:8080/;
}
# Serve remaining requests
location / {
return 200 '{"status":"ok"}';
add_header Content-Type application/json;
}
}

44
docker/my-dojo/nginx/nginx.conf

@ -0,0 +1,44 @@
user nginx;
worker_processes auto;
daemon off;
# Log critical errors and higher
error_log /data/logs/error.log crit;
pid /var/run/nginx.pid;
events {
worker_connections 1024;
}
http {
include /etc/nginx/mime.types;
default_type application/octet-stream;
# Disable activity logging for privacy.
access_log off;
# Do not reveal the version of server
server_tokens off;
sendfile on;
keepalive_timeout 65;
# Enable response compression
gzip on;
# Compression level: 1-9
gzip_comp_level 1;
# Disable gzip compression for older IE
gzip_disable msie6;
# Minimum length of response before gzip kicks in
gzip_min_length 128;
# Compress these MIME types in addition to text/html
gzip_types application/json;
# Help with proxying by adding the Vary: Accept-Encoding response
gzip_vary on;
include /etc/nginx/sites-enabled/*.conf;
}

79
docker/my-dojo/nginx/wait-for

@ -0,0 +1,79 @@
#!/bin/sh
TIMEOUT=15
QUIET=0
echoerr() {
if [ "$QUIET" -ne 1 ]; then printf "%s\n" "$*" 1>&2; fi
}
usage() {
exitcode="$1"
cat << USAGE >&2
Usage:
$cmdname host:port [-t timeout] [-- command args]
-q | --quiet Do not output any status messages
-t TIMEOUT | --timeout=timeout Timeout in seconds, zero for no timeout
-- COMMAND ARGS Execute command with args after the test finishes
USAGE
exit "$exitcode"
}
# Probe $HOST:$PORT with nc once per second, for at most $TIMEOUT tries.
# On success: exec the remaining command-line arguments (if any were
# given after --), otherwise exit 0. On timeout: report and exit 1.
# Note: this function never returns — it always execs or exits.
wait_for() {
  for i in `seq $TIMEOUT` ; do
    nc -z "$HOST" "$PORT" > /dev/null 2>&1
    result=$?
    if [ $result -eq 0 ] ; then
      if [ $# -gt 0 ] ; then
        # Replace this shell with the requested command
        exec "$@"
      fi
      exit 0
    fi
    sleep 1
  done
  echo "Operation timed out" >&2
  exit 1
}
while [ $# -gt 0 ]
do
case "$1" in
*:* )
HOST=$(printf "%s\n" "$1"| cut -d : -f 1)
PORT=$(printf "%s\n" "$1"| cut -d : -f 2)
shift 1
;;
-q | --quiet)
QUIET=1
shift 1
;;
-t)
TIMEOUT="$2"
if [ "$TIMEOUT" = "" ]; then break; fi
shift 2
;;
--timeout=*)
TIMEOUT="${1#*=}"
shift 1
;;
--)
shift
break
;;
--help)
usage 0
;;
*)
echoerr "Unknown argument: $1"
usage 1
;;
esac
done
if [ "$HOST" = "" -o "$PORT" = "" ]; then
echoerr "Error: you need to provide a host and port to test."
usage 2
fi
wait_for "$@"

43
docker/my-dojo/node/Dockerfile

@ -0,0 +1,43 @@
FROM node:8.12.0-stretch
ENV LOGS_DIR /data/logs
ENV APP_DIR /home/node/app
# Install forever
RUN npm install -g forever
# Create data directory
RUN mkdir -p "$LOGS_DIR" && \
chown -R node:node "$LOGS_DIR"
# Create app directory
RUN mkdir "$APP_DIR" && \
chown -R node:node "$APP_DIR"
# Copy app source files into APP_DIR
COPY . "$APP_DIR"
# Install node modules required by the app
RUN cd "$APP_DIR" && \
npm install --only=prod
# Copy config file
COPY ./docker/my-dojo/node/keys.index.js "$APP_DIR/keys/index.js"
RUN chown node:node "$APP_DIR/keys/index.js"
# Copy restart script
COPY ./docker/my-dojo/node/restart.sh "$APP_DIR/restart.sh"
RUN chown node:node "$APP_DIR/restart.sh" && \
chmod u+x "$APP_DIR/restart.sh" && \
chmod g+x "$APP_DIR/restart.sh"
# Copy wait-for-it script
COPY ./docker/my-dojo/node/wait-for-it.sh "$APP_DIR/wait-for-it.sh"
RUN chown node:node "$APP_DIR/wait-for-it.sh" && \
chmod u+x "$APP_DIR/wait-for-it.sh" && \
chmod g+x "$APP_DIR/wait-for-it.sh"
USER node

244
docker/my-dojo/node/keys.index.js

@ -0,0 +1,244 @@
/*!
* keys/index-example.js
* Copyright (c) 2016-2018, Samourai Wallet (CC BY-NC-ND 4.0 License).
*/
/**
* Desired structure of /keys/index.js, which is ignored in the repository.
*/
module.exports = {
/*
* Mainnet parameters
*/
bitcoin: {
/*
* Dojo version
*/
dojoVersion: process.env.DOJO_VERSION_TAG,
/*
* Bitcoind
*/
bitcoind: {
// RPC API
rpc: {
// Login
user: process.env.BITCOIND_RPC_USER,
// Password
pass: process.env.BITCOIND_RPC_PASSWORD,
// IP address
host: 'bitcoind',
// TCP port
port: 28256
},
// ZMQ Tx notifications
zmqTx: 'tcp://bitcoind:9501',
// ZMQ Block notifications
zmqBlk: 'tcp://bitcoind:9502',
// Fee type (estimatesmartfee)
feeType: process.env.NODE_FEE_TYPE
},
/*
* MySQL database
*/
db: {
// User
user: process.env.MYSQL_USER,
// Password
pass: process.env.MYSQL_PASSWORD,
// IP address
host: 'db',
// TCP port
port: 3306,
// Db name
database: process.env.MYSQL_DATABASE,
// Timeout
acquireTimeout: 15000,
// Max number of concurrent connections
// for each module
connectionLimitApi: 50,
connectionLimitTracker: 10,
connectionLimitPushTxApi: 5,
connectionLimitPushTxOrchestrator: 5
},
/*
* TCP Ports
*/
ports: {
// Port used by the API
account: 8080,
// Port used by pushtx
pushtx: 8081,
// Port used by the tracker for its notifications
tracker: 5555,
// Port used by pushtx for its notifications
notifpushtx: 5556,
// Port used by the pushtx orchestrator for its notifications
orchestrator: 5557
},
/*
* HTTPS
* Activate only if node js is used as frontend web server
* (no nginx proxy server)
*/
https: {
// HTTPS for the API
account: {
// Activate https
active: false,
// Filepath of server private key
// (shoud be stored in keys/sslcert)
keypath: '',
// Passphrase of the private key
passphrase: '',
// Filepath of server certificate
// (shoud be stored in keys/sslcert)
certpath: '',
// Filepath of CA certificate
// (shoud be stored in keys/sslcert)
capath: ''
},
// HTTPS for pushtx
pushtx: {
// Activate https
active: false,
// Filepath of server private key
// (shoud be stored in keys/sslcert)
keypath: '',
// Passphrase of the private key
passphrase: '',
// Filepath of server certificate
// (shoud be stored in keys/sslcert)
certpath: '',
// Filepath of CA certificate
// (shoud be stored in keys/sslcert)
capath: ''
}
},
/*
* Authenticated access to the APIs (account & pushtx)
*/
auth: {
// Name of the authentication strategy used
// Available values:
// null : No authentication
// 'localApiKey' : authentication with a shared local api key
activeStrategy: 'localApiKey',
// Flag indicating if authenticated access is mandatory
// (useful for launch, othewise should be true)
// @todo Set to true !!!
mandatory: true,
// List of available authentication strategies
strategies: {
// Authentication with a shared local api key
localApiKey: {
// List of API keys (alphanumeric characters)
apiKeys: [process.env.NODE_API_KEY],
// Admin key (alphanumeric characters)
adminKey: process.env.NODE_ADMIN_KEY,
// DO NOT MODIFY
configurator: 'localapikey-strategy-configurator'
}
},
// Configuration of Json Web Tokens
// used for the management of authorizations
jwt: {
// Secret passphrase used by the server to sign the jwt
// (alphanumeric characters)
secret: process.env.NODE_JWT_SECRET,
accessToken: {
// Number of seconds after which the jwt expires
expires: parseInt(process.env.NODE_JWT_ACCESS_EXPIRES)
},
refreshToken: {
// Number of seconds after which the jwt expires
expires: parseInt(process.env.NODE_JWT_REFRESH_EXPIRES)
}
}
},
/*
* Prefixes used by the API
* for /support and /status endpoints
*/
prefixes: {
// Prefix for /support endpoint
support: process.env.NODE_PREFIX_SUPPORT,
// Prefix for /status endpoint
status: process.env.NODE_PREFIX_STATUS,
// Prefix for pushtx /status endpoint
statusPushtx: process.env.NODE_PREFIX_STATUS_PUSHTX
},
/*
* Gaps used for derivation of keys
*/
gap: {
// Gap for derivation of external addresses
external: parseInt(process.env.NODE_GAP_EXTERNAL),
// Gap for derivation of internal (change) addresses
internal: parseInt(process.env.NODE_GAP_INTERNAL)
},
/*
* Multiaddr endpoint
*/
multiaddr: {
// Number of transactions returned by the endpoint
transactions: 50
},
/*
* Third party explorers
* used for fast scan of addresses
*/
explorers: {
// Use local bitcoind for imports and rescans
// or use OXT as a fallback
// Values: active | inactive
bitcoind: process.env.NODE_IMPORT_FROM_BITCOIND,
// Use a SOCKS5 proxy for all communications with external services
// Values: null if no socks5 proxy used, otherwise the url of the socks5 proxy
socks5Proxy: 'socks5h://172.28.1.4:9050',
// OXT
oxt: process.env.NODE_URL_OXT_API
},
/*
* Max number of transactions per address
* accepted during fast scan
*/
addrFilterThreshold: parseInt(process.env.NODE_ADDR_FILTER_THRESHOLD),
/*
* Pool of child processes
* for parallel derivation of addresses
* Be careful with these parameters ;)
*/
addrDerivationPool: {
// Min number of child processes always running
minNbChildren: parseInt(process.env.NODE_ADDR_DERIVATION_MIN_CHILD),
// Max number of child processes allowed
maxNbChildren: parseInt(process.env.NODE_ADDR_DERIVATION_MAX_CHILD),
// Max duration
acquireTimeoutMillis: 60000,
// Parallel derivation threshold
// (use parallel derivation if number of addresses to be derived
// is greater than thresholdParalleDerivation)
thresholdParallelDerivation: parseInt(process.env.NODE_ADDR_DERIVATION_THRESHOLD),
},
/*
* PushTx - Scheduler
*/
txsScheduler: {
// Max number of transactions allowed in a single script
maxNbEntries: parseInt(process.env.NODE_TXS_SCHED_MAX_ENTRIES),
// Max number of blocks allowed in the future
maxDeltaHeight: parseInt(process.env.NODE_TXS_SCHED_MAX_DELTA_HEIGHT)
},
/*
* Tracker
*/
tracker: {
// Processing of mempool (periodicity in ms)
mempoolProcessPeriod: parseInt(process.env.NODE_TRACKER_MEMPOOL_PERIOD),
// Processing of unconfirmed transactions (periodicity in ms)
unconfirmedTxsProcessPeriod: parseInt(process.env.NODE_TRACKER_UNCONF_TXS_PERIOD)
}
}
}

13
docker/my-dojo/node/restart.sh

@ -0,0 +1,13 @@
#!/bin/bash

# Launch the Dojo node apps under the "forever" process manager,
# then stream their combined logs to stdout so the container stays alive.

# Start one app: $1 = app directory, $2 = log basename, $3 = entry script
start_app() {
  cd "/home/node/app/$1"
  forever start -a -l /dev/null -o "/data/logs/$2-output.log" -e "/data/logs/$2-error.log" "$3"
}

start_app accounts api index.js
start_app pushtx pushtx index.js
start_app pushtx pushtx-orchest index-orchestrator.js
start_app tracker tracker index.js

# Tail the logs of the first managed process (blocks forever)
forever --fifo logs 0

178
docker/my-dojo/node/wait-for-it.sh

@ -0,0 +1,178 @@
#!/usr/bin/env bash
# Use this script to test if a given TCP host/port are available.
#
# Vendored "wait-for-it" helper: blocks until HOST:PORT accepts a TCP
# connection or the timeout elapses, then optionally execs a command.

WAITFORIT_cmdname=${0##*/}

# Print to stderr unless --quiet was requested
echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }

# Print usage on stderr and exit with an error status
usage()
{
    cat << USAGE >&2
Usage:
    $WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
    -h HOST | --host=HOST       Host or IP under test
    -p PORT | --port=PORT       TCP port under test
                                Alternatively, you specify the host and port as host:port
    -s | --strict               Only execute subcommand if the test succeeds
    -q | --quiet                Don't output any status messages
    -t TIMEOUT | --timeout=TIMEOUT
                                Timeout in seconds, zero for no timeout
    -- COMMAND ARGS             Execute command with args after the test finishes
USAGE
    exit 1
}

# Poll the target once per second until it accepts a TCP connection.
# Returns the result of the last probe (0 on success).
wait_for()
{
    if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
        echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
    else
        echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
    fi
    WAITFORIT_start_ts=$(date +%s)
    while :
    do
        if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
            # busybox environment: probe with nc
            nc -z $WAITFORIT_HOST $WAITFORIT_PORT
            WAITFORIT_result=$?
        else
            # plain bash: probe via the /dev/tcp pseudo-device
            (echo > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
            WAITFORIT_result=$?
        fi
        if [[ $WAITFORIT_result -eq 0 ]]; then
            WAITFORIT_end_ts=$(date +%s)
            echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
            break
        fi
        sleep 1
    done
    return $WAITFORIT_result
}

# Re-exec this script as a --child under `timeout`, so that the wait can
# be bounded AND interrupted by SIGINT: http://unix.stackexchange.com/a/57692
wait_for_wrapper()
{
    # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
    if [[ $WAITFORIT_QUIET -eq 1 ]]; then
        timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
    else
        timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
    fi
    WAITFORIT_PID=$!
    # Forward Ctrl-C to the child's process group
    trap "kill -INT -$WAITFORIT_PID" INT
    wait $WAITFORIT_PID
    WAITFORIT_RESULT=$?
    if [[ $WAITFORIT_RESULT -ne 0 ]]; then
        echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
    fi
    return $WAITFORIT_RESULT
}

# process arguments
while [[ $# -gt 0 ]]
do
    case "$1" in
        *:* )
        # combined host:port form
        WAITFORIT_hostport=(${1//:/ })
        WAITFORIT_HOST=${WAITFORIT_hostport[0]}
        WAITFORIT_PORT=${WAITFORIT_hostport[1]}
        shift 1
        ;;
        --child)
        # internal flag: this invocation is the re-exec'd child
        WAITFORIT_CHILD=1
        shift 1
        ;;
        -q | --quiet)
        WAITFORIT_QUIET=1
        shift 1
        ;;
        -s | --strict)
        WAITFORIT_STRICT=1
        shift 1
        ;;
        -h)
        WAITFORIT_HOST="$2"
        if [[ $WAITFORIT_HOST == "" ]]; then break; fi
        shift 2
        ;;
        --host=*)
        WAITFORIT_HOST="${1#*=}"
        shift 1
        ;;
        -p)
        WAITFORIT_PORT="$2"
        if [[ $WAITFORIT_PORT == "" ]]; then break; fi
        shift 2
        ;;
        --port=*)
        WAITFORIT_PORT="${1#*=}"
        shift 1
        ;;
        -t)
        WAITFORIT_TIMEOUT="$2"
        if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
        shift 2
        ;;
        --timeout=*)
        WAITFORIT_TIMEOUT="${1#*=}"
        shift 1
        ;;
        --)
        # everything after -- is the command to exec once the port is up
        shift
        WAITFORIT_CLI=("$@")
        break
        ;;
        --help)
        usage
        ;;
        *)
        echoerr "Unknown argument: $1"
        usage
        ;;
    esac
done

if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
    echoerr "Error: you need to provide a host and port to test."
    usage
fi

# Defaults
WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}

# check to see if timeout is from busybox
# (busybox timeout needs -t and has no /dev/tcp, so nc is used to probe)
WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
    WAITFORIT_ISBUSY=1
    WAITFORIT_BUSYTIMEFLAG="-t"
else
    WAITFORIT_ISBUSY=0
    WAITFORIT_BUSYTIMEFLAG=""
fi

# Child invocations and zero-timeout runs wait directly;
# otherwise go through the timeout wrapper.
if [[ $WAITFORIT_CHILD -gt 0 ]]; then
    wait_for
    WAITFORIT_RESULT=$?
    exit $WAITFORIT_RESULT
else
    if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
        wait_for_wrapper
        WAITFORIT_RESULT=$?
    else
        wait_for
        WAITFORIT_RESULT=$?
    fi
fi

# Exec the trailing command if one was given (honoring --strict)
if [[ $WAITFORIT_CLI != "" ]]; then
    if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
        echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
        exit $WAITFORIT_RESULT
    fi
    exec "${WAITFORIT_CLI[@]}"
else
    exit $WAITFORIT_RESULT
fi

55
docker/my-dojo/tor/Dockerfile

@ -0,0 +1,55 @@
# Tor image for Dojo: builds Tor from a pinned source tag and runs it
# as an unprivileged user, exposing a SOCKS proxy on port 9050.
FROM debian:stretch

ENV TOR_HOME /var/lib/tor

# Install build dependencies, build Tor from the pinned release tag,
# then delete the source tree to keep the layer smaller
RUN set -ex && \
    apt-get update && \
    apt-get install -y git libevent-dev zlib1g-dev libssl-dev gcc make automake ca-certificates autoconf musl-dev coreutils && \
    mkdir -p /usr/local/src/ && \
    git clone https://git.torproject.org/tor.git /usr/local/src/tor && \
    cd /usr/local/src/tor && \
    git checkout tor-0.3.5.8 && \
    ./autogen.sh && \
    ./configure \
        --disable-asciidoc \
        --sysconfdir=/etc \
        --disable-unittests && \
    make && make install && \
    cd .. && \
    rm -rf tor

# Create group & user tor (fixed uid/gid so volume ownership is stable)
RUN addgroup --system -gid 1107 tor && \
    adduser --system --ingroup tor -uid 1104 tor

# Create group & user bitcoin and add user to tor group
# (presumably so bitcoin can read the group-readable Tor control
#  cookie configured in torrc -- TODO confirm)
RUN addgroup --system -gid 1108 bitcoin && \
    adduser --system --ingroup bitcoin -uid 1105 bitcoin && \
    usermod -a -G tor bitcoin

# Create /etc/tor directory
RUN mkdir -p /etc/tor/ && \
    chown -Rv tor:tor /etc/tor

# Create .tor subdirectory of TOR_HOME
RUN mkdir -p "$TOR_HOME/.tor" && \
    chown -Rv tor:tor "$TOR_HOME" && \
    chmod -R 700 "$TOR_HOME"

# Copy Tor configuration file
COPY ./torrc /etc/tor/torrc
RUN chown tor:tor /etc/tor/torrc

# Copy wait-for-it script
COPY ./wait-for-it.sh /wait-for-it.sh
RUN chown tor:tor /wait-for-it.sh && \
    chmod u+x /wait-for-it.sh && \
    chmod g+x /wait-for-it.sh

# Expose socks port
EXPOSE 9050

# Switch to user tor
USER tor

49
docker/my-dojo/tor/torrc

@ -0,0 +1,49 @@
## Tor opens a socks proxy on port 9050 by default -- even if you don't
## configure one below. Set "SocksPort 0" if you plan to run Tor only
## as a relay, and not make any local application connections yourself.

# Socks is only available from dojonet
SocksPort 172.28.1.4:9050

## Entry policies to allow/deny SOCKS requests based on IP address.
## First entry that matches wins. If no SocksPolicy is set, we accept
## all (and only) requests that reach a SocksPort. Untrusted users who
## can access your SocksPort may be able to learn about the connections
## you make.

# Socks is only available from dojonet
SocksPolicy accept 172.28.0.0/16
SocksPolicy reject *

## The directory for keeping all the keys/etc. By default, we store
## things in $HOME/.tor on Unix, and in Application Data\tor on Windows.
DataDirectory /var/lib/tor/.tor

## The port on which Tor will listen for local connections from Tor
## controller applications, as documented in control-spec.txt.
ControlPort 9051

## If you enable the controlport, be sure to enable one of these
## authentication methods, to prevent attackers from accessing it.
CookieAuthentication 1
CookieAuthFileGroupReadable 1

############### This section is just for location-hidden services ###

## Once you have configured a hidden service, you can look at the
## contents of the file ".../hidden_service/hostname" for the address
## to tell people.

## HiddenServicePort x y:z says to redirect requests on port x to the
## address y:z.

# NOTE(review): v2 onion services are deprecated by the Tor project and
# rejected by modern Tor releases -- confirm the v2 service is still wanted.
HiddenServiceDir /var/lib/tor/hsv2dojo
HiddenServiceVersion 2
HiddenServicePort 80 172.29.1.3:80

HiddenServiceDir /var/lib/tor/hsv3dojo
HiddenServiceVersion 3
HiddenServicePort 80 172.29.1.3:80

178
docker/my-dojo/tor/wait-for-it.sh

@ -0,0 +1,178 @@
#!/usr/bin/env bash
# Use this script to test if a given TCP host/port are available.
#
# Vendored "wait-for-it" helper: blocks until HOST:PORT accepts a TCP
# connection or the timeout elapses, then optionally execs a command.

WAITFORIT_cmdname=${0##*/}

# Print to stderr unless --quiet was requested
echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }

# Print usage on stderr and exit with an error status
usage()
{
    cat << USAGE >&2
Usage:
    $WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
    -h HOST | --host=HOST       Host or IP under test
    -p PORT | --port=PORT       TCP port under test
                                Alternatively, you specify the host and port as host:port
    -s | --strict               Only execute subcommand if the test succeeds
    -q | --quiet                Don't output any status messages
    -t TIMEOUT | --timeout=TIMEOUT
                                Timeout in seconds, zero for no timeout
    -- COMMAND ARGS             Execute command with args after the test finishes
USAGE
    exit 1
}

# Poll the target once per second until it accepts a TCP connection.
# Returns the result of the last probe (0 on success).
wait_for()
{
    if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
        echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
    else
        echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
    fi
    WAITFORIT_start_ts=$(date +%s)
    while :
    do
        if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
            # busybox environment: probe with nc
            nc -z $WAITFORIT_HOST $WAITFORIT_PORT
            WAITFORIT_result=$?
        else
            # plain bash: probe via the /dev/tcp pseudo-device
            (echo > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
            WAITFORIT_result=$?
        fi
        if [[ $WAITFORIT_result -eq 0 ]]; then
            WAITFORIT_end_ts=$(date +%s)
            echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
            break
        fi
        sleep 1
    done
    return $WAITFORIT_result
}

# Re-exec this script as a --child under `timeout`, so that the wait can
# be bounded AND interrupted by SIGINT: http://unix.stackexchange.com/a/57692
wait_for_wrapper()
{
    # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
    if [[ $WAITFORIT_QUIET -eq 1 ]]; then
        timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
    else
        timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
    fi
    WAITFORIT_PID=$!
    # Forward Ctrl-C to the child's process group
    trap "kill -INT -$WAITFORIT_PID" INT
    wait $WAITFORIT_PID
    WAITFORIT_RESULT=$?
    if [[ $WAITFORIT_RESULT -ne 0 ]]; then
        echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
    fi
    return $WAITFORIT_RESULT
}

# process arguments
while [[ $# -gt 0 ]]
do
    case "$1" in
        *:* )
        # combined host:port form
        WAITFORIT_hostport=(${1//:/ })
        WAITFORIT_HOST=${WAITFORIT_hostport[0]}
        WAITFORIT_PORT=${WAITFORIT_hostport[1]}
        shift 1
        ;;
        --child)
        # internal flag: this invocation is the re-exec'd child
        WAITFORIT_CHILD=1
        shift 1
        ;;
        -q | --quiet)
        WAITFORIT_QUIET=1
        shift 1
        ;;
        -s | --strict)
        WAITFORIT_STRICT=1
        shift 1
        ;;
        -h)
        WAITFORIT_HOST="$2"
        if [[ $WAITFORIT_HOST == "" ]]; then break; fi
        shift 2
        ;;
        --host=*)
        WAITFORIT_HOST="${1#*=}"
        shift 1
        ;;
        -p)
        WAITFORIT_PORT="$2"
        if [[ $WAITFORIT_PORT == "" ]]; then break; fi
        shift 2
        ;;
        --port=*)
        WAITFORIT_PORT="${1#*=}"
        shift 1
        ;;
        -t)
        WAITFORIT_TIMEOUT="$2"
        if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
        shift 2
        ;;
        --timeout=*)
        WAITFORIT_TIMEOUT="${1#*=}"
        shift 1
        ;;
        --)
        # everything after -- is the command to exec once the port is up
        shift
        WAITFORIT_CLI=("$@")
        break
        ;;
        --help)
        usage
        ;;
        *)
        echoerr "Unknown argument: $1"
        usage
        ;;
    esac
done

if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
    echoerr "Error: you need to provide a host and port to test."
    usage
fi

# Defaults
WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}

# check to see if timeout is from busybox
# (busybox timeout needs -t and has no /dev/tcp, so nc is used to probe)
WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
    WAITFORIT_ISBUSY=1
    WAITFORIT_BUSYTIMEFLAG="-t"
else
    WAITFORIT_ISBUSY=0
    WAITFORIT_BUSYTIMEFLAG=""
fi

# Child invocations and zero-timeout runs wait directly;
# otherwise go through the timeout wrapper.
if [[ $WAITFORIT_CHILD -gt 0 ]]; then
    wait_for
    WAITFORIT_RESULT=$?
    exit $WAITFORIT_RESULT
else
    if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
        wait_for_wrapper
        WAITFORIT_RESULT=$?
    else
        wait_for
        WAITFORIT_RESULT=$?
    fi
fi

# Exec the trailing command if one was given (honoring --strict)
if [[ $WAITFORIT_CLI != "" ]]; then
    if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
        echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
        exit $WAITFORIT_RESULT
    fi
    exec "${WAITFORIT_CLI[@]}"
else
    exit $WAITFORIT_RESULT
fi

349
keys/index-example.js

@ -0,0 +1,349 @@
/*!
* keys/index-example.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
/**
* Desired structure of /keys/index.js, which is ignored in the repository.
*/
// Example configuration file. Copy to /keys/index.js and fill in the
// placeholder values; the structure below is what the backend expects.
module.exports = {
  /*
   * Mainnet parameters
   */
  bitcoin: {
    /*
     * Dojo version
     */
    dojoVersion: '1.0.0',
    /*
     * Bitcoind
     */
    bitcoind: {
      // RPC API
      rpc: {
        // Login
        user: 'user',
        // Password
        pass: 'password',
        // IP address
        host: '127.0.0.1',
        // TCP port
        port: 8332
      },
      // ZMQ Tx notifications
      zmqTx: 'tcp://127.0.0.1:9501',
      // ZMQ Block notifications
      zmqBlk: 'tcp://127.0.0.1:9502',
      // Fee type (estimatesmartfee)
      feeType: 'ECONOMICAL'
    },
    /*
     * MySQL database
     */
    db: {
      // User
      user: 'user',
      // Password
      pass: 'password',
      // IP address
      host: '127.0.0.1',
      // TCP port
      port: 3306,
      // Db name
      database: 'db_name',
      // Timeout
      acquireTimeout: 15000,
      // Max number of concurrent connections
      // for each module
      connectionLimitApi: 50,
      connectionLimitTracker: 10,
      connectionLimitPushTxApi: 5,
      connectionLimitPushTxOrchestrator: 5
    },
    /*
     * TCP Ports
     */
    ports: {
      // Port used by the API
      account: 8080,
      // Port used by pushtx
      pushtx: 8081,
      // Port used by the tracker for its notifications
      tracker: 5555,
      // Port used by pushtx for its notifications
      notifpushtx: 5556,
      // Port used by the pushtx orchestrator for its notifications
      orchestrator: 5557
    },
    /*
     * HTTPS
     * Activate only if node js is used as frontend web server
     * (no nginx proxy server)
     */
    https: {
      // HTTPS for the API
      account: {
        // Activate https
        active: false,
        // Filepath of server private key
        // (should be stored in keys/sslcert)
        keypath: '',
        // Passphrase of the private key
        passphrase: '',
        // Filepath of server certificate
        // (should be stored in keys/sslcert)
        certpath: '',
        // Filepath of CA certificate
        // (should be stored in keys/sslcert)
        capath: ''
      },
      // HTTPS for pushtx
      pushtx: {
        // Activate https
        active: false,
        // Filepath of server private key
        // (should be stored in keys/sslcert)
        keypath: '',
        // Passphrase of the private key
        passphrase: '',
        // Filepath of server certificate
        // (should be stored in keys/sslcert)
        certpath: '',
        // Filepath of CA certificate
        // (should be stored in keys/sslcert)
        capath: ''
      }
    },
    /*
     * Authenticated access to the APIs (account & pushtx)
     */
    auth: {
      // Name of the authentication strategy used
      // Available values:
      //    null          : No authentication
      //    'localApiKey' : authentication with a shared local api key
      activeStrategy: 'localApiKey',
      // Flag indicating if authenticated access is mandatory
      // (useful for launch, otherwise should be true)
      mandatory: false,
      // List of available authentication strategies
      strategies: {
        // Authentication with a shared local api key
        localApiKey: {
          // List of API keys (alphanumeric characters)
          apiKeys: ['<myApiKey>', '<myApiKey2>'],
          // Admin key (alphanumeric characters)
          adminKey: '<myAdminKey>',
          // DO NOT MODIFY
          configurator: 'localapikey-strategy-configurator'
        }
      },
      // Configuration of Json Web Tokens
      // used for the management of authorizations
      jwt: {
        // Secret passphrase used by the server to sign the jwt
        // (alphanumeric characters)
        secret: '<my_secret>',
        accessToken: {
          // Number of seconds after which the jwt expires
          expires: 600
        },
        refreshToken: {
          // Number of seconds after which the jwt expires
          expires: 7200
        }
      }
    },
    /*
     * Prefixes used by the API
     * for /support and /status endpoints
     */
    prefixes: {
      // Prefix for /support endpoint
      support: 'support',
      // Prefix for /status endpoint
      status: 'status',
      // Prefix for pushtx /status endpoint
      statusPushtx: 'status'
    },
    /*
     * Gaps used for derivation of keys
     */
    gap: {
      // Gap for derivation of external addresses
      external: 20,
      // Gap for derivation of internal (change) addresses
      internal: 20
    },
    /*
     * Multiaddr endpoint
     */
    multiaddr: {
      // Number of transactions returned by the endpoint
      transactions: 50
    },
    /*
     * Third party explorers
     * used for fast scan of addresses
     */
    explorers: {
      // Use local bitcoind for imports and rescans
      // or use OXT as a fallback
      // Values: active | inactive
      bitcoind: 'active',
      // Use a SOCKS5 proxy for all communications with external services
      // Values: null if no socks5 proxy used, otherwise the url of the socks5 proxy
      socks5Proxy: null,
      // OXT
      oxt: 'https://api.oxt.me'
    },
    /*
     * Max number of transactions per address
     * accepted during fast scan
     */
    addrFilterThreshold: 1000,
    /*
     * Pool of child processes
     * for parallel derivation of addresses
     * Be careful with these parameters ;)
     */
    addrDerivationPool: {
      // Min number of child processes always running
      minNbChildren: 2,
      // Max number of child processes allowed
      maxNbChildren: 2,
      // Max duration
      acquireTimeoutMillis: 60000,
      // Parallel derivation threshold
      // (use parallel derivation if number of addresses to be derived
      //  is greater than thresholdParallelDerivation)
      thresholdParallelDerivation: 10
    },
    /*
     * PushTx - Scheduler
     */
    txsScheduler: {
      // Max number of transactions allowed in a single script
      maxNbEntries: 10,
      // Max number of blocks allowed in the future
      maxDeltaHeight: 18
    },
    /*
     * Tracker
     */
    tracker: {
      // Processing of mempool (periodicity in ms)
      mempoolProcessPeriod: 2000,
      // Processing of unconfirmed transactions (periodicity in ms)
      unconfirmedTxsProcessPeriod: 300000
    }
  },
  /*
   * Testnet parameters
   * (same structure as the mainnet section above)
   */
  testnet: {
    bitcoind: {
      rpc: {
        user: 'user',
        pass: 'password',
        host: '127.0.0.1',
        port: 18332
      },
      zmqTx: 'tcp://127.0.0.1:19501',
      zmqBlk: 'tcp://127.0.0.1:19502',
      feeType: 'ECONOMICAL'
    },
    db: {
      user: 'user',
      pass: 'password',
      host: '127.0.0.1',
      port: 3306,
      database: 'db_name',
      acquireTimeout: 15000,
      connectionLimitApi: 5,
      connectionLimitTracker: 5,
      connectionLimitPushTxApi: 1,
      connectionLimitPushTxOrchestrator: 5
    },
    ports: {
      account: 18080,
      pushtx: 18081,
      tracker: 15555,
      notifpushtx: 15556,
      orchestrator: 15557
    },
    https: {
      account: {
        active: false,
        keypath: '',
        passphrase: '',
        certpath: '',
        capath: ''
      },
      pushtx: {
        active: false,
        keypath: '',
        passphrase: '',
        certpath: '',
        capath: ''
      }
    },
    auth: {
      activeStrategy: null,
      mandatory: false,
      strategies: {
        localApiKey: {
          apiKeys: ['<myApiKey>', '<myApiKey2>'],
          adminKey: '<myAdminKey>',
          configurator: 'localapikey-strategy-configurator'
        }
      },
      jwt: {
        secret: 'myJwtSecret',
        accessToken: {
          expires: 600
        },
        refreshToken: {
          expires: 7200
        }
      }
    },
    prefixes: {
      support: 'support',
      status: 'status',
      statusPushtx: 'status'
    },
    gap: {
      external: 20,
      internal: 20
    },
    multiaddr: {
      transactions: 50
    },
    // NOTE(review): this testnet explorers block exposes insight/btccom keys
    // and no oxt key, unlike the mainnet schema above -- verify which keys
    // the consuming code actually reads.
    explorers: {
      bitcoind: 'inactive',
      socks5Proxy: null,
      insight: [
        'https://testnet-api.example.com'
      ],
      btccom: 'https://tchain.api.btc.com/v3'
    },
    addrFilterThreshold: 1000,
    addrDerivationPool: {
      minNbChildren: 1,
      maxNbChildren: 1,
      acquireTimeoutMillis: 60000,
      thresholdParallelDerivation: 10
    },
    txsScheduler: {
      maxNbEntries: 10,
      maxDeltaHeight: 18
    },
    tracker: {
      mempoolProcessPeriod: 2000,
      unconfirmedTxsProcessPeriod: 300000
    }
  }
}

106
lib/auth/auth-rest-api.js

@ -0,0 +1,106 @@
/*!
* lib/auth/auth-rest-api.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const bodyParser = require('body-parser')
const passport = require('passport')
const network = require('../bitcoin/network')
const keys = require('../../keys/')[network.key]
const HttpServer = require('../http-server/http-server')
const authentMgr = require('./authentication-manager')
const authorzMgr = require('./authorizations-manager')
/**
 * Auth API endpoints
 * Registers the /auth/login, /auth/logout and /auth/refresh routes
 * on the HTTP server when an authentication strategy is active.
 */
class AuthRestApi {

  /**
   * Constructor
   * @param {pushtx.HttpServer} httpServer - HTTP server
   */
  constructor(httpServer) {
    this.httpServer = httpServer

    // Initialize passport
    this.httpServer.app.use(passport.initialize())

    // Check if authentication is activated
    // (== null intentionally matches both null and undefined)
    if (keys.auth.activeStrategy == null)
      return

    // Establish routes
    const urlencodedParser = bodyParser.urlencoded({ extended: true })

    this.httpServer.app.post(
      '/auth/login',
      urlencodedParser,
      authentMgr.authenticate({session: false}),
      authentMgr.serialize,
      authorzMgr.generateAuthorizations.bind(authorzMgr),
      this.login.bind(this),
      HttpServer.sendAuthError
    )

    this.httpServer.app.post(
      '/auth/logout',
      urlencodedParser,
      authorzMgr.revokeAuthorizations.bind(authorzMgr),
      this.logout.bind(this),
      HttpServer.sendAuthError
    )

    this.httpServer.app.post(
      '/auth/refresh',
      urlencodedParser,
      authorzMgr.refreshAuthorizations.bind(authorzMgr),
      this.refresh.bind(this),
      HttpServer.sendAuthError
    )
  }

  /**
   * Send the authorizations attached to the request as a JSON payload
   * (shared by the login and refresh endpoints, which had duplicated bodies)
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  _sendAuthorizations(req, res) {
    try {
      const result = {authorizations: req.authorizations}
      const ret = JSON.stringify(result, null, 2)
      HttpServer.sendRawData(res, ret)
    } catch(e) {
      HttpServer.sendError(res, e)
    }
  }

  /**
   * Login
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  login(req, res) {
    this._sendAuthorizations(req, res)
  }

  /**
   * Refresh
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  refresh(req, res) {
    this._sendAuthorizations(req, res)
  }

  /**
   * Logout
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  logout(req, res) {
    HttpServer.sendOk(res)
  }
}
module.exports = AuthRestApi

77
lib/auth/authentication-manager.js

@ -0,0 +1,77 @@
/*!
* lib/auth/authentication-manager.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const passport = require('passport')
const network = require('../bitcoin/network')
const keys = require('../../keys/')[network.key]
const errors = require('../errors')
const Logger = require('../logger')
/**
 * Singleton in charge of authenticating API users
 * through the strategy selected in the keys file.
 */
class AuthenticationManager {

  /**
   * Constructor
   */
  constructor() {
    this.activeStrategyName = ''
    this.activeStrategy = null
    // Configure the authentication strategy
    this._configureStrategy()
  }

  /**
   * Configure the active strategy
   * Loads the configurator module declared for the strategy
   * and lets it register itself with passport.
   */
  _configureStrategy() {
    const strategyName = keys.auth.activeStrategy

    // Nothing to configure when authentication is disabled
    if (!strategyName)
      return

    this.activeStrategyName = strategyName

    try {
      const strategyParams = keys.auth.strategies[strategyName]
      const Configurator = require(`./${strategyParams.configurator}`)
      if (Configurator) {
        this.activeStrategy = new Configurator()
        this.activeStrategy.configure()
        Logger.info(`Authentication strategy ${strategyName} successfully configured`)
      }
    } catch(e) {
      Logger.error(e, errors.auth.INVALID_CONF)
    }
  }

  /**
   * Authenticate a user with the active strategy
   * @param {Object} options - options forwarded to passport
   */
  authenticate(options) {
    return passport.authenticate(this.activeStrategyName, options)
  }

  /**
   * Middleware marking the request's user as authenticated
   * @param {Object} req - http request object
   * @param {Object} res - http response object
   * @param {function} next - callback
   */
  serialize(req, res, next) {
    // == null intentionally matches both null and undefined
    if (req.user == null) {
      req.user = {}
    }
    req.user['authenticated'] = true
    next()
  }
}
module.exports = new AuthenticationManager()

296
lib/auth/authorizations-manager.js

@ -0,0 +1,296 @@
/*!
* lib/auth/authorizations-manager.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const validator = require('validator')
const jwt = require('jsonwebtoken')
const network = require('../bitcoin/network')
const keys = require('../../keys/')[network.key]
const errors = require('../errors')
const Logger = require('../logger')
/**
* A singleton managing authorizations the API
*/
class AuthorizationsManager {
/**
 * Constructor
 * Reads the JWT configuration from the active keys file.
 * If the configuration is missing or malformed, the signing secret is
 * nulled and the error is logged instead of crashing the process.
 */
constructor() {
  try {
    // Constants
    this.ISS = 'Samourai Wallet backend'
    this.TOKEN_TYPE_ACCESS = 'access-token'
    this.TOKEN_TYPE_REFRESH = 'refresh-token'
    this.TOKEN_PROFILE_API = 'api'
    this.TOKEN_PROFILE_ADMIN = 'admin'
    // Authentication is considered active when a strategy is configured
    this.authActive = (keys.auth.activeStrategy != null)
    // Secret used to sign/verify the JWTs
    this._secret = keys.auth.jwt.secret
    this.isMandatory = keys.auth.mandatory
    // Token lifetimes, in seconds
    this.accessTokenExpires = keys.auth.jwt.accessToken.expires
    this.refreshTokenExpires = keys.auth.jwt.refreshToken.expires
  } catch(e) {
    this._secret = null
    Logger.error(e, errors.auth.INVALID_CONF)
  }
}
/**
* Middleware generating authorization token
* @param {Object} req - http request object
* @param {Object} res - http response object
* @param {function} next - callback
*/
generateAuthorizations(req, res, next) {
if (!(req.user && req.user.authenticated))
return next(errors.auth.TECH_ISSUE)
// Generates an access token
const accessToken = this._generateAccessToken(req.user)
// Generates a refresh token
const refreshToken = this._generateRefreshToken(req.user)
// Stores the tokens in the request
req.authorizations = {
access_token: accessToken,
refresh_token: refreshToken
}
next()
}
/**
* Middleware refreshing authorizations
* @param {Object} req - http request object
* @param {Object} res - http response object
* @param {function} next - callback
*/
refreshAuthorizations(req, res, next) {
// Check if authentication is activated
if (!this.authActive)
return next()
// Authentication is activated
// A refresh token is required
const refreshToken = this._extractRefreshToken(req)
if (!refreshToken)
return next(errors.auth.MISSING_JWT)
try {
const decodedRefrehToken = this._verifyRefreshToken(refreshToken)
if (req.user == null)
req.user = {}
req.user['profile'] = decodedRefrehToken['prf']
} catch(e) {
Logger.error(e, `${errors.auth.INVALID_JWT}: ${refreshToken}`)
return next(errors.auth.INVALID_JWT)
}
// Generates a new access token
const accessToken = this._generateAccessToken(req.user)
// Stores the access token in the request
req.authorizations = {
access_token: accessToken
}
next()
}
/**
* Middleware revoking authorizations
* @param {Object} req - http request object
* @param {Object} res - http response object
* @param {function} next - callback
*/
revokeAuthorizations(req, res, next) {
// Nothing to do (for now)
}
/**
* Middleware checking if user is authenticated
* @param {Object} req - http request object
* @param {Object} res - http response object
* @param {function} next - callback
* @returns {boolean} returns true if user is authenticated, false otherwise
*/
checkAuthentication(req, res, next) {
// Check if authentication is activated
if (!this.authActive)
return next()
// Authentication is activated
// A JSON web token is required
const token = this._extractAccessToken(req)
if (this.isMandatory || token) {
try {
const decodedToken = this.isAuthenticated(token)
req.authorizations = {decoded_access_token: decodedToken}
next()
} catch (e) {
return next(e)
}
} else {
next()
}
}
/**
* Middleware checking if user is authenticated and has admin profile
* @param {Object} req - http request object
* @param {Object} res - http response object
* @param {function} next - callback
* @returns {boolean} returns true if user is authenticated and has admin profile, false otherwise
*/
checkHasAdminProfile(req, res, next) {
// Check if authentication is activated
if (!this.authActive)
return next()
// Authentication is activated
// A JSON web token is required
const token = this._extractAccessToken(req)
try {
const decodedToken = this.isAuthenticated(token)
if (decodedToken['prf'] == this.TOKEN_PROFILE_ADMIN) {
req.authorizations = {decoded_access_token: decodedToken}
next()
} else {
return next(errors.auth.INVALID_PRF)
}
} catch (e) {
return next(e)
}
}
/**
* Check if user is authenticated
* (i.e. we have received a valid json web token)
* @param {string} token - json web token
* @returns {boolean} returns the decoded token if valid
* throws an exception otherwise
*/
isAuthenticated(token) {
if (!token) {
Logger.error(null, `${errors.auth.MISSING_JWT}`)
throw errors.auth.MISSING_JWT
}
try {
return this._verifyAccessToken(token)
} catch(e) {
//Logger.error(e, `${errors.auth.INVALID_JWT}: ${token}`)
throw errors.auth.INVALID_JWT
}
}
/**
* Generate an access token
* @param {Object} user - user's information
* @returns {Object} returns a json web token
*/
_generateAccessToken(user) {
// Builds claims
const claims = {
'iss': this.ISS,
'type': this.TOKEN_TYPE_ACCESS,
'prf': user['profile']
}
// Builds and signs the access token
return jwt.sign(
claims,
this._secret,
{expiresIn: this.accessTokenExpires}
)
}
/**
* Extract the access token from the http request
* @param {Object} req - http request object
* @returns {Object} returns the json web token
*/
_extractAccessToken(req) {
if (req.body && req.body.at && validator.isJWT(req.body.at))
return req.body.at
if (req.query && req.query.at && validator.isJWT(req.query.at))
return req.query.at
return null
}
/**
* Verify an access token
* @param {Object} token - json web token
* @returns {Object} payload of the json web token
*/
_verifyAccessToken(token) {
const payload = jwt.verify(token, this._secret, {})
if (payload['type'] != this.TOKEN_TYPE_ACCESS)
throw errors.auth.INVALID_JWT
return payload
}
/**
* Generate an refresh token
* @param {Object} user - user's information
* @returns {Object} returns a json web token
*/
_generateRefreshToken(user) {
// Builds claims
const claims = {
'iss': this.ISS,
'type': this.TOKEN_TYPE_REFRESH,
'prf': user['profile']
}
// Builds and signs the access token
return jwt.sign(
claims,
this._secret,
{expiresIn: this.refreshTokenExpires}
)
}
/**
* Extract the refresh token from the http request
* @param {Object} req - http request object
* @returns {Object} returns the json web token
*/
_extractRefreshToken(req) {
if (req.body && req.body.rt && validator.isJWT(req.body.rt))
return req.body.rt
if (req.query && req.query.rt && validator.isJWT(req.query.rt))
return req.query.rt
return null
}
/**
* Verify a refresh token
* @param {Object} token - json web token
* @returns {Object} payload of the json web token
*/
_verifyRefreshToken(token) {
const payload = jwt.verify(token, this._secret, {})
if (payload['type'] != this.TOKEN_TYPE_REFRESH)
throw errors.auth.INVALID_JWT
return payload
}
}
module.exports = new AuthorizationsManager()

62
lib/auth/localapikey-strategy-configurator.js

@ -0,0 +1,62 @@
/*!
* lib/auth/localapikey-strategy-configurator.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const passport = require('passport')
const Strategy = require('passport-localapikey-update').Strategy
const network = require('../bitcoin/network')
const keys = require('../../keys/')[network.key]
const errors = require('../errors')
const Logger = require('../logger')
const authorzMgr = require('./authorizations-manager')
/**
 * A Passport configurator for a local API key strategy
 */
class LocalApiKeyStrategyConfigurator {

  /**
   * Constructor
   */
  constructor() {}

  /**
   * Register the strategy with passport under its well-known name
   */
  configure() {
    const strategy = new Strategy({apiKeyField: 'apikey'}, this.authenticate)
    passport.use(LocalApiKeyStrategyConfigurator.NAME, strategy)
  }

  /**
   * Authentication
   * @param {string} apiKey - api key received
   * @param {function} done - callback (error, user)
   */
  authenticate(apiKey, done) {
    const _adminKey = keys.auth.strategies[LocalApiKeyStrategyConfigurator.NAME].adminKey
    const _apiKeys = keys.auth.strategies[LocalApiKeyStrategyConfigurator.NAME].apiKeys
    // NOTE(review): plain string comparisons are not timing-safe;
    // consider crypto.timingSafeEqual for key checks
    if (apiKey === _adminKey) {
      // Received key is the admin key
      Logger.info('Successful authentication with an admin key')
      return done(null, {'profile': authorzMgr.TOKEN_PROFILE_ADMIN})
    } else if (_apiKeys.indexOf(apiKey) >= 0) {
      // Received key is one of the regular api keys
      Logger.info('Successful authentication with an api key')
      return done(null, {'profile': authorzMgr.TOKEN_PROFILE_API})
    } else {
      Logger.error(null, `Authentication failure (apikey=${apiKey})`)
      return done('Invalid API key', false)
    }
  }
}
LocalApiKeyStrategyConfigurator.NAME = 'localApiKey'
module.exports = LocalApiKeyStrategyConfigurator

106
lib/bitcoin/addresses-helper.js

@ -0,0 +1,106 @@
/*!
* lib/bitcoin/addresses-helper.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const bitcoin = require('bitcoinjs-lib')
const btcMessage = require('bitcoinjs-message')
const activeNet = require('./network').network
const Logger = require('../logger')
/**
 * A singleton providing Addresses helper functions
 */
class AddressesHelper {

  /**
   * Derives a P2PKH address from a public key
   * @param {Buffer} pubKeyBuffer - Buffer storing a public key
   * @returns {string} return the derived address
   */
  p2pkhAddress(pubKeyBuffer) {
    const pubKeyHash = bitcoin.crypto.hash160(pubKeyBuffer)
    return bitcoin.address.toBase58Check(pubKeyHash, activeNet.pubKeyHash)
  }

  /**
   * Derives a P2WPKH-P2SH address from a public key
   * @param {Buffer} pubKeyBuffer - Buffer storing a public key
   * @returns {string} return the derived address
   */
  p2wpkhP2shAddress(pubKeyBuffer) {
    const pubKeyHash = bitcoin.crypto.hash160(pubKeyBuffer)
    // P2SH script wrapping the p2wpkh witness program
    const witnessProgram = bitcoin.script.witnessPubKeyHash.output.encode(pubKeyHash)
    const scriptPubKey = bitcoin.crypto.hash160(witnessProgram)
    const outputScript = bitcoin.script.scriptHash.output.encode(scriptPubKey)
    return bitcoin.address.fromOutputScript(outputScript, activeNet)
  }

  /**
   * Derives a P2WPKH (bech32) address from a public key
   * @param {Buffer} pubKeyBuffer - Buffer storing a public key
   * @returns {string} return the derived address
   */
  p2wpkhAddress(pubKeyBuffer) {
    const pubKeyHash = bitcoin.crypto.hash160(pubKeyBuffer)
    const outputScript = bitcoin.script.witnessPubKeyHash.output.encode(pubKeyHash)
    // Normalize to lowercase (bech32 is case-insensitive)
    return bitcoin.address.fromOutputScript(outputScript, activeNet).toLowerCase()
  }

  /**
   * Verify the signature of a given message
   * @param {string} msg - signed message
   * @param {string} address - address used to sign the message
   * @param {string} sig - signature of the message
   * @returns {boolean} returns true if signature is valid, otherwise false
   */
  verifySignature(msg, address, sig) {
    try {
      const prefix = activeNet.messagePrefix
      return btcMessage.verify(msg, prefix, address, sig)
    } catch(e) {
      // Malformed signature or address => treat as invalid rather than crash
      return false
    }
  }

  /**
   * Checks if a string seems like a supported pubkey
   * (66 hex chars with a 02/03 compressed-key prefix)
   * @param {string} str - string
   * @returns {boolean} return true if str is a supported pubkey format, false otherwise
   */
  isSupportedPubKey(str) {
    return (str.length === 66 && (str.startsWith('02') || str.startsWith('03')))
  }

  /**
   * Check if string is a Bech32 address
   * @param {string} str - string to be checked
   * @returns {boolean} return true if str is a Bech32 address, false otherwise
   */
  isBech32(str) {
    try {
      bitcoin.address.fromBech32(str)
      return true
    } catch(e) {
      return false
    }
  }

  /**
   * Get the script hash associated to a Bech32 address
   * @param {string} str - bech32 address
   * @returns {?string} script hash in hex format, or null on invalid input
   */
  getScriptHashFromBech32(str) {
    try {
      return bitcoin.address.fromBech32(str).data.toString('hex')
    } catch(e) {
      // Bug fix: Logger was used here without being imported, turning this
      // error path into a ReferenceError (require added at top of file)
      Logger.error(e, 'AddressesHelper.getScriptHashFromBech32()')
      return null
    }
  }
}
module.exports = new AddressesHelper()

44
lib/bitcoin/addresses-service.js

@ -0,0 +1,44 @@
/*!
* lib/bitcoin/addresses-service.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const db = require('../db/mysql-db-wrapper')
const remote = require('../remote-importer/remote-importer')
/**
 * A singleton providing an Addresses service
 */
class AddressesService {

  /**
   * Constructor
   */
  constructor() {}

  /**
   * Rescan the blockchain for an address
   * @param {string} address - bitcoin address
   * @returns {Promise}
   */
  async rescan(address) {
    const hdaccount = await db.getUngroupedHDAccountsByAddresses([address])
    // Don't filter addresses associated to an HDAccount
    const filterAddr = !(hdaccount.length > 0 && hdaccount[0]['hdID'])
    return remote.importAddresses([address], filterAddr)
  }

  /**
   * Restore addresses in db
   * @param {string[]} addresses - array of bitcoin addresses
   * @param {boolean} filterAddr - true if addresses should be filtered, false otherwise
   * @returns {Promise}
   */
  async restoreAddresses(addresses, filterAddr) {
    // Fix: parameter renamed to match its documented plural, array meaning
    return remote.importAddresses(addresses, filterAddr)
  }
}
module.exports = new AddressesService()

400
lib/bitcoin/hd-accounts-helper.js

@ -0,0 +1,400 @@
/*!
* lib/bitcoin/hd-accounts-helper.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const cp = require('child_process')
const LRU = require('lru-cache')
const bitcoin = require('bitcoinjs-lib')
const bs58check = require('bs58check')
const bs58 = require('bs58')
const errors = require('../errors')
const Logger = require('../logger')
const ForkPool = require('../fork-pool')
const network = require('./network')
const activeNet = network.network
const keys = require('../../keys/')[network.key]
const addrHelper = require('./addresses-helper')
/**
 * A singleton providing HD Accounts helper functions
 */
class HDAccountsHelper {

  /**
   * Constructor
   */
  constructor() {
    // HD accounts types
    this.BIP44 = 0
    this.BIP49 = 1
    this.BIP84 = 2
    // Flag bit marking a locked account (stored OR-ed with the type in db)
    this.LOCKED = 1<<7

    // Magic numbers (version bytes) of the supported extended key formats
    this.MAGIC_XPUB = 0x0488b21e
    this.MAGIC_TPUB = 0x043587cf
    this.MAGIC_YPUB = 0x049d7cb2
    this.MAGIC_UPUB = 0x044a5262
    this.MAGIC_ZPUB = 0x04b24746
    this.MAGIC_VPUB = 0x045f1cf6

    // HD accounts cache
    // xpub => [external chain node, internal chain node, parent node]
    this.nodes = LRU({
      // Maximum number of nodes to store in cache
      max: 1000,
      // Function used to compute length of item
      length: (n, key) => 1,
      // Maximum age for items in the cache. Items do not expire
      maxAge: Infinity
    })

    // Default = external addresses derivation deactivated
    this.externalDerivationActivated = false
    this.derivationPool = null
  }

  /**
   * Activate external derivation of addresses
   * (provides improved performances)
   */
  activateExternalDerivation() {
    // Pool of child processes used for derivation of addresses
    const poolKeys = keys.addrDerivationPool
    this.derivationPool = new ForkPool(
      `${__dirname}/parallel-address-derivation.js`,
      {
        networkKey: network.key,
        max: poolKeys.maxNbChildren,
        min: poolKeys.minNbChildren,
        acquireTimeoutMillis: poolKeys.acquireTimeoutMillis
      }
    )
    this.externalDerivationActivated = true
  }

  /**
   * Check if a string encodes a xpub/tpub
   * @param {string} xpub - extended public key to be checked
   * @returns {boolean} returns true if xpub encodes a xpub/tpub, false otherwise
   */
  isXpub(xpub) {
    return xpub.startsWith('xpub') || xpub.startsWith('tpub')
  }

  /**
   * Check if a string encodes a ypub/upub
   * @param {string} xpub - extended public key to be checked
   * @returns {boolean} returns true if xpub encodes a ypub/upub, false otherwise
   */
  isYpub(xpub) {
    return xpub.startsWith('ypub') || xpub.startsWith('upub')
  }

  /**
   * Check if a string encodes a zpub/vpub
   * @param {string} xpub - extended public key to be checked
   * @returns {boolean} returns true if xpub encodes a zpub/vpub, false otherwise
   */
  isZpub(xpub) {
    return xpub.startsWith('zpub') || xpub.startsWith('vpub')
  }

  /**
   * Translates
   * - a xpub/ypub/zpub into a xpub
   * - a tpub/upub/vpub into a tpub
   * @param {string} xpub - extended public key to be translated
   * @returns {string} returns the translated extended public key
   * ('' when the version bytes aren't recognized)
   */
  xlatXPUB(xpub) {
    const decoded = bs58check.decode(xpub)
    const ver = decoded.readInt32BE(0)

    switch (ver) {
      // Already in the target format => return unchanged
      case this.MAGIC_XPUB:
      case this.MAGIC_TPUB:
        return xpub
      // ypub/zpub => xpub
      case this.MAGIC_YPUB:
      case this.MAGIC_ZPUB:
        decoded.writeInt32BE(this.MAGIC_XPUB, 0)
        break
      // upub/vpub => tpub
      case this.MAGIC_UPUB:
      case this.MAGIC_VPUB:
        decoded.writeInt32BE(this.MAGIC_TPUB, 0)
        break
      default:
        // Unsupported version bytes
        return ''
    }

    // bs58check.encode re-appends the 4-byte double-sha256 checksum,
    // replacing the manual checksum computation of the previous version
    // (which also allocated an unused 4-byte scratch buffer)
    return bs58check.encode(decoded)
  }

  /**
   * Classify the hd account type retrieved from db
   * @param {integer} v - HD Account type (db encoding)
   * @returns {object} object storing the type and lock status of the hd account
   */
  classify(v) {
    const ret = {
      type: null,
      locked: false,
    }

    let p = v
    // Extract the lock flag
    if (p >= this.LOCKED) {
      ret.locked = true
      p -= this.LOCKED
    }

    // type stays null for unknown encodings
    switch (p) {
      case this.BIP44:
      case this.BIP49:
      case this.BIP84:
        ret.type = p
        break
    }

    return ret
  }

  /**
   * Encode hd account type and lock status in db format
   * @param {integer} type - HD Account type (db encoding)
   * @param {boolean} locked - lock status of the hd account
   * @returns {integer}
   */
  makeType(type, locked) {
    // Strip a pre-existing lock flag before re-applying it
    let p =
      (type >= this.LOCKED)
        ? type - this.LOCKED
        : type

    locked = !!locked
    if (locked)
      p += this.LOCKED

    return p
  }

  /**
   * Return a string representation of the hd account type
   * @param {integer} v - HD Account type (db encoding)
   * @returns {string}
   */
  typeString(v) {
    const info = this.classify(v)
    const prefix = info.locked ? 'LOCKED ' : ''

    let suffix = ''
    switch (info.type) {
      case this.BIP44:
        suffix = 'BIP44'
        break
      case this.BIP49:
        suffix = 'BIP49'
        break
      case this.BIP84:
        suffix = 'BIP84'
        break
      default:
        suffix = 'UNKNOWN'
        break
    }

    return prefix + suffix
  }

  /**
   * Checks if a hd account is a valid hdnode
   * @param {string} xpub - hd account
   * @returns {boolean} returns true if hd account is valid, false otherwise
   * @throws errors.xpub.PRIVKEY when the key is actually a private key
   */
  isValid(xpub) {
    if (this.nodes.has(xpub))
      return true

    try {
      // Translate the xpub
      const xlatedXpub = this.xlatXPUB(xpub)
      // Parse input as an HD Node. Throws if invalid
      const node = bitcoin.HDNode.fromBase58(xlatedXpub, activeNet)
      // Check and see if this is a private key
      if (!node.isNeutered())
        throw errors.xpub.PRIVKEY
      // Store the external and internal chain nodes in the proper indices.
      // Store the parent node as well, at index 2.
      this.nodes.set(xpub, [node.derive(0), node.derive(1), node])
      return true
    } catch(e) {
      if (e === errors.xpub.PRIVKEY) throw e
      return false
    }
  }

  /**
   * Get the hd node associated to an hd account
   * @param {string} xpub - hd account
   * @returns {?Array} [external chain node, internal chain node, parent node]
   * or null when the xpub is invalid
   */
  getNode(xpub) {
    if (this.isValid(xpub))
      return this.nodes.get(xpub)
    else
      return null
  }

  /**
   * Derives an address for an hd account
   * @param {int} chain - chain to be derived
   * must have a value on [0,1] for BIP44/BIP49/BIP84 derivation
   * @param {HDNode} chainNode - Parent HDNode used for derivation
   * @param {int} index - index to be derived
   * @param {int} type - type of derivation
   * @returns {Promise<object>} returns an object {address: '...', chain: <int>, index: <int>}
   */
  async deriveAddress(chain, chainNode, index, type) {
    // Derive M/chain/index
    const indexNode = chainNode.derive(index)

    const addr = {
      chain: chain,
      index: index
    }

    switch (type) {
      case this.BIP44:
        addr.address = indexNode.getAddress()
        break
      case this.BIP49:
        addr.address = addrHelper.p2wpkhP2shAddress(indexNode.getPublicKeyBuffer())
        break
      case this.BIP84:
        addr.address = addrHelper.p2wpkhAddress(indexNode.getPublicKeyBuffer())
        break
    }

    return addr
  }

  /**
   * Derives addresses for an hd account
   * @param {string} xpub - hd account to be derived
   * @param {int} chain - chain to be derived
   * must have a value on [0,1] for BIP44/BIP49/BIP84 derivation
   * @param {int[]} indices - array of indices to be derived
   * @param {int} type - type of derivation
   * @returns {Promise<object[]>} array of {address: '...', chain: <int>, index: <int>}
   */
  async deriveAddresses(xpub, chain, indices, type) {
    try {
      const node = this.getNode(xpub)

      if (node === null)
        throw errors.xpub.INVALID

      if (chain > 1 || chain < 0)
        throw errors.xpub.CHAIN

      // Default to an unlocked BIP44 derivation
      if (typeof type == 'undefined')
        type = this.makeType(this.BIP44, false)

      const info = this.classify(type)

      // Node at M/chain
      const chainNode = node[chain]

      // Optimization: if number of addresses is beyond a given threshold
      // derivation is done in a child process
      if (
        !this.externalDerivationActivated
        || indices.length <= keys.addrDerivationPool.thresholdParallelDerivation
      ) {
        // Few addresses to be derived or external derivation deactivated
        // Let's do it here
        const promises = indices.map(index => {
          return this.deriveAddress(chain, chainNode, index, info.type)
        })
        return Promise.all(promises)
      } else {
        // Many addresses to be derived
        // Let's do it in a child process
        const data = {
          xpub: this.xlatXPUB(xpub),
          chain: chain,
          indices: indices,
          type: info.type
        }
        const msg = await this.derivationPool.enqueue(data)
        // Bug fix: previous code used an assignment (msg.status = 'ok'),
        // which always succeeded and masked child process failures
        if (msg.status === 'ok')
          return msg.addresses
        Logger.error(null, 'A problem was met during parallel addresses derivation')
        throw msg.error
      }
    } catch(e) {
      return Promise.reject(e)
    }
  }
}
module.exports = new HDAccountsHelper()

250
lib/bitcoin/hd-accounts-service.js

@ -0,0 +1,250 @@
/*!
* lib/bitcoin/hd-accounts-service.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const _ = require('lodash')
const errors = require('../errors')
const Logger = require('../logger')
const db = require('../db/mysql-db-wrapper')
const network = require('../bitcoin/network')
const gap = require('../../keys/')[network.key].gap
const remote = require('../remote-importer/remote-importer')
const hdaHelper = require('./hd-accounts-helper')
const addrHelper = require('./addresses-helper')
/**
 * A singleton providing a HD Accounts service
 */
class HDAccountsService {
/**
 * Constructor
 */
constructor() {}
/**
 * Create a new hd account in db
 * Derives and stores the initial pool of addresses for the account
 * @param {string} xpub - xpub
 * @param {int} scheme - derivation scheme
 * @returns {Promise} resolves to true on success, rejects with a normalized error otherwise
 */
async createHdAccount(xpub, scheme) {
try {
await this.newHdAccount(xpub, scheme)
return true
} catch(e) {
// Pass through "expected" errors (invalid/private/locked xpub),
// map anything else to the generic CREATE error
const isInvalidXpub = (e == errors.xpub.INVALID || e == errors.xpub.PRIVKEY)
const isLockedXpub = (e == errors.xpub.LOCKED)
const err = (isInvalidXpub || isLockedXpub) ? e : errors.xpub.CREATE
// NOTE(review): message concatenates the error object; relies on its toString()
Logger.error(e, 'HdAccountsService.createHdAccount()' + err)
return Promise.reject(err)
}
}
/**
 * Restore a hd account in db
 * Preserves the lock status of an account that already existed in db
 * @param {string} xpub - xpub
 * @param {int} scheme - derivation scheme
 * @param {bool} forceOverride - force override of scheme even if hd account is locked
 * @returns {Promise}
 */
async restoreHdAccount(xpub, scheme, forceOverride) {
let isLocked
// Check if hd accounts exists in db and is locked
try {
const account = await db.getHDAccount(xpub)
const info = hdaHelper.classify(account.hdType)
isLocked = info.locked
} catch(e) {} // account not in db yet => isLocked stays undefined
// Override derivation scheme if needed
await this.derivationOverrideCheck(xpub, scheme, forceOverride)
// Import the hd account
await remote.importHDAccount(xpub, scheme)
// Lock the hd account if needed (restore the pre-existing lock status)
if (isLocked)
return this.lockHdAccount(xpub, true)
}
/**
 * Lock a hd account
 * @param {string} xpub - xpub
 * @param {boolean} lock - true for locking, false for unlocking
 * @returns {Promise} returns the derivation type as a string
 */
async lockHdAccount(xpub, lock) {
try {
const account = await db.getHDAccount(xpub)
const hdType = account.hdType
const info = hdaHelper.classify(hdType)
// Already in the requested state => nothing to write
if (info.locked === lock)
return hdaHelper.typeString(hdType)
await db.setLockHDAccountType(xpub, lock)
const type = hdaHelper.makeType(hdType, lock)
return hdaHelper.typeString(type)
} catch(e) {
// Distinguish "unknown xpub" from other db failures
const err = (e == errors.db.ERROR_NO_HD_ACCOUNT) ? errors.get.UNKNXPUB : errors.generic.DB
return Promise.reject(err)
}
}
/**
 * Delete a hd account
 * @param {string} xpub - xpub
 * @returns {Promise}
 */
async deleteHdAccount(xpub) {
try {
await db.deleteHDAccount(xpub)
} catch(e) {
// Distinguish "unknown xpub" from other db failures
const err = (e == errors.db.ERROR_NO_HD_ACCOUNT) ? errors.get.UNKNXPUB : errors.generic.DB
return Promise.reject(err)
}
}
/**
 * Create a new xpub in db
 * @param {string} xpub - xpub
 * @param {string} scheme - derivation scheme
 * @returns {Promise}
 */
async newHdAccount(xpub, scheme) {
// Get the HDNode bitcoinjs object.
// Throws if xpub is actually a private key
const HDNode = hdaHelper.getNode(xpub)
if (HDNode === null)
throw errors.xpub.INVALID
await this.derivationOverrideCheck(xpub, scheme)
await db.ensureHDAccountId(xpub, scheme)
let segwit = ''
if (scheme == hdaHelper.BIP49)
segwit = ' SegWit (BIP49)'
else if (scheme == hdaHelper.BIP84)
segwit = ' SegWit (BIP84)'
Logger.info(`Created HD Account: ${xpub}${segwit}`)
// Kick off external and internal pool derivation in parallel, then await both
const externalPrm = hdaHelper.deriveAddresses(xpub, 0, _.range(gap.external), scheme)
const internalPrm = hdaHelper.deriveAddresses(xpub, 1, _.range(gap.internal), scheme)
const external = await externalPrm
const internal = await internalPrm
const addresses = _.flatten([external, internal])
return db.addAddressesToHDAccount(xpub, addresses)
}
/**
 * Rescan the blockchain for a hd account
 * @param {string} xpub - xpub
 * @param {integer} gapLimit - (optional) gap limit for derivation
 * @param {integer} startIndex - (optional) rescan shall start from this index
 * @returns {Promise}
 */
async rescan(xpub, gapLimit, startIndex) {
// Force rescan
remote.clearGuard(xpub)
try {
const account = await db.getHDAccount(xpub)
await remote.importHDAccount(xpub, account.hdType, gapLimit, startIndex)
} catch(e) {
return Promise.reject(e)
}
}
/**
 * Check if we try to override an existing xpub
 * Delete the old xpub from db if it's the case
 * @param {string} xpub - xpub
 * @param {string} scheme - derivation scheme
 * @param {boolean} forceOverride - force override of scheme even if hd account is locked
 * (default = false)
 * @returns {Promise}
 */
async derivationOverrideCheck(xpub, scheme, forceOverride) {
let account
// Nothing to do here if hd account doesn't exist in db
try {
account = await db.getHDAccount(xpub)
} catch(e) {
return Promise.resolve()
}
try {
const info = hdaHelper.classify(account.hdType)
// If this account is already known in the database,
// check for a derivation scheme mismatch
if (info.type != scheme) {
if (info.locked && !forceOverride) {
Logger.info(`Attempted override on locked account: ${xpub}`)
return Promise.reject(errors.xpub.LOCKED)
} else {
Logger.info(`Derivation scheme override: ${xpub}`)
return db.deleteHDAccount(xpub)
}
}
} catch(e) {
Logger.error(e, 'HDAccountsService.derivationOverrideCheck()')
return Promise.reject(e)
}
}
/**
 * Verify that a given message has been signed
 * with the first external key of a known xpub/ypub/zpub
 *
 * @param {string} xpub - xpub
 * @param {string} address - address used to sign the message
 * @param {string} sig - signature of the message
 * @param {string} msg - signed message
 * @param {integer} scheme - derivation scheme to be used for the xpub
 * @returns {Promise} returns the xpub if signature is valid, otherwise returns an error
 */
async verifyXpubSignature(xpub, address, sig, msg, scheme) {
// Derive addresses (P2PKH addresse used for signature + expected address)
// NOTE(review): derivation uses chain 1, index 0 although the docblock above
// mentions the "first external key" - confirm the intended derivation path
const sigAddressRecord = await hdaHelper.deriveAddresses(xpub, 1, [0], hdaHelper.BIP44)
const sigAddress = sigAddressRecord[0].address
const expectedAddressRecord = await hdaHelper.deriveAddresses(xpub, 1, [0], scheme)
const expectedAddress = expectedAddressRecord[0].address
try {
// Check that xpub exists in db
await db.getHDAccountId(xpub)
// Check the signature
if (!addrHelper.verifySignature(msg, sigAddress, sig))
return Promise.reject(errors.sig.INVSIG)
// Check that adresses match
if (address != expectedAddress)
return Promise.reject(errors.sig.INVADDR)
// Return the corresponding xpub
return xpub
} catch(err) {
// Distinguish "unknown xpub" from other db failures
const ret = (err == errors.db.ERROR_NO_HD_ACCOUNT) ? errors.get.UNKNXPUB : errors.generic.DB
return Promise.reject(ret)
}
}
}
module.exports = new HDAccountsService()

44
lib/bitcoin/network.js

@ -0,0 +1,44 @@
/*!
* lib/bitcoin/network.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const bitcoin = require('bitcoinjs-lib')
/**
* A set of keywords encoding for testnet
*/
const TESTNET_KEY = [
'testnet',
'testing',
'test'
]
/**
 * A singleton determining which network to run: bitcoin or testnet
 */
class Network {

  /**
   * Constructor
   * Defaults to mainnet; switches to testnet when one of the
   * TESTNET_KEY keywords appears among the command-line arguments
   */
  constructor() {
    // Calling like 'node file.js arg1 arg2' => keywords live at index > 1
    const testnetRequested = TESTNET_KEY.some(kw => process.argv.indexOf(kw) > 1)
    if (testnetRequested) {
      this.key = 'testnet'
      this.network = bitcoin.networks.testnet
    } else {
      this.key = 'bitcoin'
      this.network = bitcoin.networks.bitcoin
    }
  }
}
module.exports = new Network()

92
lib/bitcoin/parallel-address-derivation.js

@ -0,0 +1,92 @@
/*!
* lib/bitcoin/parallel-address-derivation.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const bitcoin = require('bitcoinjs-lib')
const errors = require('../errors')
const activeNet = require('./network').network
const addrHelper = require('./addresses-helper')
/**
* Constants duplicated from HDAccountsHelper
*/
const BIP44 = 0
const BIP49 = 1
const BIP84 = 2
/**
 * Derives an address for an hd account
 * @param {int} chain - chain to be derived
 * must have a value on [0,1] for BIP44/BIP49/BIP84 derivation
 * @param {HDNode} chainNode - Parent HDNode used for derivation
 * @param {int} index - index to be derived
 * @param {int} type - type of derivation
 * @returns {Promise<object>} returns an object {address: '...', chain: <int>, index: <int>}
 */
const deriveAddress = async function(chain, chainNode, index, type) {
  // Derive M/chain/index
  const indexNode = chainNode.derive(index)

  const result = {
    chain: chain,
    index: index
  }

  // Pick the address encoding matching the derivation type
  if (type === BIP44) {
    result.address = indexNode.getAddress()
  } else if (type === BIP49) {
    result.address = addrHelper.p2wpkhP2shAddress(indexNode.getPublicKeyBuffer())
  } else if (type === BIP84) {
    result.address = addrHelper.p2wpkhAddress(indexNode.getPublicKeyBuffer())
  }

  return result
}
/**
 * Receive message from parent process
 * Expects msg = {xpub, chain, indices, type}; xpub is expected to be
 * already translated to xpub/tpub format by the caller
 * Replies with {status: 'ok'|'error', addresses, error?}
 */
process.on('message', async (msg) => {
try {
const xpub = msg.xpub
const chain = msg.chain
const indices = msg.indices
const type = msg.type
// Parse input as an HD Node. Throws if invalid
const node = bitcoin.HDNode.fromBase58(xpub, activeNet)
// Check and see if this is a private key
if (!node.isNeutered())
throw errors.xpub.PRIVKEY
// Node at M/chain, parent of all requested indices
const chainNode = node.derive(chain)
const promises = indices.map(index => {
return deriveAddress(chain, chainNode, index, type)
})
// Derive all requested addresses concurrently
const addresses = await Promise.all(promises)
// Send response to parent process
process.send({
status: 'ok',
addresses: addresses
})
} catch(e) {
// NOTE(review): Error instances serialize to '{}' over IPC;
// consider sending e.message so the parent can log something useful
process.send({
status: 'error',
addresses: [],
error: e
})
}
})

71
lib/bitcoind-rpc/fees.js

@ -0,0 +1,71 @@
/*!
* lib/bitcoind-rpc/fees.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const util = require('../util')
const errors = require('../errors')
const Logger = require('../logger')
const network = require('../bitcoin/network')
const keys = require('../../keys')[network.key]
const RpcClient = require('./rpc-client')
const latestBlock = require('./latest-block')
/**
 * A singleton providing information about network fees
 */
class Fees {

  /**
   * Constructor
   */
  constructor() {
    // Height of the block for which fees were last computed
    this.block = -1
    // Confirmation targets (in blocks) tracked by the service
    this.targets = [2, 4, 6, 12, 24]
    // Mapping of target => fee rate
    this.fees = {}
    this.feeType = keys.bitcoind.feeType
    this.rpcClient = new RpcClient()
    // Warm up the cache
    // (fix: handle the rejection instead of leaving a floating promise)
    this.refresh().catch(e => Logger.error(e, 'Fees()'))
  }

  /**
   * Refresh and return the current fees
   * @returns {Promise} resolves to the {target: feerate} mapping
   */
  async getFees() {
    try {
      // Only recompute when a new block has been mined
      if (latestBlock.height > this.block)
        await this.refresh()
      return this.fees
    } catch(err) {
      return Promise.reject(errors.generic.GEN)
    }
  }

  /**
   * Refresh the current fees
   * @returns {Promise}
   */
  async refresh() {
    await util.seriesCall(this.targets, async tgt => {
      try {
        const level = await this.rpcClient.cmd('estimatesmartfee', tgt, this.feeType)
        // estimatesmartfee returns BTC/kB; 1e5 converts to sat/byte (1e8 sat / 1000 bytes)
        this.fees[tgt] = Math.round(level.feerate * 1e5)
      } catch(e) {
        Logger.error(e, 'Fees.refresh()')
        // Drop the stale value rather than serving an outdated estimate
        delete this.fees[tgt]
      }
    })
    this.block = latestBlock.height
  }
}
module.exports = new Fees()

56
lib/bitcoind-rpc/headers.js

@ -0,0 +1,56 @@
/*!
* lib/bitcoind-rpc/headers.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const LRU = require('lru-cache')
const errors = require('../errors')
const RpcClient = require('./rpc-client')
/**
 * A singleton providing information about block headers
 */
class Headers {

  /**
   * Constructor
   */
  constructor() {
    // LRU cache of formatted headers, keyed by block hash
    this.headers = LRU({
      // Maximum number of headers to store in cache
      max: 2016,
      // Function used to compute length of item
      length: (n, key) => 1,
      // Maximum age for items in the cache. Items do not expire
      maxAge: Infinity
    })
    // Initialize the rpc client
    this.rpcClient = new RpcClient()
  }

  /**
   * Get the block header for a given hash
   * @param {string} hash - block hash
   * @returns {Promise} resolves to a pretty-printed JSON string describing the header
   */
  async getHeader(hash) {
    // Serve from the cache whenever possible
    if (this.headers.has(hash))
      return this.headers.get(hash)

    let header
    try {
      header = await this.rpcClient.getblockheader(hash, true)
    } catch(e) {
      return Promise.reject(errors.generic.GEN)
    }

    const fmtHeader = JSON.stringify(header, null, 2)
    this.headers.set(hash, fmtHeader)
    return fmtHeader
  }
}
module.exports = new Headers()

69
lib/bitcoind-rpc/latest-block.js

@ -0,0 +1,69 @@
/*!
* lib/bitcoind_rpc/latest-block.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const zmq = require('zeromq')
const Logger = require('../logger')
const util = require('../util')
const network = require('../bitcoin/network')
const keys = require('../../keys')[network.key]
const RpcClient = require('./rpc-client')
/**
 * A singleton providing information about the latest block
 */
class LatestBlock {

  /**
   * Constructor
   */
  constructor() {
    // Latest block summary, populated asynchronously
    this.height = null
    this.hash = null
    this.time = null
    this.diff = null

    // Initialize the rpc client
    this.rpcClient = new RpcClient()

    // Gets the latest block from bitcoind
    // (fix: log failures instead of leaving an unhandled promise rejection)
    this.rpcClient.getbestblockhash()
      .then(hash => this.onBlockHash(hash))
      .catch(e => Logger.error(e, 'LatestBlock()'))

    // Initializes zmq socket notifying new blocks
    this.sock = zmq.socket('sub')
    this.sock.connect(keys.bitcoind.zmqBlk)
    this.sock.subscribe('hashblock')

    this.sock.on('message', (topic, msg) => {
      switch(topic.toString()) {
        case 'hashblock':
          // onBlockHash is async; handle its rejection explicitly
          this.onBlockHash(msg.toString('hex'))
            .catch(e => Logger.error(e, 'LatestBlock.onBlockHash()'))
          break
        default:
          Logger.info(topic.toString())
      }
    })
  }

  /**
   * Retrieve and store information for a given block
   * @param {string} hash - hash of the block
   * @returns {Promise}
   */
  async onBlockHash(hash) {
    const header = await this.rpcClient.getblockheader(hash)
    this.height = header.height
    this.hash = hash
    this.time = header.mediantime
    this.diff = header.difficulty
    Logger.info(`Block ${this.height} ${this.hash}`)
  }
}
module.exports = new LatestBlock()

88
lib/bitcoind-rpc/rpc-client.js

@ -0,0 +1,88 @@
/*!
* lib/bitcoind_rpc/rpc-client.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const rpc = require('bitcoind-rpc-client')
const network = require('../bitcoin/network')
const keys = require('../../keys')[network.key]
const util = require('../util')
const Logger = require('../logger')
/**
 * Wrapper for bitcoind rpc client
 */
class RpcClient {

  /**
   * Constructor
   */
  constructor() {
    // Initialize the rpc client
    this.client = new rpc({
      host: keys.bitcoind.rpc.host,
      port: keys.bitcoind.rpc.port
    })

    this.client.set('user', keys.bitcoind.rpc.user)
    this.client.set('pass', keys.bitcoind.rpc.pass)

    // Initialize a proxy postprocessing api calls:
    // every property access returns an async wrapper around the
    // corresponding method of the underlying client, unwrapping the
    // {result, error} envelope returned by bitcoind
    return new Proxy(this, {
      get: function(target, name, receiver) {
        const origMethod = target.client[name]
        return async function(...args) {
          const result = await origMethod.apply(target.client, args)
          // An error reported by the rpc api takes precedence
          if (result.error)
            throw result.error
          // Bug fix: return the result even when it is falsy
          // (null, 0 or false are valid rpc results and were previously rejected)
          if ('result' in result)
            return result.result
          throw 'A problem was met with a request sent to bitcoind RPC API'
        }
      }
    })
  }

  /**
   * Check if an error returned by bitcoin-rpc-client
   * is a connection error.
   * @param {string} err - error message
   * @returns {boolean} returns true if message related to a connection error
   */
  static isConnectionError(err) {
    if (typeof err !== 'string')
      return false
    return err.includes('connect ETIMEDOUT') || err.includes('Connection Rejected')
  }

  /**
   * Check if the rpc api is ready to process requests
   * Retries every 20s until bitcoind answers
   * @returns {Promise}
   */
  static async waitForBitcoindRpcApi() {
    let client = new RpcClient()
    try {
      await client.getblockchaininfo()
    } catch(e) {
      client = null
      Logger.info('Bitcoind RPC API is still unreachable. New attempt in 20s.')
      return util.delay(20000).then(() => {
        return RpcClient.waitForBitcoindRpcApi()
      })
    }
  }
}
module.exports = RpcClient

215
lib/bitcoind-rpc/transactions.js

@ -0,0 +1,215 @@
/*!
* lib/bitcoind-rpc/transactions.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const _ = require('lodash')
const LRU = require('lru-cache')
const errors = require('../errors')
const Logger = require('../logger')
const util = require('../util')
const RpcClient = require('./rpc-client')
const rpcLatestBlock = require('./latest-block')
/**
* A singleton providing information about transactions
*/
class Transactions {

  /**
   * Constructor
   * Initializes the transaction caches and the bitcoind rpc client.
   */
  constructor() {
    // Caches
    // Cache of processed transaction objects, keyed by txid.
    // Only confirmed transactions are stored here (see getTransaction(),
    // which checks ret.block.hash before caching).
    this.txCache = LRU({
      // Maximum number of transactions to store
      max: 10000,
      // Function used to compute length of item
      length: (n, key) => 1,
      // Maximum age for items in the cache. Items do not expire
      maxAge: Infinity
    })
    // Cache of raw previous transactions used for fee computation,
    // keyed by txid. Only confirmed transactions are stored
    // (their height is frozen once computed).
    this.prevCache = LRU({
      // Maximum number of transactions to store
      max: 100000,
      // Function used to compute length of item
      length: (n, key) => 1,
      // Maximum age for items in the cache. Items do not expire
      maxAge: Infinity
    })
    // Initialize the rpc client
    this.rpcClient = new RpcClient()
  }

  /**
   * Get the transaction for a given txid
   * @param {string} txid - txid of the transaction to be retrieved
   * @param {boolean} fees - true if fees must be computed, false otherwise
   * @returns {Promise} resolves to an object
   *  {txid, size, vsize?, version, locktime, created?, block?, inputs, outputs,
   *   fees?, feerate?, vfeerate?}; rejects with errors.generic.GEN on failure
   */
  async getTransaction(txid, fees) {
    // Return transaction from cache when possible
    if (this.txCache.has(txid))
      return this.txCache.get(txid)
    try {
      // Verbose mode (2nd argument true) returns a decoded transaction object
      const tx = await this.rpcClient.getrawtransaction(txid, true)
      const ret = {
        txid: tx.txid,
        size: tx.size,
        vsize: tx.vsize,
        version: tx.version,
        locktime: tx.locktime,
        inputs: [],
        outputs: []
      }
      // Drop vsize when bitcoind did not provide one
      if (!ret.vsize)
        delete ret.vsize
      if (tx.time)
        ret.created = tx.time
      // Process block informations
      // Height is derived from the cached chain tip; it may be briefly
      // stale right after a new block until latest-block is refreshed
      if (tx.blockhash && tx.confirmations && tx.blocktime) {
        ret.block = {
          height: rpcLatestBlock.height - tx.confirmations + 1,
          hash: tx.blockhash,
          time: tx.blocktime
        }
      }
      let inAmount = 0
      let outAmount = 0
      // Process the inputs
      ret.inputs = await this._getInputs(tx, fees)
      // NOTE(review): coinbase inputs have no outpoint, which makes this
      // reduce throw; such transactions end up rejected with the generic
      // error below — confirm this is intended. When fees is false,
      // outpoint.value is undefined and inAmount is NaN (unused in that case).
      inAmount = ret.inputs.reduce((prev, cur) => prev + cur.outpoint.value, 0)
      // Process the outputs
      ret.outputs = await this._getOutputs(tx)
      outAmount = ret.outputs.reduce((prev, cur) => prev + cur.value, 0)
      // Process the fees (if needed)
      if (fees) {
        ret.fees = inAmount - outAmount
        // Integer rates, in satoshis per (virtual) byte
        if (ret.fees > 0 && ret.size)
          ret.feerate = Math.round(ret.fees / ret.size)
        if (ret.fees > 0 && ret.vsize)
          ret.vfeerate = Math.round(ret.fees / ret.vsize)
      }
      // Store in cache
      // Only confirmed transactions are cached: the confirmation status
      // of a mempool transaction would become stale in the cache
      if (ret.block && ret.block.hash)
        this.txCache.set(txid, ret)
      return ret
    } catch(e) {
      Logger.error(e, 'Transaction.getTransaction()')
      return Promise.reject(errors.generic.GEN)
    }
  }

  /**
   * Extract information about the inputs of a transaction
   * @param {object} tx - transaction (decoded, as returned by getrawtransaction)
   * @param {boolean} fees - true if fees must be computed, false otherwise.
   *  When true, each non-coinbase input's outpoint is enriched with its
   *  value (in satoshis) and scriptpubkey, fetched from the previous tx.
   * @returns {Promise} return an array of inputs (object[])
   */
  async _getInputs(tx, fees) {
    const inputs = []
    let n = 0
    // Inputs are processed sequentially to preserve their original order
    await util.seriesCall(tx.vin, async input => {
      const txin = {
        n,
        seq: input.sequence,
      }
      if (input.coinbase) {
        // Coinbase inputs carry no outpoint nor scriptSig
        txin.coinbase = input.coinbase
      } else {
        txin.outpoint = {
          txid: input.txid,
          vout: input.vout
        }
        txin.sig = input.scriptSig.hex
      }
      if (input.txinwitness)
        txin.witness = input.txinwitness
      if (fees && txin.outpoint) {
        // Fetch the previous transaction to value the outpoint
        const inTxid = txin.outpoint.txid
        let ptx
        if (this.prevCache.has(inTxid)) {
          ptx = this.prevCache.get(inTxid)
        } else {
          ptx = await this.rpcClient.getrawtransaction(inTxid, true)
          // Only cache confirmed previous transactions
          if (ptx.blockhash && ptx.confirmations && ptx.blocktime) {
            ptx.height = rpcLatestBlock.height - ptx.confirmations + 1
            this.prevCache.set(inTxid, ptx)
          }
        }
        const outpoint = ptx.vout[txin.outpoint.vout]
        // Convert the BTC amount to satoshis
        txin.outpoint.value = Math.round(outpoint.value * 1e8)
        txin.outpoint.scriptpubkey = outpoint.scriptPubKey.hex
        inputs.push(txin)
        n++
      } else {
        inputs.push(txin)
        n++
      }
    })
    return inputs
  }

  /**
   * Extract information about the outputs of a transaction
   * @param {object} tx - transaction (decoded, as returned by getrawtransaction)
   * @returns {Promise} return an array of outputs (object[])
   */
  async _getOutputs(tx) {
    const outputs = []
    let n = 0
    for (let output of tx.vout) {
      const pk = output.scriptPubKey
      // Convert the BTC amount to satoshis
      const amount = Math.round(output.value * 1e8)
      let o = {
        n,
        value: amount,
        scriptpubkey: pk.hex,
        type: pk.type
      }
      if (pk.addresses) {
        // Single-address outputs expose "address",
        // multi-address (e.g. bare multisig) expose "addresses"
        if (pk.addresses.length == 1)
          o.address = pk.addresses[0]
        else
          o.addresses = pk.addresses
      }
      outputs.push(o)
      n++
    }
    return outputs
  }
}
module.exports = new Transactions()

1974
lib/db/mysql-db-wrapper.js

File diff suppressed because it is too large

80
lib/errors.js

@ -0,0 +1,80 @@
/*!
* lib/errors.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
/**
* Dictionary of error codes
*/
module.exports = {
get: {
UNKNXPUB: 'Unknown xpub. Create with POST /xpub',
DISALLOWED: 'GET not allowed. Use POST',
},
body: {
NODATA: 'No body data',
NOXPUB: 'Missing body parameter "xpub"',
NOTYPE: 'Missing body parameter "type"',
NOADDR: 'Missing body parameter "address"',
NOMSG: 'Missing body parameter "message"',
NOSIG: 'Missing body parameter "signature"',
NOSCRIPT: 'Missing body parameter "script"',
SCRIPTSIZE: 'Too many entries in the script',
NOTX: 'Missing body parameter "tx"',
INVTYPE: 'Invalid value for parameter "type"',
INVDATA: 'Invalid request arguments'
},
sig: {
INVSIG: 'Invalid bitcoin signature',
INVMSG: 'Invalid message content',
INVADDR: 'Incorrect bitcoin address used for signature',
},
tx: {
PARSE: 'Unable to parse transaction hex',
SEND: 'Unable to broadcast transaction',
TXID: 'Malformed txid',
},
address: {
INVALID: 'Invalid address',
},
xpub: {
INVALID: 'Invalid xpub',
CHAIN: 'Invalid chain',
PRIVKEY: 'No private keys',
CREATE: 'Unable to create new HD account',
RESTORE: 'Unable to restore HD account',
OVERLAP: 'Import in progress',
SEGWIT: 'Invalid value for SegWit support type',
LOCKED: 'Unable to complete operation (locked xpub)'
},
txout: {
VOUT: 'Invalid vout',
NOTFOUND: 'Unspent output not found',
},
multiaddr: {
NOACT: 'Missing parameter "active"',
INVALID: 'No valid active entries',
AMBIG: 'Ambiguous "new" parameter: pass only one xpub',
},
generic: {
GEN: 'Error',
DB: 'Database Error',
},
auth: {
INVALID_CONF: 'Missing configuration parameter',
INVALID_JWT: 'Invalid JSON Web Token',
INVALID_PRF: 'Your current access rights do not allow this operation',
MISSING_JWT: 'Missing JSON Web Token',
TECH_ISSUE: 'A technical problem was encountered. Unable to authenticate the user'
},
db: {
ERROR_NO_ADDRESS: 'ERROR_NO_ADDRESS',
ERROR_NO_HD_ACCOUNT: 'ERROR_NO_HD_ACCOUNT'
},
pushtx: {
NLOCK_MISMATCH: 'nLockTime in script does not match nLockTime in transaction',
SCHEDULED_TOO_FAR: 'nLockTime is set to far in the future',
SCHEDULED_BAD_ORDER: 'Order of hop and nLockTime values must be consistent'
}
}

85
lib/fork-pool.js

@ -0,0 +1,85 @@
/*!
* lib/fork-pool.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const os = require('os')
const childProcess = require('child_process')
const genericPool = require('generic-pool')
const Logger = require('./logger')
/**
* A class managing a pool of child processes
* Inspired from fork-pool by Andrew Sliwinski
* https://github.com/thisandagain/fork-pool/
*/
class ForkPool {

  /**
   * Constructor
   * @param {string} path - path of the script run by the child processes
   * @param {object} options - (optional) pool options
   *  {networkKey, max, min, acquireTimeoutMillis}
   */
  constructor(path, options) {
    if (!options) {
      this._networkKey = ''
      this._options = {
        // NOTE(review): on a single-core machine this evaluates to 0.5;
        // generic-pool is assumed to cope with fractional sizes — confirm
        max: os.cpus().length / 2,
        min: os.cpus().length / 2,
        acquireTimeoutMillis: 60000
      }
    } else {
      this._networkKey = options.networkKey
      this._options = options
    }
    // Factory handing forked child processes to the pool
    const factory = {
      create: () => {
        return childProcess.fork(path, [this._networkKey])
      },
      destroy: (cp) => {
        cp.kill()
      }
    }
    this.pool = genericPool.createPool(factory, this._options)
    Logger.info(`Created ${this._options.min} child processes for addresses derivation (max = ${this._options.max})`)
  }

  /**
   * Enqueue a new task to be processed by a child process
   * @param {object} data - data to be passed to the child process
   * @returns {Promise} resolves with the first message sent back by the child
   */
  async enqueue(data) {
    // Acquire a worker first; acquisition failures reject this function
    const cp = await this.pool.acquire()
    return new Promise((resolve, reject) => {
      try {
        // Register the listener before sending so a fast reply can't be missed
        cp.once('message', msg => {
          // Hand the worker back to the pool before resolving
          this.pool.release(cp)
          resolve(msg)
        })
        cp.send(data)
      } catch(e) {
        // Sending failed: return the worker to the pool
        // (previously the worker leaked in this case)
        this.pool.release(cp)
        reject(e)
      }
    })
  }

  /**
   * Drain the pool, killing all child processes
   */
  async drain() {
    await this.pool.drain()
    await this.pool.clear()
  }
}
module.exports = ForkPool

242
lib/http-server/http-server.js

@ -0,0 +1,242 @@
/*!
* lib/http-server/http-server.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const fs = require('fs')
const https = require('https')
const express = require('express')
const helmet = require('helmet')
const Logger = require('../logger')
/**
* HTTP server
*/
class HttpServer {

  /**
   * Constructor
   * @param {int} port - port used by the http server
   * @param {object} httpsOptions - https options
   *  {active, keypath, certpath, capath?, passphrase?}
   */
  constructor(port, httpsOptions) {
    // Initialize server port
    this.port = port
    // Store https options
    this.httpsOptions = httpsOptions
    // Listening server instance (set by start())
    this.server = null
    // Initialize the express app
    this.app = express()
    this.app.set('trust proxy', 'loopback')
    // Middlewares for static content, json responses and requests logging
    this.app.use('/static', express.static('../static'))
    this.app.use(HttpServer.setJSONResponse)
    this.app.use(HttpServer.requestLogger)
    this.app.use(HttpServer.setCrossOrigin)
    this.app.use(HttpServer.setConnection)
    this.app.use(helmet(HttpServer.HELMET_POLICY))
  }

  /**
   * Start the http server
   * @returns {object} returns the listening server instance
   */
  start() {
    // Error handler, should be final middleware
    this.app.use(function(err, req, res, next) {
      if (res.headersSent) return next(err)
      Logger.error(err.stack, 'HttpServer.start()')
      const ret = {status: 'Server error'}
      HttpServer.sendError(res, ret, 500)
    })
    if (this.httpsOptions == null || !this.httpsOptions.active) {
      // Start a http server
      this.server = this.app.listen(this.port, () => {
        Logger.info('HTTP server listening on port ' + this.port)
      })
    } else {
      // Start a https server
      const options = {
        key: fs.readFileSync(this.httpsOptions.keypath),
        cert: fs.readFileSync(this.httpsOptions.certpath),
        requestCert: false,
        rejectUnauthorized: false
      }
      if (this.httpsOptions.capath)
        options.ca = fs.readFileSync(this.httpsOptions.capath)
      if (this.httpsOptions.passphrase)
        options.passphrase = this.httpsOptions.passphrase
      this.server = https.createServer(options, this.app).listen(this.port, () => {
        Logger.info('HTTPS server listening on port ' + this.port)
      })
    }
    this.server.timeout = 600 * 1000
    // @see https://github.com/nodejs/node/issues/13391
    this.server.keepAliveTimeout = 0
    return this.server
  }

  /**
   * Stop the http server
   * Fix: close the listening server instance returned by listen()/createServer();
   * the express application object has no close() method, so the previous
   * this.app.close() call always threw a TypeError.
   */
  stop() {
    if (this.server == null) return
    this.server.close()
    this.server = null
  }

  /**
   * Return a http response without data
   * @param {object} res - http response object
   */
  static sendOk(res) {
    const ret = {status: 'ok'}
    res.status(200).json(ret)
  }

  /**
   * Return a http response with data but without status
   * @param {object} res - http response object
   * @param {object} data - data object
   */
  static sendOkDataOnly(res, data) {
    res.status(200).json(data)
  }

  /**
   * Return a http response with status and data
   * @param {object} res - http response object
   * @param {object} data - data object
   */
  static sendOkData(res, data) {
    const ret = {
      status: 'ok',
      data: data
    }
    res.status(200).json(ret)
  }

  /**
   * Return a http response with raw data
   * @param {object} res - http response object
   * @param {object} data - data object
   */
  static sendRawData(res, data) {
    res.status(200).send(data)
  }

  /**
   * Return an error response
   * @param {object} res - http response object
   * @param {object} data - data object
   * @param {int} errorCode - (optional) http status code, defaults to 400
   */
  static sendError(res, data, errorCode) {
    if (errorCode == null)
      errorCode = 400
    const ret = {
      status: 'error',
      error: data
    }
    res.status(errorCode).json(ret)
  }

  /*
   * A middleware returning an authorization error response
   * NOTE(review): when err is falsy nothing is sent and the request is left
   * pending — confirm upstream middlewares always pass an error here.
   * @param {string} err - error
   * @param {object} req - http request object
   * @param {object} res - http response object
   * @param {function} next - callback function
   */
  static sendAuthError(err, req, res, next) {
    if (err) {
      HttpServer.sendError(res, err, 401)
    }
  }

  /**
   * Express middleware setting a json content type on the response
   * @param {object} req - http request object
   * @param {object} res - http response object
   * @param {function} next - next middleware
   */
  static setJSONResponse(req, res, next) {
    res.set('Content-Type', 'application/json')
    next()
  }

  /**
   * Express middleware adding cors header
   * @param {object} req - http request object
   * @param {object} res - http response object
   * @param {function} next - next middleware
   */
  static setCrossOrigin(req, res, next) {
    res.set('Access-Control-Allow-Origin', '*')
    next()
  }

  /**
   * Express middleware adding connection header
   * @param {object} req - http request object
   * @param {object} res - http response object
   * @param {function} next - next middleware
   */
  static setConnection(req, res, next) {
    res.set('Connection', 'close')
    next()
  }

  /**
   * Express middleware logging url and methods called
   * @param {object} req - http request object
   * @param {object} res - http response object
   * @param {function} next - next middleware
   */
  static requestLogger(req, res, next) {
    Logger.info(`${req.method} ${req.url}`)
    next()
  }
}
/**
 * Helmet Policy
 * Security-header configuration applied to every response
 * by the helmet middleware registered in the constructor.
 */
HttpServer.HELMET_POLICY = {
  'contentSecurityPolicy' : {
    'directives': {
      // NOTE(review): CSP keywords are conventionally written with inner
      // single quotes ("'self'"); inner double quotes are used here —
      // verify the emitted Content-Security-Policy header is valid
      'defaultSrc': ['"self"'],
      'styleSrc' : ['"self"', '"unsafe-inline"'],
      // NOTE(review): key uses kebab-case while the others use camelCase —
      // confirm helmet emits this directive as expected
      'img-src' : ['"self" data:']
    },
    'browserSniff': false,
    'disableAndroid': true
  },
  'dnsPrefetchControl': true,
  'frameguard': true,
  'hidePoweredBy': true,
  // HTTP public key pinning disabled (deprecated mechanism)
  'hpkp': false,
  'hsts': true,
  'ieNoOpen': true,
  'noCache': true,
  'noSniff': true,
  'referrerPolicy': true,
  'xssFilter': true
}
module.exports = HttpServer

67
lib/logger.js

@ -0,0 +1,67 @@
/*!
* lib/logger.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const util = require('./util')
/**
* Class providing static methods for logging
*/
class Logger {

  /**
   * Log an informational message to the console
   * @param {string/object} msg - message to be logged
   * @param {boolean} json - true if msg is a json object, false otherwise
   */
  static info(msg, json) {
    console.log(Logger._formatLog(msg, json))
  }

  /**
   * Log an error to the console
   * @param {object} e - error
   * @param {string} msg - message associated to the error
   */
  static error(e, msg) {
    console.error(Logger._formatLog(msg))
    if (e) {
      console.error(e)
    }
  }

  /**
   * Build a log entry prefixed with a UTC timestamp and the process RSS
   * @param {string/object} msg - message to be logged
   * @param {boolean} json - true if msg is a json object, false otherwise
   * @returns {string} formatted log entry
   */
  static _formatLog(msg, json) {
    const asJson = json || false
    const payload = asJson ? JSON.stringify(msg, null, 2) : msg
    // Resident set size, in MiB
    const mem = util.pad100(util.toMb(process.memoryUsage().rss))
    const now = new Date()
    const date = [
      now.getUTCFullYear(),
      util.pad10(now.getUTCMonth() + 1),
      util.pad10(now.getUTCDate())
    ].join('')
    const time = [
      util.pad10(now.getUTCHours()),
      ':',
      util.pad10(now.getUTCMinutes()),
      ':',
      util.pad10(now.getUTCSeconds()),
      '.',
      util.pad100(now.getUTCMilliseconds())
    ].join('')
    // join() coerces null/undefined payloads to '' like the legacy format
    return ['[', date, ' ', time, ' ', mem, ' MiB', '] ', payload].join('')
  }
}
module.exports = Logger

129
lib/remote-importer/bitcoind-wrapper.js

@ -0,0 +1,129 @@
/*!
* lib/remote-importer/bitcoind-wrapper.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const bitcoin = require('bitcoinjs-lib')
const RpcClient = require('../bitcoind-rpc/rpc-client')
const Logger = require('../logger')
const network = require('../bitcoin/network')
const activeNet = network.network
const keys = require('../../keys')[network.key]
const Wrapper = require('./wrapper')
/**
* Wrapper for a local bitcoind RPC API
*/
class BitcoindWrapper extends Wrapper {

  /**
   * Constructor
   */
  constructor() {
    super(null, null)
    // RPC client
    this.client = new RpcClient()
  }

  /**
   * Send a request to the RPC API
   * @param {array} descriptors - array of output descriptors
   *  expected by scantxoutset()
   * @returns {Promise}
   */
  async _get(descriptors) {
    return this.client.cmd('scantxoutset', 'start', descriptors)
  }

  /**
   * Translate a scriptPubKey into an address
   * @param {string} scriptPubKey - ScriptPubKey in hex format
   * @returns {string} returns the bitcoin address corresponding to the scriptPubKey
   */
  _xlatScriptPubKey(scriptPubKey) {
    const bScriptPubKey = Buffer.from(scriptPubKey, 'hex')
    return bitcoin.address.fromOutputScript(bScriptPubKey, activeNet)
  }

  /**
   * Retrieve information for a given address
   * NOTE(review): scantxoutset only reports currently-unspent outputs,
   * so ntx counts unspent-funding transactions, not the full history.
   * @param {string} address - bitcoin address
   * @param {boolean} filterAddr - True if an upper bound should be used
   *  for #transactions associated to the address, False otherwise
   * @returns {Promise} returns an object
   *  { address: <bitcoin_address>, txids: <txids>, ntx: <total_nb_txs>}
   */
  async getAddress(address, filterAddr) {
    const ret = {
      address: address,
      ntx: 0,
      txids: []
    }
    const descriptor = `addr(${address})`
    const results = await this._get([descriptor])
    for (let r of results.unspents) {
      ret.txids.push(r.txid)
      ret.ntx++
    }
    if (filterAddr && ret.ntx > keys.addrFilterThreshold) {
      Logger.info(` import of ${address} rejected (too many transactions - ${ret.ntx})`)
      // Return an empty result for filtered addresses
      return {
        address: address,
        ntx: 0,
        txids: []
      }
    }
    return ret
  }

  /**
   * Retrieve information for a given list of addresses
   * @param {string} addresses - array of bitcoin addresses
   * @param {boolean} filterAddr - True if an upper bound should be used
   *  for #transactions associated to the address, False otherwise
   * @returns {Promise} returns an array of objects
   *  { address: <bitcoin_address>, txids: <txids>, ntx: <total_nb_txs>}
   */
  async getAddresses(addresses, filterAddr) {
    const ret = {}
    // Send a batch request for all the addresses
    const descriptors = addresses.map(a => `addr(${a})`)
    const results = await this._get(descriptors)
    for (let r of results.unspents) {
      const addr = this._xlatScriptPubKey(r.scriptPubKey)
      if (!ret[addr]) {
        ret[addr] = {
          address: addr,
          ntx: 0,
          txids: []
        }
      }
      ret[addr].txids.push(r.txid)
      ret[addr].ntx++
    }
    // Fix: the previous implementation spliced the array while iterating it
    // with for...in, which skipped the entry following each removal
    // (e.g. two consecutive over-threshold addresses => second one kept).
    // Filtering builds the result without mutating during iteration.
    return Object.values(ret).filter(entry => {
      if (filterAddr && entry.ntx > keys.addrFilterThreshold) {
        Logger.info(` import of ${entry.address} rejected (too many transactions - ${entry.ntx})`)
        return false
      }
      return true
    })
  }
}
module.exports = BitcoindWrapper

122
lib/remote-importer/btccom-wrapper.js

@ -0,0 +1,122 @@
/*!
* lib/remote-importer/btccom-wrapper.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const rp = require('request-promise-native')
const addrHelper = require('../bitcoin/addresses-helper')
const util = require('../util')
const Logger = require('../logger')
const network = require('../bitcoin/network')
const keys = require('../../keys')[network.key]
const Wrapper = require('./wrapper')
/**
* Wrapper for the btc.com block explorer APIs
*/
class BtcComWrapper extends Wrapper {

  /**
   * Constructor
   * @param {string} url - base url of the btc.com api
   */
  constructor(url) {
    super(url, keys.explorers.socks5Proxy)
  }

  /**
   * Send a GET request to the API
   * @param {string} route - api route
   * @returns {Promise}
   */
  async _get(route) {
    const requestParams = {
      url: `${this.base}${route}`,
      method: 'GET',
      json: true,
      timeout: 15000
    }
    // Sets socks proxy agent if required
    if (keys.explorers.socks5Proxy != null)
      requestParams['agent'] = this.socksProxyAgent
    return rp(requestParams)
  }

  /**
   * Get a page of transactions related to a given address
   * @param {string} address - bitcoin address (or scripthash)
   * @param {integer} page - page index
   * @returns {Promise} resolves to an array of txids
   */
  async _getTxsForAddress(address, page) {
    const route = `/address/${address}/tx?page=${page}&verbose=1`
    const response = await this._get(route)
    const txids = []
    for (let tx of response.data.list)
      txids.push(tx.hash)
    return txids
  }

  /**
   * Retrieve information for a given address
   * @param {string} address - bitcoin address
   * @param {boolean} filterAddr - True if an upper bound should be used
   *  for #transactions associated to the address, False otherwise
   * @returns {Promise} returns an object
   *  { address: <bitcoin_address>, txids: <txids>, ntx: <total_nb_txs>}
   */
  async getAddress(address, filterAddr) {
    // Extracts the scripthash from the bech32 address
    // (btc.com api manages scripthashes, not bech32 addresses)
    const scripthash = addrHelper.getScriptHashFromBech32(address)
    const result = await this._get(`/address/${scripthash}`)
    const ret = {
      address: address,
      ntx: result.data.tx_count,
      txids: []
    }
    // Check if we should filter this address
    if (filterAddr && ret.ntx > keys.addrFilterThreshold) {
      Logger.info(` import of ${ret.address} rejected (too many transactions - ${ret.ntx})`)
      return ret
    }
    // Fetch at most 20 pages of transactions, sequentially
    const nbPages = Math.min(20, Math.ceil(ret.ntx / BtcComWrapper.NB_TXS_PER_PAGE))
    const pageIndices = Array.from({ length: nbPages }, (v, i) => i + 1)
    const pages = await util.seriesCall(pageIndices, pageIdx => {
      return this._getTxsForAddress(scripthash, pageIdx)
    })
    for (let pageTxids of pages)
      ret.txids = ret.txids.concat(pageTxids)
    return ret
  }

  /**
   * Retrieve information for a given list of addresses
   * (not supported by this api)
   * @param {string} addresses - array of bitcoin addresses
   * @param {boolean} filterAddr - True if an upper bound should be used
   *  for #transactions associated to the address, False otherwise
   * @returns {Promise}
   */
  async getAddresses(addresses, filterAddr) {
    // Not implemented for this api
    throw "Not implemented"
  }
}
}
// BTC.COM accepts a max of 50 txs per page
BtcComWrapper.NB_TXS_PER_PAGE = 50
module.exports = BtcComWrapper

90
lib/remote-importer/insight-wrapper.js

@ -0,0 +1,90 @@
/*!
* lib/remote-importer/insight-wrapper.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const rp = require('request-promise-native')
const Logger = require('../logger')
const network = require('../bitcoin/network')
const keys = require('../../keys')[network.key]
const Wrapper = require('./wrapper')
/**
* Wrapper for the Insight block explorer APIs
*/
class InsightWrapper extends Wrapper {

  /**
   * Constructor
   * @param {string} url - base url of the insight api
   */
  constructor(url) {
    super(url, keys.explorers.socks5Proxy)
  }

  /**
   * Send a GET request to the API
   * @param {string} route - api route
   * @returns {Promise}
   */
  async _get(route) {
    const requestParams = {
      url: `${this.base}${route}`,
      method: 'GET',
      json: true,
      timeout: 15000
    }
    // Sets socks proxy agent if required
    if (keys.explorers.socks5Proxy != null)
      requestParams['agent'] = this.socksProxyAgent
    return rp(requestParams)
  }

  /**
   * Retrieve information for a given address
   * @param {string} address - bitcoin address
   * @param {boolean} filterAddr - True if an upper bound should be used
   *  for #transactions associated to the address, False otherwise
   *  (not forwarded to insight, only applied locally)
   * @returns {Promise} returns an object
   *  { address: <bitcoin_address>, txids: <txids>, ntx: <total_nb_txs>}
   */
  async getAddress(address, filterAddr) {
    const result = await this._get(`/addr/${address}`)
    const ret = {
      address: result.addrStr,
      txids: [],
      // NOTE(review): "txApperances" appears to match the field name actually
      // returned by the Insight API (misspelled upstream) — do not "fix"
      ntx: result.txApperances
    }
    // Check if we should filter this address
    if (filterAddr && ret.ntx > keys.addrFilterThreshold) {
      Logger.info(` import of ${ret.address} rejected (too many transactions - ${ret.ntx})`)
      return ret
    }
    ret.txids = result.transactions
    return ret
  }

  /**
   * Retrieve information for a given list of addresses
   * (not supported by this api)
   * @param {string} addresses - array of bitcoin addresses
   * @param {boolean} filterAddr - True if an upper bound should be used
   *  for #transactions associated to the address, False otherwise
   * @returns {Promise}
   */
  async getAddresses(addresses, filterAddr) {
    // Not implemented for this api
    throw "Not implemented"
  }
}
module.exports = InsightWrapper

114
lib/remote-importer/oxt-wrapper.js

@ -0,0 +1,114 @@
/*!
* lib/remote-importer/oxt-wrapper.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const rp = require('request-promise-native')
const Logger = require('../logger')
const network = require('../bitcoin/network')
const keys = require('../../keys')[network.key]
const Wrapper = require('./wrapper')
/**
* Wrapper for the oxt.me block explorer APIs
*/
class OxtWrapper extends Wrapper {

  /**
   * Constructor
   * @param {string} url - base url of the oxt.me api
   */
  constructor(url) {
    super(url, keys.explorers.socks5Proxy)
  }

  /**
   * Send a GET request to the API
   * @param {string} route - api route
   * @returns {Promise}
   */
  async _get(route) {
    const requestParams = {
      url: `${this.base}${route}`,
      method: 'GET',
      json: true,
      timeout: 15000
    }
    // Sets socks proxy agent if required
    if (keys.explorers.socks5Proxy != null)
      requestParams['agent'] = this.socksProxyAgent
    return rp(requestParams)
  }

  /**
   * Retrieve information for a given address
   * @param {string} address - bitcoin address
   * @param {boolean} filterAddr - True if an upper bound should be used
   *  for #transactions associated to the address, False otherwise
   * @returns {Promise} returns an object
   *  { address: <bitcoin_address>, txids: <txids>, ntx: <total_nb_txs>}
   */
  async getAddress(address, filterAddr) {
    // Try to retrieve more txs than the 1000 managed by the backend
    const result = await this._get(`/addresses/${address}/txids?count=${keys.addrFilterThreshold + 1}`)
    const ret = {
      address: address,
      ntx: result.count,
      txids: []
    }
    // Check if we should filter this address
    if (filterAddr && ret.ntx > keys.addrFilterThreshold) {
      Logger.info(` import of ${ret.address} rejected (too many transactions - ${ret.ntx})`)
      return ret
    }
    ret.txids = result.data.map(t => t.txid)
    return ret
  }

  /**
   * Retrieve information for a given list of addresses
   * @param {string} addresses - array of bitcoin addresses
   * @param {boolean} filterAddr - True if an upper bound should be used
   *  for #transactions associated to the address, False otherwise
   * @returns {Promise} returns an array of objects
   *  { address: <bitcoin_address>, txids: <txids>, ntx: <total_nb_txs>}
   */
  async getAddresses(addresses, filterAddr) {
    // Send a batch request for all the addresses
    // For each address, try to retrieve more txs than the 1000 managed by the backend
    const uri = `/addresses/multi/txids?count=${keys.addrFilterThreshold + 1}&addresses=${addresses.join(',')}`
    const results = await this._get(uri)
    return results.data.map(entry => {
      const nbTxs = entry.txids.length
      // Check if we should filter this address
      if (filterAddr && nbTxs > keys.addrFilterThreshold) {
        Logger.info(` import of ${entry.address} rejected (too many transactions - ${nbTxs})`)
        return { address: entry.address, ntx: nbTxs, txids: [] }
      }
      return { address: entry.address, ntx: nbTxs, txids: entry.txids }
    })
  }
}
module.exports = OxtWrapper

436
lib/remote-importer/remote-importer.js

@ -0,0 +1,436 @@
/*!
* lib/remote-importer/remote-importer.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const _ = require('lodash')
const Logger = require('../logger')
const errors = require('../errors')
const util = require('../util')
const db = require('../db/mysql-db-wrapper')
const rpcTxns = require('../bitcoind-rpc/transactions')
const hdaHelper = require('../bitcoin/hd-accounts-helper')
const network = require('../bitcoin/network')
const keys = require('../../keys')[network.key]
const gap = keys.gap
let Sources
if (network.key == 'bitcoin') {
Sources = require('./sources-mainnet')
} else {
Sources = require('./sources-testnet')
}
/**
* A singleton providing tools
* for importing HD and loose addresses from remote sources
*/
class RemoteImporter {
/**
 * Constructor
 */
constructor() {
  // Guard against overlapping imports
  // Map of xpub => true for imports currently in progress
  this.importing = {}
  // Remote data sources (bitcoind or public explorers, selected by network)
  this.sources = new Sources()
}
/**
* Clear the guard
* @param {string} xpub - HDAccount
*/
clearGuard(xpub) {
if (this.importing[xpub])
delete this.importing[xpub]
}
/**
* Process the relations between a list of transactions
* @param {object[]} txs - array of transaction objects
* @returns {object} returns a object with 3 mappings
* {txMap: {], txChildren: {}, txParents: {}}
*/
_processTxsRelations(txs) {
const txMap = {}
const txChildren = {}
const txParents = {}
for (let tx of txs) {
let txid = tx.txid
// Populate txMap
txMap[txid] = tx
// Create parent-child transaction associations
if (!txChildren[txid])
txChildren[txid] = []
if (!txParents[txid])
txParents[txid] = []
for (let i in tx.inputs) {
const input = tx.inputs[i]
let prev = input.outpoint.txid
if (!txMap[prev]) continue
if (txParents[txid].indexOf(prev) == -1)
txParents[txid].push(prev)
if (!txChildren[prev])
txChildren[prev] = []
if (txChildren[prev].indexOf(txid) == -1)
txChildren[prev].push(txid)
}
}
return {
txMap: txMap,
txChildren: txChildren,
txParents: txParents
}
}
/**
* Import a list of transactions associated to a list of addresses
* @param {object[]} addresses - array of addresses objects
* @param {object[]} txns - array of transaction objects
* @returns {Promise}
*/
async _importTransactions(addresses, txns) {
const addrIdMap = await db.getAddressesIds(addresses)
// The transactions array must be topologically ordered, such that
// entries earlier in the array MUST NOT depend upon any entry later
// in the array.
const txMaps = this._processTxsRelations(txns)
const txOrdered = util.topologicalOrdering(txMaps.txParents, txMaps.txChildren)
const aTxs = []
for (let txid of txOrdered)
if (txMaps.txMap[txid])
aTxs.push(txMaps.txMap[txid])
return util.seriesCall(aTxs, tx => this.addTransaction(tx, addrIdMap))
}
/**
 * Import an HD account from remote sources
 * @param {string} xpub - HD Account
 * @param {string} type - type of HD Account
 * @param {integer} gapLimit - (optional) gap limit for derivation
 * @param {integer} startIndex - (optional) rescan shall start from this index
 * @returns {Promise} resolves to true once the import attempt completes.
 *  NOTE(review): the "return true" inside finally means errors during the
 *  import are logged but never propagated to the caller — confirm intended.
 */
async importHDAccount(xpub, type, gapLimit, startIndex) {
  if (!hdaHelper.isValid(xpub))
    return Promise.reject(errors.xpub.INVALID)
  // Reject overlapping imports of the same xpub
  if (this.importing[xpub]) {
    Logger.info(` Import overlap for ${xpub}`)
    return Promise.reject(errors.xpub.OVERLAP)
  }
  this.importing[xpub] = true
  const ts = hdaHelper.typeString(type)
  Logger.info(`Importing ${xpub} ${ts}`)
  const t0 = Date.now()
  // Chain 0 = external (receive), chain 1 = internal (change)
  const chains = [0,1]
  let gaps = [gap.external, gap.internal]
  // Allow custom higher gap limits
  // for local scans relying on bitcoind
  if (gapLimit && keys.explorers.bitcoind)
    gaps = [gapLimit, gapLimit]
  // xpubScan expects the index of the last used/derived address,
  // hence the -1 offsets
  startIndex = (startIndex == null) ? -1 : startIndex - 1
  // NOTE(review): addrIdMap is never used in this method — dead variable?
  const addrIdMap = {}
  let txns = []
  let addresses = []
  try {
    // Scan both chains sequentially
    const results = await util.seriesCall(chains, chain => {
      return this.xpubScan(xpub, chain, startIndex, startIndex, gaps[chain], type)
    })
    // Accumulate addresses and transactions from all chains
    for (let result of results) {
      txns = txns.concat(result.transactions)
      addresses = addresses.concat(result.addresses)
    }
    // Store the hdaccount and the addresses into the database
    await db.ensureHDAccountId(xpub, type)
    await db.addAddressesToHDAccount(xpub, addresses)
    // Store the transactions into the database
    const aAddresses = addresses.map(a => a.address)
    await this._importTransactions(aAddresses, txns)
  } catch(e) {
    Logger.error(e, `RemoteImporter.importHDAccount() : xpub ${xpub}`)
  } finally {
    Logger.info(` xpub import done in ${((Date.now() - t0)/1000).toFixed(1)}s`)
    // Release the import lock
    delete this.importing[xpub]
    return true
  }
}
/**
 * Recursive scan of xpub addresses & transactions
 *
 * 0. HD chain c on [0,1]
 *    Gap limit G
 *    Last derived d = -1
 *    Last used u = -1
 * 1. Derive addresses M/c/{A}, with A on [d+1, u+G], set d = u + G
 * 2. Look up transactions T for M/c/{A} from remote
 * 3. If |T| = 0, go to 5
 * 4. Set u = highest chain index of used address, go to 1
 * 5. Store all in database
 *
 * @param {string} xpub - HD account
 * @param {integer} c - chain to be scanned (0 = external, 1 = internal)
 * @param {integer} d - index of the last derived address
 * @param {integer} u - index of the last used address
 * @param {integer} G - gap limit
 * @param {string} type - type of the HD account
 * @param {object} txids - set (txid => true) of already-collected txids,
 *  shared across recursion levels to avoid duplicates
 * @returns {object} returns
 * {
 *   addresses: [{address, chain, index}],
 *   transactions: [{
 *     txid,
 *     version,
 *     locktime,
 *     created,    // if known
 *     block: 'abcdef',  // if confirmed
 *     outputs: [{index, amount, script, address}],
 *     inputs: [{index,outpoint:{txid,index},seq}]
 *   }],
 * }
 *  NOTE(review): errors are logged and swallowed (return inside finally),
 *  so a failed remote lookup yields a partial result — confirm intended.
 */
async xpubScan(xpub, c, d, u, G, type, txids) {
  txids = txids || {}
  const ret = {
    addresses: [],
    transactions: [],
  }
  // Check that next derived isn't after last used + gap limit
  if (d + 1 > u + G) return ret
  // Derive the required number of new addresses
  const A = _.range(d + 1, u + G + 1)
  ret.addresses = await hdaHelper.deriveAddresses(xpub, c, A, type)
  // Update derived index
  d = u + G
  Logger.info(` derived M/${c}/${A.join(',')}`)
  // Index derived addresses by address string for quick lookups
  const addrMap = {}
  for (let a of ret.addresses)
    addrMap[a.address] = a
  const aAddresses = ret.addresses.map(a => a.address)
  try {
    // Look up the derived addresses on the remote data source
    const results = await this.sources.getAddresses(aAddresses)
    let gotTransactions = false
    const scanTx = []
    for (let r of results) {
      if (r.ntx == 0) continue
      // Address is used. Update used parameter
      u = Math.max(u, addrMap[r.address].index)
      gotTransactions = true
      // TODO: Handle pathological case of many address transactions
      while (r.txids.length > 0) {
        let txid = r.txids.pop()
        if (!txids[txid])
          scanTx.push(txid)
      }
    }
    Logger.info(` Got ${scanTx.length} transactions`)
    // Fetch the full transactions sequentially through the local bitcoind
    await util.seriesCall(scanTx, async txid => {
      try {
        const tx = await rpcTxns.getTransaction(txid, false)
        if (tx == null) {
          Logger.info(` got null for ${txid}`)
          return null
        }
        ret.transactions.push(tx)
        txids[tx.txid] = true
      } catch(e) {
        Logger.error(e, `RemoteImporter.xpubScan() : rawTransaction error, txid ${txid}`)
      }
    })
    if (gotTransactions) {
      // We must go deeper: recurse with the updated used index
      const result = await this.xpubScan(xpub, c, d, u, G, type, txids)
      // Accumulate results from further down the rabbit hole
      for (let a of result.addresses)
        ret.addresses.push(a)
      for (let t of result.transactions)
        ret.transactions.push(t)
    }
  } catch(e) {
    Logger.error(e, `RemoteImporter.xpubScan() : xpub ${xpub} ${c} ${d} ${u} ${G}`)
  } finally {
    // Push everything up the rabbit hole
    return ret
  }
}
/**
 * Import a list of addresses
 * (addresses already being imported by a concurrent call are skipped)
 * @param {string[]} candidates - addresses to be imported
 * @param {boolean} filterAddr - True if addresses should be filtered, False otherwise
 * @returns {Promise} resolves to true once the import attempt has completed
 *    (true is returned even if the import failed; errors are logged)
 */
async importAddresses(candidates, filterAddr) {
  const t0 = Date.now()
  const txns = []
  const addresses = []
  const imported = []
  // Take a per-address "import lock" (this.importing) on each candidate
  for (let address of candidates) {
    if (!this.importing[address]) {
      addresses.push(address)
      this.importing[address] = true
    } else {
      Logger.info(`Note: Import overlap for ${address}. Skipping`)
    }
  }
  if (addresses.length == 0)
    return true
  Logger.info(`Importing ${addresses.join(',')}`)
  try {
    const scanTx = []
    const results = await this.sources.getAddresses(addresses, filterAddr)
    for (let r of results) {
      // Mark the address as imported
      imported.push(r.address)
      if (r.ntx == 0) continue
      // TODO: Handle pathological case of many address transactions
      while (r.txids.length > 0) {
        let txid = r.txids.pop()
        if (scanTx.indexOf(txid) == -1)
          scanTx.push(txid)
      }
    }
    Logger.info(` Got ${scanTx.length} transactions`)
    // Get transactions data from bitcoind
    await util.seriesCall(scanTx, async txid => {
      const tx = await rpcTxns.getTransaction(txid, false)
      if (tx == null) {
        Logger.info(` got null for ${txid}`)
        return null
      }
      txns.push(tx)
    })
    // Import addresses and transactions into the database
    await db.addAddresses(imported)
    // NOTE(review): the full locked list (`addresses`) is passed here while
    // only `imported` was confirmed by the data source — confirm intended
    await this._importTransactions(addresses, txns)
  } catch(e) {
    Logger.error(e, `RemoteImporter.importAddresses() : ${candidates.join(',')}`)
  } finally {
    // Always report timing and release the per-address import locks,
    // even when the import failed (errors were logged above)
    const dt = Date.now() - t0
    const ts = (dt/1000).toFixed(1)
    const N = addresses.length
    if (N > 0)
      Logger.info(` Imported ${N} addresses in ${ts}s (${(dt/N).toFixed(0)} ms/addr)`)
    for (let address of addresses)
      delete this.importing[address]
    return true
  }
}
/**
 * Add a transaction to the database.
 * Stores the transaction, confirms it if its block is known, then stores
 * the subset of its outputs/inputs that involve tracked addresses.
 * @param {object} tx - transaction object
 * @param {object} addrIdMap - map of address => database address id;
 *    only outputs paying to an address present in this map are stored
 * @returns {Promise}
 */
async addTransaction(tx, addrIdMap) {
  const outputs = []
  try {
    // Store the transaction into the database
    await db.addTransaction(tx)
    // Confirm the transaction
    if (tx.block) {
      const block = await db.getBlockByHash(tx.block.hash)
      if (block)
        await db.confirmTransactions([tx.txid], block.blockID)
    }
    // Retrieve the database id for the transaction
    let txnID = await db.ensureTransactionId(tx.txid)
    // Process the outputs
    // (only outputs paying a tracked address are kept)
    for (let output of tx.outputs) {
      if (addrIdMap[output.address]) {
        outputs.push({
          txnID,
          addrID: addrIdMap[output.address],
          outIndex: output.n,
          outAmount: output.value,
          outScript: output.scriptpubkey,
        })
      }
    }
    await db.addOutputs(outputs)
    // Process the inputs
    // Get any outputs spent by the inputs of this transaction, add those
    // database outIDs to the corresponding transaction inputs, and store.
    const res = await db.getOutputIds(tx.inputs.map(input => input.outpoint))
    const spent = {}
    const inputs = []
    // Index known outputs by "txid-vout" for O(1) lookup below
    for (let r of res)
      spent[`${r.txnTxid}-${r.outIndex}`] = r.outID
    for (let input of tx.inputs) {
      let key = `${input.outpoint.txid}-${input.outpoint.vout}`
      if (spent[key]) {
        inputs.push({
          outID: spent[key],
          txnID,
          inIndex: input.n,
          inSequence: input.seq
        })
      }
    }
    await db.addInputs(inputs)
  } catch(e) {
    // NOTE(review): message says "xpub" but the value logged is a txid
    Logger.error(e, `RemoteImporter.addTransaction() : xpub ${tx.txid}`)
    Logger.error(null, JSON.stringify(tx,null,2))
  }
}
}
module.exports = new RemoteImporter()

112
lib/remote-importer/sources-mainnet.js

@ -0,0 +1,112 @@
/*!
 * lib/remote-importer/sources-mainnet.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const addrHelper = require('../bitcoin/addresses-helper')
const network = require('../bitcoin/network')
const util = require('../util')
const Logger = require('../logger')
const keys = require('../../keys')[network.key]
const Sources = require('./sources')
const BitcoindWrapper = require('./bitcoind-wrapper')
const OxtWrapper = require('./oxt-wrapper')
/**
 * Remote data sources for mainnet
 */
class SourcesMainnet extends Sources {

  /**
   * Constructor
   */
  constructor() {
    super()
    // External data source used for every lookup; selected once at startup
    this.source = null
    this._initSource()
  }

  /**
   * Select the external data source
   * (local bitcoind when activated, OXT REST API otherwise)
   */
  _initSource() {
    const useBitcoind = (keys.explorers.bitcoind == 'active')
    if (useBitcoind) {
      // If local bitcoind option is activated
      // we'll use the local node as our unique source
      this.source = new BitcoindWrapper()
      Logger.info('Activated Bitcoind as the data source for imports')
      return
    }
    // Otherwise, we'll use the rest api provided by OXT
    this.source = new OxtWrapper(keys.explorers.oxt)
    Logger.info('Activated OXT API as the data source for imports')
  }

  /**
   * Retrieve information for a given address
   * @param {string} address - bitcoin address
   * @param {boolean} filterAddr - True if an upper bound should be used
   *    for #transactions associated to the address, False otherwise
   * @returns {Promise} returns an object
   *  { address: <bitcoin_address>, txids: <txids>, ntx: <total_nb_txs>}
   */
  async getAddress(address, filterAddr) {
    const ret = {
      address,
      txids: [],
      ntx: 0
    }
    try {
      const result = await this.source.getAddress(address, filterAddr)
      if (result.txids)
        ret.txids = result.txids
      // Prefer the explicit counter; fall back to the txid list length
      if (result.ntx)
        ret.ntx = result.ntx
      else if (result.txids)
        ret.ntx = result.txids.length
    } catch(e) {
      //Logger.error(e, `SourcesMainnet.getAddress() : ${address} from ${this.source.base}`)
      Logger.error(null, `SourcesMainnet.getAddress() : ${address} from ${this.source.base}`)
    } finally {
      return ret
    }
  }

  /**
   * Retrieve information for a list of addresses
   * @param {string[]} addresses - array of bitcoin address
   * @param {boolean} filterAddr - True if an upper bound should be used
   *    for #transactions associated to the address, False otherwise
   * @returns {Promise} returns an object
   *  { address: <bitcoin_address>, txids: <txids>, ntx: <total_nb_txs>}
   */
  async getAddresses(addresses, filterAddr) {
    const ret = []
    try {
      const results = await this.source.getAddresses(addresses, filterAddr)
      for (const entry of results) {
        // Filter addresses with too many txs
        if (!filterAddr || (entry.ntx <= keys.addrFilterThreshold))
          ret.push(entry)
      }
    } catch(e) {
      //Logger.error(e, `SourcesMainnet.getAddresses() : ${addresses} from ${this.source.base}`)
      Logger.error(null, `SourcesMainnet.getAddresses() : ${addresses} from ${this.source.base}`)
    } finally {
      return ret
    }
  }

}
module.exports = SourcesMainnet

153
lib/remote-importer/sources-testnet.js

@ -0,0 +1,153 @@
/*!
* lib/remote-importer/sources-testnet.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const addrHelper = require('../bitcoin/addresses-helper')
const network = require('../bitcoin/network')
const util = require('../util')
const Logger = require('../logger')
const keys = require('../../keys')[network.key]
const Sources = require('./sources')
const BitcoindWrapper = require('./bitcoind-wrapper')
const InsightWrapper = require('./insight-wrapper')
const BtcComWrapper = require('./btccom-wrapper')
/**
 * Remote data sources for testnet polled round-robin to spread load
 */
class SourcesTestnet extends Sources {

  /**
   * Constructor
   */
  constructor() {
    super()
    // Pool of external sources polled round-robin
    this.sources = []
    // Index of the source used for the most recent request
    this.index = 0
    // Dedicated source used for bech32 addresses
    this.sourceBech32 = null
    this.isBitcoindActive = false
    // Initializes external sources
    this._initSources()
  }

  /**
   * Initialize the external data sources
   */
  _initSources() {
    if (keys.explorers.bitcoind == 'active') {
      // If local bitcoind option is activated
      // we'll use the local node as our unique source
      this.sourceBech32 = new BitcoindWrapper()
      this.sources.push(this.sourceBech32)
      this.isBitcoindActive = true
    } else {
      // Otherwise, we use a set of insight servers + btc.com for bech32 addresses
      this.sourceBech32 = new BtcComWrapper(keys.explorers.btccom)
      for (let url of keys.explorers.insight)
        this.sources.push(new InsightWrapper(url))
      this.isBitcoindActive = false
    }
  }

  /**
   * Get the next source index (round-robin over the pool)
   * @returns {integer} returns the next source index
   */
  nextIndex() {
    this.index++
    if (this.index >= this.sources.length)
      this.index = 0
    return this.index
  }

  /**
   * Retrieve information for a given address
   * @param {string} address - bitcoin address
   * @param {boolean} filterAddr - True if an upper bound should be used
   *    for #transactions associated to the address, False otherwise
   * @returns {Promise} returns an object
   *    { address: <bitcoin_address>, txids: <txids>, ntx: <total_nb_txs>}
   */
  async getAddress(address, filterAddr) {
    let source = ''
    const isBech32 = addrHelper.isBech32(address)
    const ret = {
      address,
      txids: [],
      ntx: 0
    }
    try {
      // bech32 addresses always go to the dedicated source;
      // others are spread round-robin over the pool
      source = isBech32 ? this.sourceBech32 : this.sources[this.nextIndex()]
      const result = await source.getAddress(address, filterAddr)
      if (result.ntx)
        ret.ntx = result.ntx
      else if (result.txids)
        ret.ntx = result.txids.length
      if (result.txids)
        ret.txids = result.txids
      return ret
    } catch(e) {
      Logger.error(e, `SourcesTestnet.getAddress() : ${address} from ${source.base}`)
      if (!isBech32 && this.sources.length > 1) {
        // Try again with another source
        // NOTE(review): unbounded recursion — if every source keeps failing
        // this retries forever; consider capping the number of attempts
        return this.getAddress(address, filterAddr)
      } else {
        return ret
      }
    }
  }

  /**
   * Retrieve information for a list of addresses
   * @param {string[]} addresses - array of bitcoin address
   * @param {boolean} filterAddr - True if an upper bound should be used
   *    for #transactions associated to the address, False otherwise
   * @returns {Promise} returns an object
   *    { address: <bitcoin_address>, txids: <txids>, ntx: <total_nb_txs>}
   */
  async getAddresses(addresses, filterAddr) {
    const ret = []
    try {
      if (this.isBitcoindActive) {
        // Single-source case: one batched request to bitcoind
        const source = this.sources[0]
        const results = await source.getAddresses(addresses, filterAddr)
        for (let r of results) {
          // Filter addresses with too many txs
          if (!filterAddr || (r.ntx <= keys.addrFilterThreshold))
            ret.push(r)
        }
      } else {
        // Multi-source case: split the list into chunks and resolve each
        // chunk's addresses in parallel, chunks sequentially
        const lists = util.splitList(addresses, this.sources.length)
        await util.seriesCall(lists, async list => {
          const results = await Promise.all(list.map(a => {
            return this.getAddress(a, filterAddr)
          }))
          for (let r of results) {
            // Filter addresses with too many txs
            if (!filterAddr || (r.ntx <= keys.addrFilterThreshold))
              ret.push(r)
          }
        })
      }
    } catch (e) {
      Logger.error(e, `SourcesTestnet.getAddresses() : Addr list = ${addresses}`)
    } finally {
      // NOTE(review): return in finally would mask a rethrown error;
      // harmless here since the catch above only logs
      return ret
    }
  }

}
module.exports = SourcesTestnet

40
lib/remote-importer/sources.js

@ -0,0 +1,40 @@
/*!
* lib/remote-importer/sources.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
/**
 * Abstract base class for a set of remote blockchain explorers.
 * Concrete subclasses (mainnet/testnet) implement the lookup methods.
 */
class Sources {

  /**
   * Constructor
   */
  constructor() {}

  /**
   * Retrieve information for a single bitcoin address
   * @param {string} address - bitcoin address
   * @param {boolean} filterAddr - True if an upper bound should be used
   *    for #transactions associated to the address, False otherwise
   * @returns {Promise} resolves to an object
   *  { address: <bitcoin_address>, txids: <txids>, ntx: <total_nb_txs>}
   */
  async getAddress(address, filterAddr) {}

  /**
   * Retrieve information for a list of bitcoin addresses
   * @param {string[]} addresses - array of bitcoin addresses
   * @param {boolean} filterAddr - True if an upper bound should be used
   *    for #transactions associated to the address, False otherwise
   * @returns {Promise} resolves to an array of objects
   *  { address: <bitcoin_address>, txids: <txids>, ntx: <total_nb_txs>}
   */
  async getAddresses(addresses, filterAddr) {}

}
module.exports = Sources

47
lib/remote-importer/wrapper.js

@ -0,0 +1,47 @@
/*!
* lib/remote-importer/wrapper.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const SocksProxyAgent = require('socks-proxy-agent')
const network = require('../bitcoin/network')
const keys = require('../../keys')[network.key]
/**
 * Abstract class defining a wrapper for a remote API
 */
class Wrapper {

  /**
   * Constructor
   * @param {string} url - base url of the remote API
   * @param {string} socks5Proxy - socks5 proxy url (falsy to connect directly)
   */
  constructor(url, socks5Proxy) {
    this.base = url
    if (socks5Proxy) {
      // Route requests through the configured socks5 proxy
      this.socksProxyAgent = new SocksProxyAgent(socks5Proxy)
    } else {
      this.socksProxyAgent = null
    }
  }

  /**
   * Retrieve information for a given address
   * @param {string} address - bitcoin address
   * @param {boolean} filterAddr - True if an upper bound should be used
   *    for #transactions associated to the address, False otherwise
   * @returns {Promise} returns an object
   *  { address: <bitcoin_address>, txids: <txids>, ntx: <total_nb_txs>}
   */
  async getAddress(address, filterAddr) {}

  /**
   * Retrieve information for a given list of addresses
   * @param {string} addresses - array of bitcoin addresses
   * @param {boolean} filterAddr - True if an upper bound should be used
   *    for #transactions associated to the address, False otherwise
   * @returns {Promise} returns an array of objects
   *  { address: <bitcoin_address>, txids: <txids>, ntx: <total_nb_txs>}
   */
  async getAddresses(addresses, filterAddr) {}

}
module.exports = Wrapper

368
lib/util.js

@ -0,0 +1,368 @@
/*!
* lib/util.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
/**
 * Class providing utility functions as static methods
 */
class Util {

  /**
   * Constructor
   */
  constructor() {}

  /**
   * Topological ordering of DAG
   * https://en.wikipedia.org/wiki/Topological_sorting
   *
   * Kahn's algorithm
   *
   * L Empty list that will contain the sorted elements
   * S Set of all nodes with no incoming edge
   * while S is non-empty do
   *   remove a node n from S
   *   add n to tail of L
   *   for each node m with an edge e from n to m do
   *     remove edge e from the graph
   *     if m has no other incoming edges then
   *       insert m into S
   *
   * NB: mutates `parents` (edges are removed as they are processed)
   *
   * @param {object} parents - map of {[key]: [incoming edge keys]}
   * @param {object} children - a map of {[key]: [outgoing edge keys]}
   * @returns {object}
   *    if graph has edges then
   *      return error (graph has at least one cycle)
   *    else
   *      return L (a topologically sorted order)
   */
  static topologicalOrdering(parents, children) {
    // S: nodes with no incoming edge
    const S = []
    for (let node in parents) {
      if (parents[node].length == 0) {
        // Node has no parent (incoming edges)
        S.push(node)
      }
    }
    const L = []
    while (S.length > 0) {
      const node = S.pop()
      L.push(node)
      // Loop over nodes that depend on node
      for (let child of children[node]) {
        let i = parents[child].indexOf(node)
        if (i > -1)
          parents[child].splice(i, 1)
        if (parents[child].length == 0)
          S.push(child)
      }
    }
    return L
  }

  /**
   * Serialize a series of asynchronous calls to a function
   * over a list of objects
   * ref: http://www.joezimjs.com/javascript/patterns-asynchronous-programming-promises/
   * @param {object[]} list - items passed to fn one at a time, in order
   * @param {function} fn - function returning a promise
   * @returns {Promise} resolves to the array of results, in input order
   */
  static seriesCall(list, fn) {
    const results = []
    return list.reduce((memo, item) => {
      return memo.then(() => {
        return fn(item)
      }).then(result => {
        results.push(result)
      })
    },
    Promise.resolve()
    ).then(function() {
      return results
    })
  }

  /**
   * Delay the call to a function
   * @param {integer} ms - delay in milliseconds
   * @param {object} v - value the returned promise resolves to
   * @returns {Promise}
   */
  static delay(ms, v) {
    return new Promise(resolve => {
      setTimeout(resolve.bind(null, v), ms)
    })
  }

  /**
   * Splits a list into a list of lists each with maximum length LIMIT
   * (chunks are sized as evenly as possible)
   * @param {object[]} list - list to split
   * @param {integer} limit - maximum length of a sublist
   * @returns {object[][]}
   */
  static splitList(list, limit) {
    if (list.length <= limit) {
      return [list]
    } else {
      const lists = []
      // How many lists to create?
      const count = Math.ceil(list.length / limit)
      // How many elements per list (max)?
      const els = Math.ceil(list.length / count)
      for (let i=0; i < count; i++) {
        lists.push(list.slice(i * els, (i+1) * els))
      }
      return lists
    }
  }

  /**
   * Check if a string is a valid hex value
   * @param {string} hash - candidate string
   * @returns {boolean}
   */
  static isHashStr(hash) {
    const hexRegExp = new RegExp(/^[0-9a-f]*$/, 'i')
    return (typeof hash !== "string") ? false : hexRegExp.test(hash)
  }

  /**
   * Check if a string is a well formed 256 bits hash
   * (64 hex characters)
   * @param {string} hash - candidate string
   * @returns {boolean}
   */
  static is256Hash(hash) {
    return Util.isHashStr(hash) && hash.length == 64
  }

  /**
   * Sum an array of values
   * @param {number[]} arr - values
   * @returns {number}
   */
  static sum(arr) {
    return arr.reduce((memo, val) => { return memo + val }, 0)
  }

  /**
   * Mean of an array of values
   * @param {number[]} arr - values
   * @returns {number} mean, or NaN for an empty array
   */
  static mean(arr) {
    if (arr.length == 0)
      return NaN
    // Bug fix: the static method must be referenced through the class;
    // the previous bare `sum(arr)` call threw a ReferenceError
    return Util.sum(arr) / arr.length
  }

  /**
   * Compare 2 values (asc order)
   */
  static cmpAsc(a, b) {
    return a - b
  }

  /**
   * Compare 2 values (desc order)
   */
  static cmpDesc(a,b) {
    return b - a
  }

  /**
   * Median of an array of values
   * NB: sorts `arr` in place when `sorted` is falsy
   * @param {number[]} arr - values
   * @param {boolean} sorted - true if arr is already sorted ascending
   * @returns {number} median, or NaN for an empty array
   */
  static median(arr, sorted) {
    if (arr.length == 0) return NaN
    if (arr.length == 1) return arr[0]
    if (!sorted)
      arr.sort(Util.cmpAsc)
    const midpoint = Math.floor(arr.length / 2)
    if (arr.length % 2) {
      // Odd-length array
      return arr[midpoint]
    } else {
      // Even-length array
      return (arr[midpoint-1] + arr[midpoint]) / 2.0
    }
  }

  /**
   * Median Absolute Deviation of an array of values
   * @param {number[]} arr - values
   * @param {boolean} sorted - true if arr is already sorted ascending
   * @returns {number}
   */
  static mad(arr, sorted) {
    const med = Util.median(arr, sorted)
    // Deviations from the median
    const dev = []
    for (let val of arr)
      dev.push(Math.abs(val - med))
    return Util.median(dev)
  }

  /**
   * Quartiles of an array of values
   * NB: sorts `arr` in place when `sorted` is falsy
   * @param {number[]} arr - values
   * @param {boolean} sorted - true if arr is already sorted ascending
   * @returns {number[]} [q1, median, q3] (NaN entries if fewer than 3 values)
   */
  static quartiles(arr, sorted) {
    const q = [NaN,NaN,NaN]
    if (arr.length < 3) return q
    if (!sorted)
      arr.sort(Util.cmpAsc)
    // Set median
    q[1] = Util.median(arr, true)
    const midpoint = Math.floor(arr.length / 2)
    if (arr.length % 2) {
      // Odd-length array: weighted interpolation of the quartiles
      const mod4 = arr.length % 4
      const n = Math.floor(arr.length / 4)
      if (mod4 == 1) {
        q[0] = (arr[n-1] + 3 * arr[n]) / 4
        q[2] = (3 * arr[3*n] + arr[3*n+1]) / 4
      } else if (mod4 == 3) {
        q[0] = (3 * arr[n] + arr[n+1]) / 4
        q[2] = (arr[3*n+1] + 3 * arr[3*n+2]) / 4
      }
    } else {
      // Even-length array. Slices are already sorted
      q[0] = Util.median(arr.slice(0, midpoint), true)
      q[2] = Util.median(arr.slice(midpoint), true)
    }
    return q
  }

  /**
   * Obtain the value of the PCT-th percentile, where PCT on [0,100]
   * NB: sorts `arr` in place when `sorted` is falsy
   * @param {number[]} arr - values
   * @param {number} pct - percentile on [0,100]
   * @param {boolean} sorted - true if arr is already sorted ascending
   * @returns {number} interpolated percentile, NaN if fewer than 2 values
   */
  static percentile(arr, pct, sorted) {
    if (arr.length < 2) return NaN
    if (!sorted)
      arr.sort(Util.cmpAsc)
    const N = arr.length
    const p = pct/100.0
    let x // target rank
    if (p <= 1 / (N + 1)) {
      x = 1
    } else if (p < N / (N + 1)) {
      x = p * (N + 1)
    } else {
      x = N
    }
    // "Floor-x"
    const fx = Math.floor(x) - 1
    // "Mod-x"
    const mx = x % 1
    if (fx + 1 >= N) {
      return arr[fx]
    } else {
      // Linear interpolation between two array values
      return arr[fx] + mx * (arr[fx+1] - arr[fx])
    }
  }

  /**
   * Convert bytes to Mb
   * @param {number} bytes
   * @returns {number}
   */
  static toMb(bytes) {
    return +(bytes / Util.MB).toFixed(0)
  }

  /**
   * Current time as a unix timestamp (seconds)
   * @returns {integer}
   */
  static unix() {
    return (Date.now() / 1000) | 0
  }

  /**
   * Convert a value to a zero-padded string (2 chars, values < 100)
   */
  static pad10(v) {
    return (v < 10) ? `0${v}` : `${v}`
  }

  /**
   * Convert a value to a zero-padded string (3 chars, values < 1000)
   */
  static pad100(v) {
    if (v < 10) return `00${v}`
    if (v < 100) return `0${v}`
    return `${v}`
  }

  /**
   * Convert a value to a zero-padded string (4 chars, values < 10000)
   */
  static pad1000(v) {
    if (v < 10) return `000${v}`
    if (v < 100) return `00${v}`
    if (v < 1000) return `0${v}`
    return `${v}`
  }

  /**
   * Left pad a number to at least `places` characters
   * @param {number} number - value to pad (rounded to an integer)
   * @param {number} places - minimum width
   * @param {string} fill - fill character (default: space)
   * @returns {string|number} padded string (the number itself if negative)
   */
  static leftPad(number, places, fill) {
    number = Math.round(number)
    places = Math.round(places)
    fill = fill || ' '
    if (number < 0) return number
    // Number of digits in the value (1 for zero)
    const mag = (number > 0) ? (Math.floor(Math.log10(number)) + 1) : 1
    const parts = []
    for(let i=0; i < (places - mag); i++) {
      parts.push(fill)
    }
    parts.push(number)
    return parts.join('')
  }

  /**
   * Display a time period, in seconds, as DDD:HH:MM:SS[.MS]
   * @param {number} period - duration in seconds
   * @param {boolean} milliseconds - true to append the milliseconds part
   * @returns {string}
   */
  static timePeriod(period, milliseconds) {
    milliseconds = !!milliseconds
    const whole = Math.floor(period)
    // NOTE: ms may be fractional for non-millisecond-aligned periods
    const ms = 1000*(period - whole)
    const s = whole % 60
    const m = (whole >= 60) ? Math.floor(whole / 60) % 60 : 0
    const h = (whole >= 3600) ? Math.floor(whole / 3600) % 24 : 0
    const d = (whole >= 86400) ? Math.floor(whole / 86400) : 0
    const parts = [Util.pad10(h), Util.pad10(m), Util.pad10(s)]
    if (d > 0)
      parts.splice(0, 0, Util.pad100(d))
    const str = parts.join(':')
    if (milliseconds) {
      return str + '.' + Util.pad100(ms)
    } else {
      return str
    }
  }

}
/**
* 1Mb in bytes
*/
Util.MB = 1024*1024
module.exports = Util

152
lib/wallet/address-info.js

@ -0,0 +1,152 @@
/*!
* lib/wallet/address-info.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const db = require('../db/mysql-db-wrapper')
const hdaHelper = require('../bitcoin/hd-accounts-helper')
/**
 * A class storing information about the activity of an address
 */
class AddressInfo {

  /**
   * Constructor
   * @param {object} address - bitcoin address
   */
  constructor(address) {
    // Initializes properties
    this.address = address
    this.pubkey = null
    this.finalBalance = 0
    this.nTx = 0
    this.unspentOutputs = []
    // Fixed: a trailing comma previously fused these two assignments into a
    // single comma-expression statement (behavior was identical but accidental)
    this.tracked = false
    this.type = 'untracked'
    this.xpub = null
    this.path = null
    this.segwit = null
    this.txs = []
  }

  /**
   * Load balance and transaction count for the address
   * @returns {Promise}
   */
  async loadInfo() {
    const balance = await db.getAddressBalance(this.address)
    if (balance !== null)
      this.finalBalance = balance
    const nbTxs = await db.getAddressNbTransactions(this.address)
    if (nbTxs !== null)
      this.nTx = nbTxs
  }

  /**
   * Load information about the address
   * (extended form: also resolves hd account membership and tracking status)
   * @returns {Promise}
   */
  async loadInfoExtended() {
    const res = await db.getHDAccountsByAddresses([this.address])
    // Address belongs to a tracked hd account
    for (let xpub in res.hd) {
      const xpubType = hdaHelper.classify(res.hd[xpub].hdType).type
      const info = res.hd[xpub].addresses[0]
      this.tracked = true
      this.type = 'hd'
      this.xpub = xpub
      this.segwit = (xpubType === hdaHelper.BIP49 || xpubType === hdaHelper.BIP84)
      this.path = ['M', info.hdAddrChain, info.hdAddrIndex].join('/')
    }
    // Address is tracked on its own ("loose")
    if (res.loose.indexOf(this.address) > -1) {
      this.tracked = true
      this.type = 'loose'
    }
    return this.loadInfo()
  }

  /**
   * Loads a partial list of transactions for this address
   * @param {integer} page - page index (currently unused)
   * @param {integer} count - number of transactions per page (currently unused)
   * @returns {Promise}
   */
  async loadTransactions(page, count) {
    // NOTE(review): page/count are accepted but not forwarded, so the full
    // transaction list is always loaded — confirm whether pagination should
    // be passed to db.getTxsByAddrAndXpubs as done in wallet-info.js
    this.txs = await db.getTxsByAddrAndXpubs([this.address])
  }

  /**
   * Load the utxos associated to the address
   * @returns {Promise - object[]}
   */
  async loadUtxos() {
    this.unspentOutputs = []
    const res = await db.getUnspentOutputs([this.address])
    for (let r of res) {
      this.unspentOutputs.push({
        txid: r.txnTxid,
        vout: r.outIndex,
        amount: r.outAmount,
      })
    }
    // Order the utxos (most confirmed first)
    // NOTE(review): entries built above have no `confirmations` property,
    // so this comparator always returns NaN and keeps the db order — confirm
    this.unspentOutputs.sort((a,b) => b.confirmations - a.confirmations)
    return this.unspentOutputs
  }

  /**
   * Return a plain old js object with address properties
   * @returns {object}
   */
  toPojo() {
    const ret = {
      address: this.address,
      final_balance: this.finalBalance,
      n_tx: this.nTx
    }
    if (this.pubkey)
      ret.pubkey = this.pubkey
    return ret
  }

  /**
   * Return a plain old js object with address properties
   * (extended version)
   * @returns {object}
   */
  toPojoExtended() {
    const ret = {
      address: this.address,
      tracked: this.tracked,
      type: this.type,
      balance: this.finalBalance,
      xpub: this.xpub,
      path: this.path,
      segwit: this.segwit,
      n_tx: this.nTx,
      txids: this.txs.map(t => t.hash),
      utxo: this.unspentOutputs
    }
    if (this.pubkey)
      ret.pubkey = this.pubkey
    return ret
  }

}
module.exports = AddressInfo

187
lib/wallet/hd-account-info.js

@ -0,0 +1,187 @@
/*!
* lib/wallet/hd-account-info.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const errors = require('../errors')
const db = require('../db/mysql-db-wrapper')
const hdaHelper = require('../bitcoin/hd-accounts-helper')
const hdaService = require('../bitcoin/hd-accounts-service')
const rpcLatestBlock = require('../bitcoind-rpc/latest-block')
/**
 * A class storing information about the activity of a hd account
 */
class HdAccountInfo {

  /**
   * Constructor
   * @param {object} xpub - xpub
   */
  constructor(xpub) {
    // Initializes properties
    this.xpub = xpub
    this.address = xpub
    this.account = 0
    this.depth = 0
    this.finalBalance = 0
    // Next unused indices (external/internal chains)
    this.accountIndex = 0
    this.changeIndex = 0
    // Highest derived indices (external/internal chains)
    this.accountDerivedIndex = 0
    this.changeDerivedIndex = 0
    this.nTx = 0
    this.unspentOutputs = []
    this.derivation = null
    this.created = null
    this.tracked = false
  }

  /**
   * Ensure the hd account exists in database
   * Otherwise, tries to import it with BIP44 derivation
   * @returns {Promise - integer} return the internal id of the hd account
   *    or null if it doesn't exist
   */
  async ensureHdAccount() {
    try {
      const id = await db.getHDAccountId(this.xpub)
      return id
    } catch(e) {
      if (e == errors.db.ERROR_NO_HD_ACCOUNT) {
        try {
          // Default to BIP44 import
          return hdaService.restoreHdAccount(this.xpub, hdaHelper.BIP44)
        } catch(e) {
          return null
        }
      }
      return null
    }
  }

  /**
   * Load information about the hd account from the database
   * @returns {Promise} resolves to true on success, false on any error
   *    (errors are swallowed and reported as false)
   */
  async loadInfo() {
    try {
      // Throws if the hd account is unknown; `id` itself is unused
      const id = await db.getHDAccountId(this.xpub)
      //if (id == null) return false
      const account = await db.getHDAccount(this.xpub)
      this.created = account.hdCreated
      this.derivation = hdaHelper.typeString(account.hdType)
      this.tracked = true
      this.finalBalance = await db.getHDAccountBalance(this.xpub)
      const unusedIdx = await db.getHDAccountNextUnusedIndices(this.xpub)
      this.accountIndex = unusedIdx[0]
      this.changeIndex = unusedIdx[1]
      const derivedIdx = await db.getHDAccountDerivedIndices(this.xpub)
      this.accountDerivedIndex = derivedIdx[0]
      this.changeDerivedIndex = derivedIdx[1]
      this.nTx = await db.getHDAccountNbTransactions(this.xpub)
      const node = hdaHelper.getNode(this.xpub)
      const index = node[2].index
      // Indices >= 2^31 denote hardened derivation; strip the hardened bit
      // to recover the account number
      const threshold = Math.pow(2,31)
      const hardened = (index >= threshold)
      this.account = hardened ? (index - threshold) : index
      this.depth = node[2].depth
      return true
    } catch(e) {
      return false
    }
  }

  /**
   * Load the utxos associated to the hd account
   * @returns {Promise - object[]}
   */
  async loadUtxos() {
    this.unspentOutputs = []
    const utxos = await db.getHDAccountUnspentOutputs(this.xpub)
    for (let utxo of utxos) {
      // Confirmations computed from the cached chain tip
      // (0 for unconfirmed outputs)
      const conf =
        (utxo.blockHeight == null)
          ? 0
          : (rpcLatestBlock.height - utxo.blockHeight + 1)
      const entry = {
        tx_hash: utxo.txnTxid,
        tx_output_n: utxo.outIndex,
        tx_version: utxo.txnVersion,
        tx_locktime: utxo.txnLocktime,
        value: utxo.outAmount,
        script: utxo.outScript,
        addr: utxo.addrAddress,
        confirmations: conf,
        xpub: {
          m: this.xpub,
          path: ['M', utxo.hdAddrChain, utxo.hdAddrIndex].join('/')
        }
      }
      this.unspentOutputs.push(entry)
    }
    // Order the utxos (most confirmed first)
    this.unspentOutputs.sort((a,b) => b.confirmations - a.confirmations)
    return this.unspentOutputs
  }

  /**
   * Return a plain old js object with hd account properties
   * @returns {object}
   */
  toPojo() {
    return {
      address: this.address,
      final_balance: this.finalBalance,
      account_index: this.accountIndex,
      change_index: this.changeIndex,
      n_tx: this.nTx,
      derivation: this.derivation,
      created: this.created
    }
  }

  /**
   * Return a plain old js object with hd account properties
   * (extended version)
   * @returns {object}
   */
  toPojoExtended() {
    return {
      xpub: this.xpub,
      tracked: this.tracked,
      balance: this.finalBalance,
      unused: {
        external: this.accountIndex,
        internal: this.changeIndex,
      },
      derived: {
        external: this.accountDerivedIndex,
        internal: this.changeDerivedIndex,
      },
      n_tx: this.nTx,
      derivation: this.derivation,
      account: this.account,
      depth: this.depth,
      // created is stored as a unix timestamp (seconds)
      created: (new Date(this.created * 1000)).toGMTString()
    }
  }

}
module.exports = HdAccountInfo

88
lib/wallet/wallet-entities.js

@ -0,0 +1,88 @@
/*!
* lib/wallet/wallet-entities.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
/**
 * A class storing entities (xpubs, addresses, pubkeys)
 * defining a (full|partial) wallet
 */
class WalletEntities {

  /**
   * Constructor
   */
  constructor() {
    this.pubkeys = []
    this.addrs = []
    this.xpubs = []
    this.ypubs = []
    this.zpubs = []
  }

  /**
   * Add a new hd account
   * with its translation as an xpub
   * @param {string} xpub - xpub or tpub
   * @param {string} ypub - ypub or upub or false
   * @param {string} zpub - zpub or vpub or false
   */
  addHdAccount(xpub, ypub, zpub) {
    this.xpubs.push(xpub)
    this.ypubs.push(ypub)
    this.zpubs.push(zpub)
  }

  /**
   * Add a new address/pubkey
   * @param {string} address - bitcoin address
   * @param {string} pubkey - pubkey associated to the address or false
   */
  addAddress(address, pubkey) {
    this.addrs.push(address)
    this.pubkeys.push(pubkey)
  }

  /**
   * Update the pubkey associated to a given address
   * (no-op when the address is not listed)
   * @param {string} address - bitcoin address
   * @param {string} pubkey - public key
   */
  updatePubKey(address, pubkey) {
    const position = this.addrs.indexOf(address)
    if (position > -1)
      this.pubkeys[position] = pubkey
  }

  /**
   * Checks if a xpub is already listed
   * @param {string} xpub
   * @returns {boolean} returns true if the xpub is already listed, false otherwise
   */
  hasXPub(xpub) {
    return this.xpubs.includes(xpub)
  }

  /**
   * Checks if an address is already listed
   * @param {string} address - bitcoin address
   * @returns {boolean} returns true if the address is already listed, false otherwise
   */
  hasAddress(address) {
    return this.addrs.includes(address)
  }

  /**
   * Checks if a pubkey is already listed
   * @param {string} pubkey - public key
   * @returns {boolean} returns true if the pubkey is already listed, false otherwise
   */
  hasPubKey(pubkey) {
    return this.pubkeys.includes(pubkey)
  }

}
module.exports = WalletEntities

309
lib/wallet/wallet-info.js

@ -0,0 +1,309 @@
/*!
* lib/wallet/wallet-info.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const db = require('../db/mysql-db-wrapper')
const util = require('../util')
const rpcLatestBlock = require('../bitcoind-rpc/latest-block')
const addrService = require('../bitcoin/addresses-service')
const HdAccountInfo = require('./hd-account-info')
const AddressInfo = require('./address-info')
/**
* A class storing information about a (full|partial) wallet
* Provides a set of methods allowing to retrieve specific information
*/
class WalletInfo {
/**
 * Constructor
 * @param {object} entities - wallet entities (hdaccounts, addresses, pubkeys)
 */
constructor(entities) {
  // Initializes wallet properties
  this.entities = entities
  // Aggregated balance over all wallet entities (filled by the load* methods)
  this.wallet = {
    finalBalance: 0
  }
  // Snapshot of the chain tip, taken at construction time
  this.info = {
    latestBlock: {
      height: rpcLatestBlock.height,
      hash: rpcLatestBlock.hash,
      time: rpcLatestBlock.time,
    }
  }
  this.addresses = []
  this.txs = []
  this.unspentOutputs = []
  this.nTx = 0
}
/**
* Ensure hd accounts exist in database
* @returns {Promise}
*/
async ensureHdAccounts() {
return util.seriesCall(this.entities.xpubs, async xpub => {
const hdaInfo = new HdAccountInfo(xpub)
return hdaInfo.ensureHdAccount()
})
}
/**
 * Load information about the hd accounts
 * (accumulates each account's balance into the wallet balance
 * and appends the account info to this.addresses)
 * @returns {Promise}
 */
async loadHdAccountsInfo() {
  return util.seriesCall(this.entities.xpubs, async xpub => {
    const hdaInfo = new HdAccountInfo(xpub)
    await hdaInfo.loadInfo()
    // Accumulate the account balance into the wallet total
    this.wallet.finalBalance += hdaInfo.finalBalance
    this.addresses.push(hdaInfo)
  })
}
/**
* Ensure addresses exist in database
* @returns {Promise}
*/
async ensureAddresses() {
const importAddrs = []
const addrIdMap = await db.getAddressesIds(this.entities.addrs)
for (let addr of this.entities.addrs) {
if (!addrIdMap[addr])
importAddrs.push(addr)
}
// Import new addresses
return addrService.restoreAddresses(importAddrs, true)
}
/**
* Filter addresses that belong to an active hd account
* @returns {Promise}
*/
async filterAddresses() {
const res = await db.getXpubByAddresses(this.entities.addrs)
for (let addr in res) {
let xpub = res[addr]
if (this.entities.xpubs.indexOf(xpub) > -1) {
let i = this.entities.addrs.indexOf(addr)
if (i > -1) {
this.entities.addrs.splice(i, 1)
this.entities.pubkeys.splice(i, 1)
}
}
}
}
/**
 * Load information about the addresses
 * (accumulates each address balance into the wallet balance
 * and appends the address info to this.addresses)
 * @returns {Promise}
 */
async loadAddressesInfo() {
  return util.seriesCall(this.entities.addrs, async address => {
    const addrInfo = new AddressInfo(address)
    await addrInfo.loadInfo()
    // Accumulate the address balance into the wallet total
    this.wallet.finalBalance += addrInfo.finalBalance
    this.addresses.push(addrInfo)
  })
}
/**
* Loads a partial list of transactions for this wallet
* @param {integer} page - page index
* @param {integer} count - number of transactions per page
* @param {boolean} txBalance - True if past wallet balance
* should be computed for each transaction
* @returns {Promise}
*/
async loadTransactions(page, count, txBalance) {
this.txs = await db.getTxsByAddrAndXpubs(
this.entities.addrs,
this.entities.xpubs,
page,
count
)
if (txBalance) {
// Computes wallet balance after each transaction
let balance = this.wallet.finalBalance
for (let i = 0; i < this.txs.length; i++) {
this.txs[i].balance = balance
balance -= this.txs[i].result
}
}
}
/**
* Loads the number of transactions for this wallet
* @returns {Promise}
*/
async loadNbTransactions() {
const nbTxs = await db.getAddrAndXpubsNbTransactions(
this.entities.addrs,
this.entities.xpubs
)
if (nbTxs !== null)
this.nTx = nbTxs
}
/**
* Loads the list of unspent outputs for this wallet
* @returns {Promise}
*/
async loadUtxos() {
// Load the utxos for the hd accounts
await util.seriesCall(this.entities.xpubs, async xpub => {
const hdaInfo = new HdAccountInfo(xpub)
const utxos = await hdaInfo.loadUtxos()
for (let utxo of utxos)
this.unspentOutputs.push(utxo)
})
// Load the utxos for the addresses
const utxos = await db.getUnspentOutputs(this.entities.addrs)
for (let utxo of utxos) {
const conf =
(utxo.blockHeight == null)
? 0
: (rpcLatestBlock.height - utxo.blockHeight + 1)
const entry = {
tx_hash: utxo.txnTxid,
tx_output_n: utxo.outIndex,
tx_version: utxo.txnVersion,
tx_locktime: utxo.txnLocktime,
value: utxo.outAmount,
script: utxo.outScript,
addr: utxo.addrAddress,
confirmations: conf
}
this.unspentOutputs.push(entry)
}
// Order the utxos
this.unspentOutputs.sort((a,b) => b.confirmations - a.confirmations)
}
/**
* Post process addresses and public keys
*/
postProcessAddresses() {
for (let b = 0; b < this.entities.pubkeys.length; b++) {
const pk = this.entities.pubkeys[b]
if (pk) {
const address = this.entities.addrs[b]
// Add pubkeys in this.addresses
for (let c = 0; c < this.addresses.length; c++) {
if (address == this.addresses[c].address)
this.addresses[c].pubkey = pk
}
// Add pubkeys in this.txs
for (let d = 0; d < this.txs.length; d++) {
// inputs
for (let e = 0; e < this.txs[d].inputs.length; e++) {
if (address == this.txs[d].inputs[e].prev_out.addr)
this.txs[d].inputs[e].prev_out.pubkey = pk
}
// outputs
for (let e = 0; e < this.txs[d].out.length; e++) {
if (address == this.txs[d].out[e].addr)
this.txs[d].out[e].pubkey = pk
}
}
// Add pubkeys in this.unspentOutputs
for (let f = 0; f < this.unspentOutputs.length; f++) {
if (address == this.unspentOutputs[f].addr) {
this.unspentOutputs[f].pubkey = pk
}
}
}
}
}
/**
* Post process hd accounts (xpubs translations)
*/
postProcessHdAccounts() {
for (let b = 0; b < this.entities.xpubs.length; b++) {
const entityXPub = this.entities.xpubs[b]
const entityYPub = this.entities.ypubs[b]
const entityZPub = this.entities.zpubs[b]
if (entityYPub || entityZPub) {
const tgtXPub = entityYPub ? entityYPub : entityZPub
// Translate xpub => ypub/zpub in this.addresses
for (let c = 0; c < this.addresses.length; c++) {
if (entityXPub == this.addresses[c].address)
this.addresses[c].address = tgtXPub
}
// Translate xpub => ypub/zpub in this.txs
for (let d = 0; d < this.txs.length; d++) {
// inputs
for (let e = 0; e < this.txs[d].inputs.length; e++) {
const xpub = this.txs[d].inputs[e].prev_out.xpub
if (xpub && (xpub.m == entityXPub))
this.txs[d].inputs[e].prev_out.xpub.m = tgtXPub
}
// outputs
for (let e = 0; e < this.txs[d].out.length; e++) {
const xpub = this.txs[d].out[e].xpub
if (xpub && (xpub.m == entityXPub))
this.txs[d].out[e].xpub.m = tgtXPub
}
}
// Translate xpub => ypub/zpub in this.unspentOutputs
for (let f = 0; f < this.unspentOutputs.length; f++) {
const xpub = this.unspentOutputs[f].xpub
if (xpub && (xpub.m == entityXPub)) {
this.unspentOutputs[f].xpub.m = tgtXPub
}
}
}
}
}
/**
* Return a plain old js object with wallet properties
* @returns {object}
*/
toPojo() {
return {
wallet: {
final_balance: this.wallet.finalBalance
},
info: {
latest_block: this.info.latestBlock
},
addresses: this.addresses.map(a => a.toPojo()),
txs: this.txs,
unspent_outputs: this.unspentOutputs,
n_tx: this.nTx
}
}
}
module.exports = WalletInfo

301
lib/wallet/wallet-service.js

@ -0,0 +1,301 @@
/*!
* lib/wallet/wallet-service.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const util = require('../util')
const Logger = require('../logger')
const db = require('../db/mysql-db-wrapper')
const hdaService = require('../bitcoin/hd-accounts-service')
const hdaHelper = require('../bitcoin/hd-accounts-helper')
const WalletInfo = require('./wallet-info')
/**
 * A singleton providing a wallets service
 * (builds WalletInfo objects and formats them for the REST API)
 */
class WalletService {

  /**
   * Constructor
   */
  constructor() {}

  /**
   * Get wallet information
   * @param {object} active - mapping of active entities
   * @param {object} legacy - mapping of new legacy addresses
   * @param {object} bip49 - mapping of new bip49 addresses
   * @param {object} bip84 - mapping of new bip84 addresses
   * @param {object} pubkeys - mapping of new pubkeys/addresses
   * @returns {Promise} resolves to a POJO with wallet info,
   *    rejects with {status, error} on internal failure
   */
  async getWalletInfo(active, legacy, bip49, bip84, pubkeys) {
    // Check parameters
    const validParams = this._checkEntities(active, legacy, bip49, bip84, pubkeys)

    if (!validParams) {
      // Nothing to look up: return an empty wallet result
      const info = new WalletInfo()
      const ret = this._formatGetWalletInfoResult(info)
      return Promise.resolve(ret)
    }

    // Merge all entities into active mapping
    active = this._mergeEntities(active, legacy, bip49, bip84, pubkeys)

    // Initialize a WalletInfo object
    const walletInfo = new WalletInfo(active)

    try {
      // Add the new xpubs
      // (the _newBIPxx methods are passed unbound; safe because
      //  they do not use `this`)
      await util.seriesCall(legacy.xpubs, this._newBIP44)
      await util.seriesCall(bip49.xpubs, this._newBIP49)
      await util.seriesCall(bip84.xpubs, this._newBIP84)
      // Load hd accounts info
      await walletInfo.ensureHdAccounts()
      await walletInfo.loadHdAccountsInfo()
      // Add the new addresses
      await db.addAddresses(legacy.addrs)
      await db.addAddresses(bip49.addrs)
      await db.addAddresses(bip84.addrs)
      await db.addAddresses(pubkeys.addrs)
      // Ensure addresses exist and filter them
      await walletInfo.ensureAddresses()
      //await this._forceEnsureAddressesForActivePubkeys(active)
      await walletInfo.filterAddresses()
      await walletInfo.loadAddressesInfo()
      // Load the most recent transactions (page 0, no count limit,
      // with per-transaction balance computation)
      await walletInfo.loadTransactions(0, null, true)
      // Postprocessing
      await walletInfo.postProcessAddresses()
      await walletInfo.postProcessHdAccounts()
      // Format the result
      return this._formatGetWalletInfoResult(walletInfo)

    } catch(e) {
      Logger.error(e, 'WalletService.getWalletInfo()')
      return Promise.reject({status:'error', error:'internal server error'})
    }
  }

  /**
   * Prepares the result to be returned by getWalletInfo()
   * Strips fields not exposed by this endpoint
   * (n_tx, unspent_outputs, per-address derivation/created)
   * @param {WalletInfo} info
   * @returns {object}
   */
  _formatGetWalletInfoResult(info) {
    let ret = info.toPojo()

    delete ret['n_tx']
    delete ret['unspent_outputs']

    ret.addresses = ret.addresses.map(x => {
      delete x['derivation']
      delete x['created']
      return x
    })

    return ret
  }

  /**
   * Get wallet unspent outputs
   * @param {object} active - mapping of active entities
   * @param {object} legacy - mapping of new legacy addresses
   * @param {object} bip49 - mapping of new bip49 addresses
   * @param {object} bip84 - mapping of new bip84 addresses
   * @param {object} pubkeys - mapping of new pubkeys/addresses
   * @returns {Promise} resolves to {unspent_outputs: array},
   *    rejects with {status, error} on internal failure
   */
  async getWalletUtxos(active, legacy, bip49, bip84, pubkeys) {
    const ret = {
      unspent_outputs: []
    }

    // Check parameters
    const validParams = this._checkEntities(active, legacy, bip49, bip84, pubkeys)

    if (!validParams)
      return Promise.resolve(ret)

    // Merge all entities into active mapping
    active = this._mergeEntities(active, legacy, bip49, bip84, pubkeys)

    // Initialize a WalletInfo object
    const walletInfo = new WalletInfo(active)

    try {
      // Add the new xpubs
      await util.seriesCall(legacy.xpubs, this._newBIP44)
      await util.seriesCall(bip49.xpubs, this._newBIP49)
      await util.seriesCall(bip84.xpubs, this._newBIP84)
      // Ensure hd accounts exist
      await walletInfo.ensureHdAccounts()
      // Add the new addresses
      await db.addAddresses(legacy.addrs)
      await db.addAddresses(bip49.addrs)
      await db.addAddresses(bip84.addrs)
      await db.addAddresses(pubkeys.addrs)
      // Ensure addresses exist and filter them
      await walletInfo.ensureAddresses()
      //await this._forceEnsureAddressesForActivePubkeys(active)
      await walletInfo.filterAddresses()
      // Load the utxos
      await walletInfo.loadUtxos()
      // Postprocessing
      await walletInfo.postProcessAddresses()
      await walletInfo.postProcessHdAccounts()
      // Format the result
      ret.unspent_outputs = walletInfo.unspentOutputs
      return ret

    } catch(e) {
      Logger.error(e, 'WalletService.getWalletUtxos()')
      return Promise.reject({status: 'error', error: 'internal server error'})
    }
  }

  /**
   * Get a subset of wallet transaction
   * @param {object} entities - mapping of active entities
   * @param {integer} page - page of transactions to be returned
   * @param {integer} count - number of transactions returned per page
   * @returns {Promise} resolves to {n_tx, page, n_tx_page, txs},
   *    rejects with {status, error} on internal failure
   */
  async getWalletTransactions(entities, page, count) {
    const ret = {
      n_tx: 0,
      page: page,
      n_tx_page: count,
      txs: []
    }

    // Check parameters
    if (entities.xpubs.length == 0 && entities.addrs.length == 0)
      return ret

    // Initialize a WalletInfo object
    const walletInfo = new WalletInfo(entities)

    try {
      // Filter the addresses
      await walletInfo.filterAddresses()
      // Load the number of transactions
      await walletInfo.loadNbTransactions()
      // Load the requested page of transactions
      // (no balance computation for paged results)
      await walletInfo.loadTransactions(page, count, false)
      // Postprocessing
      await walletInfo.postProcessAddresses()
      await walletInfo.postProcessHdAccounts()
      // Format the result
      ret.n_tx = walletInfo.nTx
      ret.txs = walletInfo.txs
      return ret

    } catch(e) {
      Logger.error(e, 'WalletService.getWalletTransactions()')
      return Promise.reject({status:'error', error:'internal server error'})
    }
  }

  /**
   * Force addresses derived from an active pubkey to be stored in database
   * (currently unused: call sites above are commented out)
   * @param {object} active - mapping of active entities
   * @returns {Promise}
   */
  async _forceEnsureAddressesForActivePubkeys(active) {
    const filteredAddrs = []

    for (let i in active.addrs) {
      if (active.pubkeys[i]) {
        filteredAddrs.push(active.addrs[i])
      }
    }

    return db.addAddresses(filteredAddrs)
  }

  /**
   * Check entities
   * NOTE(review): bip49.addrs and bip84.addrs are not inspected here,
   * although getWalletInfo()/getWalletUtxos() do import them into the db.
   * Confirm whether this omission is intentional.
   * @param {object} active - mapping of active entities
   * @param {object} legacy - mapping of new legacy addresses
   * @param {object} bip49 - mapping of new bip49 addresses
   * @param {object} bip84 - mapping of new bip84 addresses
   * @param {object} pubkeys - mapping of new pubkeys/addresses
   * @returns {boolean} return true if conditions are met, false otherwise
   */
  _checkEntities(active, legacy, bip49, bip84, pubkeys) {
    const allEmpty = active.xpubs.length == 0
      && active.addrs.length == 0
      && legacy.xpubs.length == 0
      && legacy.addrs.length == 0
      && pubkeys.addrs.length == 0
      && bip49.xpubs.length == 0
      && bip84.xpubs.length == 0

    return !allEmpty
  }

  /**
   * Merge all entities into active mapping
   * Mutates and returns the `active` argument.
   * Only legacy and pubkeys addresses are merged
   * (bip49/bip84 addresses are handled through their xpubs).
   * @param {object} active - mapping of active entities
   * @param {object} legacy - mapping of new legacy entities
   * @param {object} bip49 - mapping of new bip49 entities
   * @param {object} bip84 - mapping of new bip84 entities
   * @param {object} pubkeys - mapping of new pubkeys
   */
  _mergeEntities(active, legacy, bip49, bip84, pubkeys) {
    // Put all xpub into active.xpubs
    active.xpubs = active.xpubs
      .concat(legacy.xpubs)
      .concat(bip49.xpubs)
      .concat(bip84.xpubs)

    // Put addresses and pubkeys into active
    // but avoid duplicates
    for (let source of [legacy, pubkeys]) {
      for (let idxSource in source.addrs) {
        const addr = source.addrs[idxSource]
        const pubkey = source.pubkeys[idxSource]
        const idxActive = active.addrs.indexOf(addr)
        if (idxActive == -1) {
          active.addrs.push(addr)
          active.pubkeys.push(pubkey)
        } else if (pubkey) {
          // Address already known: upgrade its pubkey entry
          active.pubkeys[idxActive] = pubkey
        }
      }
    }

    return active
  }

  /**
   * Create a new BIP44 hd account into the database
   * @param {string} xpub
   * @returns {Promise}
   */
  async _newBIP44(xpub) {
    return hdaService.createHdAccount(xpub, hdaHelper.BIP44)
  }

  /**
   * Create a new BIP49 hd account into the database
   * @param {string} xpub
   * @returns {Promise}
   */
  async _newBIP49(xpub) {
    return hdaService.createHdAccount(xpub, hdaHelper.BIP49)
  }

  /**
   * Create a new BIP84 hd account into the database
   * @param {string} xpub
   * @returns {Promise}
   */
  async _newBIP84(xpub) {
    return hdaService.createHdAccount(xpub, hdaHelper.BIP84)
  }

}

module.exports = new WalletService()

42
package.json

@ -0,0 +1,42 @@
{
"name": "samourai-dojo",
"version": "1.0.0",
"description": "Backend server for Samourai Wallet",
"main": "accounts/index.js",
"scripts": {
"test": "mocha --recursive --reporter spec"
},
"repository": {
"type": "git",
"url": "git+ssh://git@github.com:Samourai-Wallet/samourai-dojo.git"
},
"author": "Katana Cryptographic Ltd.",
"license": "AGPL-3.0-only",
"homepage": "https://github.com/Samourai-Wallet/samourai-dojo",
"dependencies": {
"async-sema": "2.1.2",
"bip39": "2.4.0",
"bitcoind-rpc-client": "0.3.1",
"bitcoinjs-lib": "3.2.0",
"bitcoinjs-message": "1.0.1",
"body-parser": "1.18.3",
"express": "4.16.3",
"express-jwt": "5.3.1",
"generic-pool": "3.4.2",
"heapdump": "0.3.9",
"helmet": "3.12.1",
"lru-cache": "4.0.2",
"mysql": "2.16.0",
"passport": "0.4.0",
"passport-localapikey-update": "0.6.0",
"request": "2.88.0",
"request-promise-native": "1.0.5",
"socks-proxy-agent": "4.0.1",
"validator": "10.8.0",
"websocket": "1.0.28",
"zeromq": "4.2.0"
},
"devDependencies": {
"mocha": "^3.5.0"
}
}

49
pushtx/index-orchestrator.js

@ -0,0 +1,49 @@
/*!
 * pushtx/index-orchestrator.js
 * Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
 */
(async () => {

  'use strict'

  const Logger = require('../lib/logger')
  const db = require('../lib/db/mysql-db-wrapper')
  const RpcClient = require('../lib/bitcoind-rpc/rpc-client')
  const network = require('../lib/bitcoin/network')
  const keys = require('../keys')[network.key]
  const Orchestrator = require('./orchestrator')
  const pushTxProcessor = require('./pushtx-processor')

  /**
   * Entry point of the PushTx Orchestrator process
   */
  Logger.info(`Process ID: ${process.pid}`)
  Logger.info('Preparing the pushTx Orchestrator')

  // Block until the local bitcoind RPC API accepts requests
  await RpcClient.waitForBitcoindRpcApi()

  // Configure and connect the database wrapper
  // with the connection pool dedicated to the orchestrator
  const poolConfig = {
    connectionLimit: keys.db.connectionLimitPushTxOrchestrator,
    acquireTimeout: keys.db.acquireTimeout,
    host: keys.db.host,
    user: keys.db.user,
    password: keys.db.pass,
    database: keys.db.database
  }
  db.connect(poolConfig)

  // Bind the notification socket of the pushTxProcessor singleton
  // to the orchestrator port
  pushTxProcessor.initNotifications({
    uriSocket: `tcp://*:${keys.ports.orchestrator}`
  })

  // Create the orchestrator and start processing blocks
  const orchestrator = new Orchestrator()
  orchestrator.start()

})()

57
pushtx/index.js

@ -0,0 +1,57 @@
/*!
 * pushtx/index.js
 * Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
 */
(async () => {

  'use strict'

  const Logger = require('../lib/logger')
  const db = require('../lib/db/mysql-db-wrapper')
  const RpcClient = require('../lib/bitcoind-rpc/rpc-client')
  const network = require('../lib/bitcoin/network')
  const keys = require('../keys')[network.key]
  const HttpServer = require('../lib/http-server/http-server')
  const PushTxRestApi = require('./pushtx-rest-api')
  const pushTxProcessor = require('./pushtx-processor')

  /**
   * Entry point of the PushTx API process
   */
  Logger.info(`Process ID: ${process.pid}`)
  Logger.info('Preparing the pushTx API')

  // Block until the local bitcoind RPC API accepts requests
  await RpcClient.waitForBitcoindRpcApi()

  // Configure and connect the database wrapper
  // with the connection pool dedicated to the pushTx API
  const poolConfig = {
    connectionLimit: keys.db.connectionLimitPushTxApi,
    acquireTimeout: keys.db.acquireTimeout,
    host: keys.db.host,
    user: keys.db.user,
    password: keys.db.pass,
    database: keys.db.database
  }
  db.connect(poolConfig)

  // Bind the notification socket of the pushTxProcessor singleton
  // to the pushtx notification port
  pushTxProcessor.initNotifications({
    uriSocket: `tcp://*:${keys.ports.notifpushtx}`
  })

  // Create the http server and attach the PushTx rest api
  // (route registration happens in the PushTxRestApi constructor)
  const httpServer = new HttpServer(keys.ports.pushtx, keys.https.pushtx)
  const pushtxRestApi = new PushTxRestApi(httpServer)

  // Start accepting requests
  httpServer.start()

})()

182
pushtx/orchestrator.js

@ -0,0 +1,182 @@
/*!
* pushtx/orchestrator.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const zmq = require('zeromq')
const Sema = require('async-sema')
const Logger = require('../lib/logger')
const db = require('../lib/db/mysql-db-wrapper')
const RpcClient = require('../lib/bitcoind-rpc/rpc-client')
const network = require('../lib/bitcoin/network')
const keys = require('../keys')[network.key]
const pushTxProcessor = require('./pushtx-processor')
/**
 * A class orchestrating the push of scheduled transactions
 * Listens to bitcoind block notifications over ZeroMQ and pushes
 * the scheduled transactions whose trigger height has been reached
 */
class Orchestrator {

  /**
   * Constructor
   */
  constructor() {
    // RPC client
    this.rpcClient = new RpcClient()
    // ZeroMQ socket for bitcoind blocks messages
    this.blkSock = null
    // Initialize a semaphor protecting the onBlockHash() method
    // (1 concurrent execution, up to 50 queued waiters)
    this._onBlockHashSemaphor = new Sema(1, { capacity: 50 })
  }

  /**
   * Start processing the blockchain
   * @returns {Promise}
   */
  start() {
    this.initSockets()
  }

  /**
   * Stop processing the blockchain (currently a no-op)
   */
  async stop() {}

  /**
   * Initialiaze ZMQ sockets
   * Subscribes to bitcoind 'hashblock' notifications
   */
  initSockets() {
    // Socket listening to bitcoind Blocks messages
    this.blkSock = zmq.socket('sub')
    this.blkSock.connect(keys.bitcoind.zmqBlk)
    this.blkSock.subscribe('hashblock')

    this.blkSock.on('message', (topic, message) => {
      switch (topic.toString()) {
        case 'hashblock':
          this.onBlockHash(message)
          break
        default:
          // Unexpected topic: just trace it
          Logger.info(topic.toString())
      }
    })

    Logger.info('Listening for blocks')
  }

  /**
   * Push Transactions if triggered by new block
   * Runs under a semaphore so that concurrent block notifications
   * are processed one at a time
   * @param {Buffer} buf - block hash
   */
  async onBlockHash(buf) {
    try {
      // Acquire the semaphor
      await this._onBlockHashSemaphor.acquire()

      // Retrieve the block height
      const blockHash = buf.toString('hex')
      const header = await this.rpcClient.getblockheader(blockHash, true)
      const height = header.height

      Logger.info(`Block ${height} ${blockHash}`)

      // Retrieve the transactions triggered by this block
      // Loop because pushing a tx may activate its children
      let txs = await db.getActivatedScheduledTransactions(height)

      while (txs && txs.length > 0) {
        // Set to false as soon as an RPC connection problem is detected
        let rpcConnOk = true

        for (let tx of txs) {
          let hasParentTx = (tx.schParentTxid != null) && (tx.schParentTxid != '')
          let parentTx = null

          // Check if previous transaction has been confirmed
          if (hasParentTx) {
            try {
              parentTx = await this.rpcClient.getrawtransaction(tx.schParentTxid, true)
            } catch(e) {
              // Best effort: a failed lookup leaves parentTx null
              // and the push is skipped below
              Logger.error(e, 'Transaction.getTransaction()')
            }
          }

          // Push only if parentless, or if the parent has enough confirmations
          if ((!hasParentTx) || (parentTx && parentTx.confirmations && (parentTx.confirmations >= tx.schDelay))) {
            // Push the transaction
            try {
              await pushTxProcessor.pushTx(tx.schRaw)
              Logger.info(`Pushed scheduled transaction ${tx.schTxid}`)
            } catch(e) {
              const msg = 'A problem was met while trying to push a scheduled transaction'
              Logger.error(e, `Orchestrator.onBlockHash() : ${msg}`)
              // Check if it's an issue with the connection to the RPC API
              // (=> immediately stop the loop)
              if (RpcClient.isConnectionError(e)) {
                Logger.info('Connection issue')
                rpcConnOk = false
                break
              }
            }

            // Update triggers of next transactions if needed
            // (the chain is shifted by the number of blocks this
            //  transaction was pushed late)
            if (tx.schTrigger < height) {
              const shift = height - tx.schTrigger
              try {
                await this.updateTriggers(tx.schID, shift)
              } catch(e) {
                const msg = 'A problem was met while shifting scheduled transactions'
                Logger.error(e, `Orchestrator.onBlockHash() : ${msg}`)
              }
            }

            // Delete the transaction
            try {
              await db.deleteScheduledTransaction(tx.schTxid)
            } catch(e) {
              const msg = 'A problem was met while trying to delete a scheduled transaction'
              Logger.error(e, `Orchestrator.onBlockHash() : ${msg}`)
            }
          }
        }

        // If a connection issue was detected, then stop the loop
        if (!rpcConnOk)
          break

        // Check if more transactions have to be pushed
        txs = await db.getActivatedScheduledTransactions(height)
      }

    } catch(e) {
      Logger.error(e, 'Orchestrator.onBlockHash() : Error')
    } finally {
      // Release the semaphor
      await this._onBlockHashSemaphor.release()
    }
  }

  /**
   * Update triggers in chain of transactions
   * following a transaction identified by its txid
   * (recursively walks the chain of scheduled children)
   * @param {integer} parentId - parent id
   * @param {integer} shift - delta to be added to the triggers
   */
  async updateTriggers(parentId, shift) {
    if (shift == 0)
      return

    const txs = await db.getNextScheduledTransactions(parentId)

    for (let tx of txs) {
      // Update the trigger of the transaction
      const newTrigger = tx.schTrigger + shift
      await db.updateTriggerScheduledTransaction(tx.schID, newTrigger)
      // Update the triggers of next transactions in the chain
      await this.updateTriggers(tx.schID, shift)
      Logger.info(`Rescheduled tx ${tx.schTxid} (trigger=${newTrigger})`)
    }
  }

}

module.exports = Orchestrator

77
pushtx/pushtx-processor.js

@ -0,0 +1,77 @@
/*!
* pushtx/pushtx-processor.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const bitcoin = require('bitcoinjs-lib')
const zmq = require('zeromq')
const Logger = require('../lib/logger')
const errors = require('../lib/errors')
const RpcClient = require('../lib/bitcoind-rpc/rpc-client')
const network = require('../lib/bitcoin/network')
const keys = require('../keys')[network.key]
const status = require('./status')
/**
 * A singleton providing a wrapper
 * for pushing transactions with the local bitcoind
 */
class PushTxProcessor {

  /**
   * Constructor
   */
  constructor() {
    // ZMQ PUB socket used to notify the tracker of pushed transactions
    this.notifSock = null
    // Initialize the rpc client
    this.rpcClient = new RpcClient()
  }

  /**
   * Initialize the sockets for notifications
   * @param {object} config - configuration object
   * @param {string} config.uriSocket - URI the notification socket binds to
   */
  initNotifications(config) {
    // Notification socket for the tracker
    this.notifSock = zmq.socket('pub')
    this.notifSock.bindSync(config.uriSocket)
  }

  /**
   * Push transactions to the Bitcoin network
   * @param {string} rawtx - raw bitcoin transaction in hex format
   * @returns {string} returns the txid of the transaction
   * @throws errors.tx.PARSE if rawtx cannot be parsed as a transaction,
   *    or the RPC error if bitcoind rejects the transaction
   */
  async pushTx(rawtx) {
    // Attempt to parse incoming TX hex as a bitcoin Transaction.
    // Keep the try block narrow so that only a genuine parsing failure
    // is reported as a PARSE error (previously the value summation and
    // the Logger call were inside the try and any failure there would
    // have been misreported as a parse error).
    let tx
    try {
      tx = bitcoin.Transaction.fromHex(rawtx)
    } catch(e) {
      throw errors.tx.PARSE
    }

    // Compute the total value carried by the outputs
    let value = 0
    for (let output of tx.outs)
      value += output.value

    Logger.info('Push for ' + (value / 1e8).toFixed(8) + ' BTC')

    // At this point, the raw hex parses as a legitimate transaction.
    // Attempt to send via RPC to the bitcoind instance
    try {
      const txid = await this.rpcClient.sendrawtransaction(rawtx)
      Logger.info('Pushed!')
      // Update the stats
      status.updateStats(value)
      // Notify the tracker
      this.notifSock.send(['pushtx', rawtx])
      return txid
    } catch(err) {
      Logger.info('Push failed')
      throw err
    }
  }

}

module.exports = new PushTxProcessor()

223
pushtx/pushtx-rest-api.js

@ -0,0 +1,223 @@
/*!
* pushtx/pushtx-rest-api.js
* Copyright © 2019 Katana Cryptographic Ltd. All Rights Reserved.
*/
'use strict'
const qs = require('querystring')
const validator = require('validator')
const bodyParser = require('body-parser')
const Logger = require('../lib/logger')
const errors = require('../lib/errors')
const authMgr = require('../lib/auth/authorizations-manager')
const HttpServer = require('../lib/http-server/http-server')
const network = require('../lib/bitcoin/network')
const keys = require('../keys')[network.key]
const status = require('./status')
const pushTxProcessor = require('./pushtx-processor')
const TransactionsScheduler = require('./transactions-scheduler')
/**
 * PushTx API endpoints
 * Registers the /pushtx routes on the given http server
 * and implements their handlers
 */
class PushTxRestApi {

  /**
   * Constructor
   * @param {pushtx.HttpServer} httpServer - HTTP server
   */
  constructor(httpServer) {
    this.httpServer = httpServer
    // Scheduler used by the /schedule endpoint
    this.scheduler = new TransactionsScheduler()

    // Establish routes
    const jsonParser = bodyParser.json()

    // Establish routes. Proxy server strips /pushtx
    this.httpServer.app.post(
      '/schedule',
      jsonParser,
      authMgr.checkAuthentication.bind(authMgr),
      this.postScheduleTxs.bind(this),
      HttpServer.sendAuthError
    )

    this.httpServer.app.post(
      '/',
      authMgr.checkAuthentication.bind(authMgr),
      this.postPushTx.bind(this),
      HttpServer.sendAuthError
    )

    this.httpServer.app.get(
      '/',
      authMgr.checkAuthentication.bind(authMgr),
      this.getPushTx.bind(this),
      HttpServer.sendAuthError
    )

    // Status endpoints are restricted to the admin profile
    this.httpServer.app.get(
      `/${keys.prefixes.statusPushtx}/`,
      authMgr.checkHasAdminProfile.bind(authMgr),
      this.getStatus.bind(this),
      HttpServer.sendAuthError
    )

    this.httpServer.app.get(
      `/${keys.prefixes.statusPushtx}/schedule`,
      authMgr.checkHasAdminProfile.bind(authMgr),
      this.getStatusSchedule.bind(this),
      HttpServer.sendAuthError
    )

    // Handle unknown paths, returning a help message
    this.httpServer.app.get(
      '/*',
      authMgr.checkAuthentication.bind(authMgr),
      this.getHelp.bind(this),
      HttpServer.sendAuthError
    )
  }

  /**
   * Handle Help GET request
   * Returns a 404 listing the available endpoints
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  getHelp(req, res) {
    const ret = {endpoints: ['/pushtx', '/pushtx/schedule']}
    HttpServer.sendError(res, ret, 404)
  }

  /**
   * Handle Status GET request
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  async getStatus(req, res) {
    try {
      const currStatus = await status.getCurrent()
      HttpServer.sendOkData(res, currStatus)
    } catch(e) {
      this._traceError(res, e)
    }
  }

  /**
   * Handle status/schedule GET request
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  async getStatusSchedule(req, res) {
    try {
      const ret = await status.getScheduledTransactions()
      HttpServer.sendOkData(res, ret)
    } catch(e) {
      this._traceError(res, e)
    }
  }

  /**
   * Handle pushTx GET request
   * GET is not allowed on this endpoint (405)
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  getPushTx(req, res) {
    const ret = errors.get.DISALLOWED
    HttpServer.sendError(res, ret, 405)
  }

  /**
   * Handle POST requests
   * Push transactions to the Bitcoin network
   * Reads the raw body manually (urlencoded 'tx' parameter)
   * instead of relying on a body parser middleware
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  postPushTx(req, res) {
    // Accumulate POST data
    const chunks = []

    req.on('data', chunk => {
      chunks.push(chunk)
    })

    req.on('end', async () => {
      const body = chunks.join('')
      const query = qs.parse(body)

      if (!query.tx)
        return this._traceError(res, errors.body.NOTX)

      if (!validator.isHexadecimal(query.tx))
        return this._traceError(res, errors.body.INVDATA)

      try {
        const txid = await pushTxProcessor.pushTx(query.tx)
        HttpServer.sendOkData(res, txid)
      } catch(e) {
        this._traceError(res, e)
      }
    })
  }

  /**
   * Schedule a list of transactions
   * for delayed pushes
   * Expects a JSON body with a 'script' property
   * @param {object} req - http request object
   * @param {object} res - http response object
   */
  async postScheduleTxs(req, res) {
    // Check request arguments
    if (!req.body)
      return this._traceError(res, errors.body.NODATA)

    if (!req.body.script)
      return this._traceError(res, errors.body.NOSCRIPT)

    try {
      await this.scheduler.schedule(req.body.script)
      HttpServer.sendOk(res)
    } catch(e) {
      this._traceError(res, e)
    }
  }

  /**
   * Trace an error during push
   * If err.message is a JSON payload with code/message fields,
   * forward those to the client; otherwise forward err.message
   * or the raw error object
   * @param {object} res - http response object
   * @param {object} err - error object
   */
  _traceError(res, err) {
    let ret = null

    try {
      if (err.message) {
        let msg = {}
        try {
          // err.message may itself be a serialized {code, message} object
          msg = JSON.parse(err.message)
        } catch(e) {}

        if (msg.code && msg.message) {
          Logger.error(null, 'Error ' + msg.code + ': ' + msg.message)
          ret = {
            message: msg.message,
            code: msg.code
          }
        } else {
          Logger.error(err.message, 'ERROR')
          ret = err.message
        }
      } else {
        Logger.error(err, 'ERROR')
        ret = err
      }
    } catch (e) {
      Logger.error(e, 'ERROR')
      ret = e
    } finally {
      HttpServer.sendError(res, ret)
    }
  }

}

module.exports = PushTxRestApi

Some files were not shown because too many files changed in this diff

Loading…
Cancel
Save