|
|
@ -14,11 +14,12 @@ import json |
|
|
|
import ssl |
|
|
|
import time |
|
|
|
import traceback |
|
|
|
from collections import namedtuple |
|
|
|
from collections import defaultdict, namedtuple |
|
|
|
from functools import partial |
|
|
|
|
|
|
|
from lib.hash import sha256, double_sha256, hash_to_str, hex_str_to_hash |
|
|
|
from lib.jsonrpc import JSONRPC, json_notification_payload |
|
|
|
from lib.tx import Deserializer |
|
|
|
from lib.util import LoggedClass |
|
|
|
from server.block_processor import BlockProcessor |
|
|
|
from server.daemon import DaemonError |
|
|
@ -27,19 +28,25 @@ from server.version import VERSION |
|
|
|
|
|
|
|
|
|
|
|
class BlockServer(BlockProcessor):
    '''Like BlockProcessor but also has a mempool and a server manager.

    Servers are started immediately the block processor first catches
    up with the daemon.
    '''

    def __init__(self, env):
        super().__init__(env)
        self.server_mgr = ServerManager(self, env)
        self.mempool = MemPool(self)
        # True once the first catch-up has happened; the servers are
        # started exactly once, on that first catch-up.
        self.caught_up_yet = False

    async def caught_up(self, mempool_hashes):
        '''Called each time the block processor catches up with the daemon.

        Flushes via the base class, starts the servers on the first
        call, updates the mempool, and notifies sessions of touched
        addresses and the new height.
        '''
        # Call the base class to flush before doing anything else.
        await super().caught_up(mempool_hashes)
        if not self.caught_up_yet:
            await self.server_mgr.start_servers()
            self.caught_up_yet = True
        self.touched.update(await self.mempool.update(mempool_hashes))
        self.server_mgr.notify(self.height, self.touched)

    def on_cancel(self):
        # Stop serving clients before cancelling the base class's work.
        # NOTE(review): reconstructed from a diff split across hunks; a
        # docstring may have existed between the def line and the body.
        self.server_mgr.stop()
        super().on_cancel()

    def mempool_transactions(self, hash168):
        '''Generate (hex_hash, tx_fee, unconfirmed) tuples for mempool
        entries for the hash168.

        unconfirmed is True if any txin is unconfirmed.
        '''
        return self.mempool.transactions(hash168)

    def mempool_value(self, hash168):
        '''Return the unconfirmed amount in the mempool for hash168.

        Can be positive or negative.
        '''
        return self.mempool.value(hash168)
|
|
|
|
|
|
|
|
|
|
|
class MemPool(LoggedClass):
    '''Representation of the daemon's mempool.

    Updated regularly in caught-up state.  Goal is to enable efficient
    response to the value() and transactions() calls.

    To that end we maintain the following maps:

       tx_hash -> [txin_pairs, txout_pairs, unconfirmed]
       hash168 -> set of all tx hashes in which the hash168 appears

    A pair is a (hash168, value) tuple.  Unconfirmed is true if any of
    the tx's txins are unconfirmed.  tx hashes are hex strings.
    '''

    def __init__(self, bp):
        super().__init__()
        # tx_hash (hex str) -> (txin_pairs, txout_pairs, unconfirmed)
        self.txs = {}
        self.hash168s = defaultdict(set)  # None can be a key
        # The block processor; provides the daemon, coin and UTXO lookup.
        self.bp = bp
        # -1 marks the initial (never updated) state; update() uses this
        # to emit extra progress logging on the first pass.
        self.count = -1

    async def update(self, hex_hashes):
        '''Update state given the current mempool to the passed set of hashes.

        Remove transactions that are no longer in our mempool.
        Request new transactions we don't have then add to our mempool.

        Returns the set of hash168s touched by the changes; may include
        None (for unrecognised scripts).
        '''
        hex_hashes = set(hex_hashes)
        touched = set()
        # NOTE(review): missing_utxos is never populated in this version,
        # so the reporting block near the end is dead code; kept (with no
        # behaviour change) pending confirmation of intent.
        missing_utxos = []

        initial = self.count < 0
        if initial:
            self.logger.info('beginning import of {:,d} mempool txs'
                             .format(len(hex_hashes)))

        # Remove gone items
        gone = set(self.txs).difference(hex_hashes)
        for hex_hash in gone:
            txin_pairs, txout_pairs, unconfirmed = self.txs.pop(hex_hash)
            hash168s = set(hash168 for hash168, value in txin_pairs)
            hash168s.update(hash168 for hash168, value in txout_pairs)
            for hash168 in hash168s:
                self.hash168s[hash168].remove(hex_hash)
                if not self.hash168s[hash168]:
                    del self.hash168s[hash168]
            touched.update(hash168s)

        # Get the raw transactions for the new hashes.  Ignore the
        # ones the daemon no longer has (it will return None).  Put
        # them into a dictionary of hex hash to deserialized tx.
        hex_hashes.difference_update(self.txs)
        # NOTE(review): assumes the daemon returns results in the same
        # order the (set of) hashes is iterated below - confirm.
        raw_txs = await self.bp.daemon.getrawtransactions(hex_hashes)
        if initial:
            self.logger.info('analysing {:,d} mempool txs'
                             .format(len(raw_txs)))
        new_txs = {hex_hash: Deserializer(raw_tx).read_tx()
                   for hex_hash, raw_tx in zip(hex_hashes, raw_txs)
                   if raw_tx}
        del raw_txs, hex_hashes

        # The mempool is unordered, so process all outputs first so
        # that looking for inputs has full info.
        script_hash168 = self.bp.coin.hash168_from_script()
        db_utxo_lookup = self.bp.db_utxo_lookup

        def txout_pair(txout):
            # Map an output to its (hash168, value) pair.
            return (script_hash168(txout.pk_script), txout.value)

        for n, (hex_hash, tx) in enumerate(new_txs.items()):
            # Yield to process e.g. signals
            if n % 100 == 0:
                await asyncio.sleep(0)
            txout_pairs = [txout_pair(txout) for txout in tx.outputs]
            # Placeholder entry: inputs are filled in by the second pass.
            self.txs[hex_hash] = (None, txout_pairs, None)

        def txin_info(txin):
            # Resolve an input's previous output to ((hash168, value),
            # unconfirmed) - unconfirmed is True if the prevout is itself
            # a mempool tx.  May raise bp.MissingUTXOError via the DB.
            hex_hash = hash_to_str(txin.prev_hash)
            mempool_entry = self.txs.get(hex_hash)
            if mempool_entry:
                return mempool_entry[1][txin.prev_idx], True
            pair = db_utxo_lookup(txin.prev_hash, txin.prev_idx)
            return pair, False

        if initial:
            next_log = time.time()
            self.logger.info('processed outputs, now examining inputs. '
                             'This can take some time...')

        # Now add the inputs
        for n, (hex_hash, tx) in enumerate(new_txs.items()):
            # Yield to process e.g. signals
            if n % 10 == 0:
                await asyncio.sleep(0)

            if initial and time.time() > next_log:
                next_log = time.time() + 20
                self.logger.info('{:,d} done ({:d}%)'
                                 .format(n, int(n / len(new_txs) * 100)))

            txout_pairs = self.txs[hex_hash][1]
            try:
                infos = (txin_info(txin) for txin in tx.inputs)
                txin_pairs, unconfs = zip(*infos)
            except self.bp.MissingUTXOError:
                # Drop this TX.  If other mempool txs depend on it
                # it's harmless - next time the mempool is refreshed
                # they'll either be cleaned up or the UTXOs will no
                # longer be missing.
                del self.txs[hex_hash]
                continue
            self.txs[hex_hash] = (txin_pairs, txout_pairs, any(unconfs))

            # Update touched and self.hash168s for the new tx
            for hash168, value in txin_pairs:
                self.hash168s[hash168].add(hex_hash)
                touched.add(hash168)
            for hash168, value in txout_pairs:
                self.hash168s[hash168].add(hex_hash)
                touched.add(hash168)

        if missing_utxos:
            self.logger.info('{:,d} txs had missing UTXOs; probably the '
                             'daemon is a block or two ahead of us.'
                             .format(len(missing_utxos)))
            first = ', '.join('{} / {:,d}'
                              .format(hash_to_str(txin.prev_hash),
                                      txin.prev_idx)
                              for txin in sorted(missing_utxos)[:3])
            self.logger.info('first ones are {}'.format(first))

        # Log a summary every 25 quiet updates, or whenever txs left
        # the mempool.
        self.count += 1
        if self.count % 25 == 0 or gone:
            self.count = 0
            self.logger.info('{:,d} txs touching {:,d} addresses'
                             .format(len(self.txs), len(self.hash168s)))

        # Might include a None
        return touched

    def transactions(self, hash168):
        '''Generate (hex_hash, tx_fee, unconfirmed) tuples for mempool
        entries for the hash168.

        unconfirmed is True if any txin is unconfirmed.
        '''
        # Use .get() - indexing the defaultdict would insert an empty
        # set for unknown hash168s, undoing the cleanup done in update().
        for hex_hash in self.hash168s.get(hash168, ()):
            txin_pairs, txout_pairs, unconfirmed = self.txs[hex_hash]
            tx_fee = (sum(v for hash168, v in txin_pairs)
                      - sum(v for hash168, v in txout_pairs))
            yield (hex_hash, tx_fee, unconfirmed)

    def value(self, hash168):
        '''Return the unconfirmed amount in the mempool for hash168.

        Can be positive or negative.
        '''
        value = 0
        # .get() avoids polluting the defaultdict with empty entries.
        for hex_hash in self.hash168s.get(hash168, ()):
            txin_pairs, txout_pairs, unconfirmed = self.txs[hex_hash]
            value -= sum(v for h168, v in txin_pairs if h168 == hash168)
            value += sum(v for h168, v in txout_pairs if h168 == hash168)
        return value
|
|
|
|
|
|
|
|
|
|
|
class ServerManager(LoggedClass): |
|
|
|
'''Manages the servers.''' |
|
|
@ -163,8 +349,8 @@ class ServerManager(LoggedClass): |
|
|
|
now = time.time() |
|
|
|
return [(session.kind, |
|
|
|
session.peername(for_log=False), |
|
|
|
len(session.hash168s), |
|
|
|
'RPC' if isinstance(session, LocalRPC) else session.client, |
|
|
|
session.sub_count(), |
|
|
|
session.client, |
|
|
|
session.recv_count, session.recv_size, |
|
|
|
session.send_count, session.send_size, |
|
|
|
session.error_count, |
|
|
@ -197,9 +383,7 @@ class Session(JSONRPC): |
|
|
|
self.daemon = bp.daemon |
|
|
|
self.coin = bp.coin |
|
|
|
self.kind = kind |
|
|
|
self.hash168s = set() |
|
|
|
self.jobs = asyncio.Queue() |
|
|
|
self.current_task = None |
|
|
|
self.client = 'unknown' |
|
|
|
|
|
|
|
def connection_made(self, transport): |
|
|
@ -251,6 +435,16 @@ class Session(JSONRPC): |
|
|
|
return 'xx.xx.xx.xx:xx' |
|
|
|
return '{}:{}'.format(self.peer_info[0], self.peer_info[1]) |
|
|
|
|
|
|
|
def sub_count(self):
    '''Return the number of subscriptions held by this session.

    The base session holds none; subclasses override this.
    '''
    return 0
|
|
|
|
|
|
|
async def daemon_request(self, method, *args):
    '''Call the named daemon method with args, catching a DaemonError
    and converting it to an RPCError.

    Fix: raise self.RPCError - the bare name RPCError is not imported
    in this module (sibling code uses self.RPCError), so the original
    would hit a NameError when the daemon failed.
    '''
    try:
        return await getattr(self.daemon, method)(*args)
    except DaemonError as e:
        raise self.RPCError('daemon error: {}'.format(e))
|
|
|
|
|
|
|
def tx_hash_from_param(self, param): |
|
|
|
'''Raise an RPCError if the parameter is not a valid transaction |
|
|
|
hash.''' |
|
|
@ -309,6 +503,7 @@ class ElectrumX(Session): |
|
|
|
self.subscribe_headers = False |
|
|
|
self.subscribe_height = False |
|
|
|
self.notified_height = None |
|
|
|
self.hash168s = set() |
|
|
|
rpcs = [ |
|
|
|
('blockchain', |
|
|
|
'address.get_balance address.get_history address.get_mempool ' |
|
|
@ -324,6 +519,9 @@ class ElectrumX(Session): |
|
|
|
for prefix, suffixes in rpcs |
|
|
|
for suffix in suffixes.split()} |
|
|
|
|
|
|
|
def sub_count(self): |
|
|
|
return len(self.hash168s) |
|
|
|
|
|
|
|
async def notify(self, height, touched, cache): |
|
|
|
'''Notify the client about changes in height and touched addresses. |
|
|
|
|
|
|
@ -392,8 +590,8 @@ class ElectrumX(Session): |
|
|
|
|
|
|
|
async def tx_merkle(self, tx_hash, height): |
|
|
|
'''tx_hash is a hex string.''' |
|
|
|
hex_hashes = await self.daemon.block_hex_hashes(height, 1) |
|
|
|
block = await self.daemon.deserialised_block(hex_hashes[0]) |
|
|
|
hex_hashes = await self.daemon_request('block_hex_hashes', height, 1) |
|
|
|
block = await self.daemon_request('deserialised_block', hex_hashes[0]) |
|
|
|
tx_hashes = block['tx'] |
|
|
|
try: |
|
|
|
pos = tx_hashes.index(tx_hash) |
|
|
@ -503,7 +701,7 @@ class ElectrumX(Session): |
|
|
|
return self.electrum_header(height) |
|
|
|
|
|
|
|
async def estimatefee(self, params):
    '''Pass a fee-estimation request through to the daemon.

    Fix: the unapplied diff left both the old direct daemon call and
    the new daemon_request call (the second return was unreachable);
    keep only the daemon_request form, consistent with the other
    daemon calls converted in the same change.
    '''
    return await self.daemon_request('estimatefee', params)
|
|
|
|
|
|
|
async def headers_subscribe(self, params): |
|
|
|
self.require_empty_params(params) |
|
|
@ -519,7 +717,7 @@ class ElectrumX(Session): |
|
|
|
'''The minimum fee a low-priority tx must pay in order to be accepted |
|
|
|
to the daemon's memory pool.''' |
|
|
|
self.require_empty_params(params) |
|
|
|
return await self.daemon.relayfee() |
|
|
|
return await self.daemon_request('relayfee') |
|
|
|
|
|
|
|
async def transaction_broadcast(self, params): |
|
|
|
'''Pass through the parameters to the daemon. |
|
|
@ -555,7 +753,7 @@ class ElectrumX(Session): |
|
|
|
# in anticipation it might be dropped in the future. |
|
|
|
if 1 <= len(params) <= 2: |
|
|
|
tx_hash = self.tx_hash_from_param(params[0]) |
|
|
|
return await self.daemon.getrawtransaction(tx_hash) |
|
|
|
return await self.daemon_request('getrawtransaction', tx_hash) |
|
|
|
|
|
|
|
raise self.RPCError('params wrong length: {}'.format(params)) |
|
|
|
|
|
|
@ -629,3 +827,4 @@ class LocalRPC(Session): |
|
|
|
cmds = 'getinfo sessions numsessions peers numpeers'.split() |
|
|
|
self.handlers = {cmd: getattr(self.manager, 'rpc_{}'.format(cmd)) |
|
|
|
for cmd in cmds} |
|
|
|
self.client = 'RPC' |
|
|
|