Browse Source

Move to hashX

The hashX of a script is the first 11 bytes of its sha256 hash.

Closes #72.
master
Neil Booth 8 years ago
parent
commit
2b2909f1c4
  1. 92
      lib/coins.py
  2. 2
      lib/jsonrpc.py
  3. 22
      lib/script.py
  4. 27
      query.py
  5. 124
      server/block_processor.py
  6. 26
      server/controller.py
  7. 51
      server/db.py
  8. 10
      server/env.py
  9. 66
      server/mempool.py
  10. 95
      server/protocol.py

92
lib/coins.py

@ -1,4 +1,4 @@
# Copyright (c) 2016, Neil Booth # Copyright (c) 2016-2017, Neil Booth
# #
# All rights reserved. # All rights reserved.
# #
@ -13,13 +13,14 @@ necessary for appropriate handling.
from decimal import Decimal from decimal import Decimal
from functools import partial from functools import partial
from hashlib import sha256
import inspect import inspect
import re import re
import struct import struct
import sys import sys
from lib.hash import Base58, hash160, double_sha256, hash_to_str from lib.hash import Base58, hash160, ripemd160, double_sha256, hash_to_str
from lib.script import ScriptPubKey, Script from lib.script import ScriptPubKey
from lib.tx import Deserializer from lib.tx import Deserializer
from lib.util import cachedproperty, subclasses from lib.util import cachedproperty, subclasses
@ -37,9 +38,9 @@ class Coin(object):
RPC_URL_REGEX = re.compile('.+@[^:]+(:[0-9]+)?') RPC_URL_REGEX = re.compile('.+@[^:]+(:[0-9]+)?')
VALUE_PER_COIN = 100000000 VALUE_PER_COIN = 100000000
CHUNK_SIZE=2016 CHUNK_SIZE=2016
STRANGE_VERBYTE = 0xff
IRC_SERVER = "irc.freenode.net" IRC_SERVER = "irc.freenode.net"
IRC_PORT = 6667 IRC_PORT = 6667
HASHX_LEN = 11
@classmethod @classmethod
def lookup_coin_class(cls, name, net): def lookup_coin_class(cls, name, net):
@ -70,20 +71,28 @@ class Coin(object):
def daemon_urls(cls, urls): def daemon_urls(cls, urls):
return [cls.sanitize_url(url) for url in urls.split(',')] return [cls.sanitize_url(url) for url in urls.split(',')]
@classmethod
def hashX_from_script(cls, script):
'''Returns a hashX from a script.'''
script = ScriptPubKey.hashX_script(script)
if script is None:
return None
return sha256(script).digest()[:cls.HASHX_LEN]
@cachedproperty @cachedproperty
def hash168_handlers(cls): def address_handlers(cls):
return ScriptPubKey.PayToHandlers( return ScriptPubKey.PayToHandlers(
address = cls.P2PKH_hash168_from_hash160, address = cls.P2PKH_address_from_hash160,
script_hash = cls.P2SH_hash168_from_hash160, script_hash = cls.P2SH_address_from_hash160,
pubkey = cls.P2PKH_hash168_from_pubkey, pubkey = cls.P2PKH_address_from_pubkey,
unspendable = cls.hash168_from_unspendable, unspendable = lambda : None,
strange = cls.hash168_from_strange, strange = lambda script: None,
) )
@classmethod @classmethod
def hash168_from_script(cls): def address_from_script(cls, script):
'''Returns a function that is passed a script to return a hash168.''' '''Given a pk_script, return the address it pays to, or None.'''
return partial(ScriptPubKey.pay_to, cls.hash168_handlers) return ScriptPubKey.pay_to(cls.address_handlers, script)
@staticmethod @staticmethod
def lookup_xverbytes(verbytes): def lookup_xverbytes(verbytes):
@ -97,65 +106,26 @@ class Coin(object):
raise CoinError('version bytes unrecognised') raise CoinError('version bytes unrecognised')
@classmethod @classmethod
def address_to_hash168(cls, addr): def address_to_hashX(cls, address):
'''Return a 21-byte hash given an address. '''Return a hashX given a coin address.'''
return cls.hashX_from_script(cls.pay_to_address_script(address))
This is the hash160 prefixed by the address version byte.
'''
result = Base58.decode_check(addr)
if len(result) != 21:
raise CoinError('invalid address: {}'.format(addr))
return result
@classmethod
def hash168_to_address(cls, hash168):
'''Return an address given a 21-byte hash.'''
return Base58.encode_check(hash168)
@classmethod
def hash168_from_unspendable(cls):
'''Return a hash168 for an unspendable script.'''
return None
@classmethod
def hash168_from_strange(cls, script):
'''Return a hash168 for a strange script.'''
return bytes([cls.STRANGE_VERBYTE]) + hash160(script)
@classmethod
def P2PKH_hash168_from_hash160(cls, hash160):
'''Return a hash168 if hash160 is 160 bits otherwise None.'''
if len(hash160) == 20:
return bytes([cls.P2PKH_VERBYTE]) + hash160
return None
@classmethod
def P2PKH_hash168_from_pubkey(cls, pubkey):
return cls.P2PKH_hash168_from_hash160(hash160(pubkey))
@classmethod @classmethod
def P2PKH_address_from_hash160(cls, hash160): def P2PKH_address_from_hash160(cls, hash160):
'''Return a P2PKH address given a public key.''' '''Return a P2PKH address given a public key.'''
assert len(hash160) == 20 assert len(hash160) == 20
return Base58.encode_check(cls.P2PKH_hash168_from_hash160(hash160)) return Base58.encode_check(bytes([cls.P2PKH_VERBYTE]) + hash160)
@classmethod @classmethod
def P2PKH_address_from_pubkey(cls, pubkey): def P2PKH_address_from_pubkey(cls, pubkey):
'''Return a coin address given a public key.''' '''Return a coin address given a public key.'''
return cls.P2PKH_address_from_hash160(hash160(pubkey)) return cls.P2PKH_address_from_hash160(hash160(pubkey))
@classmethod
def P2SH_hash168_from_hash160(cls, hash160):
'''Return a hash168 if hash160 is 160 bits otherwise None.'''
if len(hash160) == 20:
return bytes([cls.P2SH_VERBYTE]) + hash160
return None
@classmethod @classmethod
def P2SH_address_from_hash160(cls, hash160): def P2SH_address_from_hash160(cls, hash160):
'''Return a coin address given a hash160.''' '''Return a coin address given a hash160.'''
assert len(hash160) == 20 assert len(hash160) == 20
return Base58.encode_check(cls.P2SH_hash168_from_hash160(hash160)) return Base58.encode_check(bytes([cls.P2SH_VERBYTE]) + hash160)
@classmethod @classmethod
def multisig_address(cls, m, pubkeys): def multisig_address(cls, m, pubkeys):
@ -195,7 +165,7 @@ class Coin(object):
if len(raw) == 21: if len(raw) == 21:
verbyte, hash_bytes = raw[0], raw[1:] verbyte, hash_bytes = raw[0], raw[1:]
if verbyte == cls.P2PKH_VERYBYTE: if verbyte == cls.P2PKH_VERBYTE:
return ScriptPubKey.P2PKH_script(hash_bytes) return ScriptPubKey.P2PKH_script(hash_bytes)
if verbyte == cls.P2SH_VERBYTE: if verbyte == cls.P2SH_VERBYTE:
return ScriptPubKey.P2SH_script(hash_bytes) return ScriptPubKey.P2SH_script(hash_bytes)
@ -262,9 +232,9 @@ class Bitcoin(Coin):
WIF_BYTE = 0x80 WIF_BYTE = 0x80
GENESIS_HASH=(b'000000000019d6689c085ae165831e93' GENESIS_HASH=(b'000000000019d6689c085ae165831e93'
b'4ff763ae46a2a6c172b3f1b60a8ce26f') b'4ff763ae46a2a6c172b3f1b60a8ce26f')
TX_COUNT = 142791895 TX_COUNT = 156335304
TX_COUNT_HEIGHT = 420976 TX_COUNT_HEIGHT = 429972
TX_PER_BLOCK = 1600 TX_PER_BLOCK = 1800
IRC_PREFIX = "E_" IRC_PREFIX = "E_"
IRC_CHANNEL = "#electrum" IRC_CHANNEL = "#electrum"
RPC_PORT = 8332 RPC_PORT = 8332

2
lib/jsonrpc.py

@ -290,7 +290,7 @@ class JSONRPC(asyncio.Protocol, LoggedClass):
except TypeError: except TypeError:
msg = 'JSON encoding failure: {}'.format(payload) msg = 'JSON encoding failure: {}'.format(payload)
self.log_error(msg) self.log_error(msg)
return self.json_error(msg, self.INTERNAL_ERROR, return self.send_json_error(msg, self.INTERNAL_ERROR,
self.payload_id(payload)) self.payload_id(payload))
self.check_oversized_request(len(binary)) self.check_oversized_request(len(binary))

22
lib/script.py

@ -12,6 +12,7 @@ import struct
from collections import namedtuple from collections import namedtuple
from lib.enum import Enumeration from lib.enum import Enumeration
from lib.hash import hash160
class ScriptError(Exception): class ScriptError(Exception):
@ -82,6 +83,27 @@ class ScriptPubKey(object):
PayToHandlers = namedtuple('PayToHandlers', 'address script_hash pubkey ' PayToHandlers = namedtuple('PayToHandlers', 'address script_hash pubkey '
'unspendable strange') 'unspendable strange')
@classmethod
def hashX_script(cls, script):
'''Return None if the script is provably unspendable. Return a
pay-to-pubkey-hash script if it is pay-to-pubkey, otherwise
return script.
'''
if script:
op = script[0]
if op == OpCodes.OP_RETURN:
return None
if op <= OpCodes.OP_PUSHDATA4:
try:
ops = Script.get_ops(script)
except ScriptError:
pass
else:
if _match_ops(ops, cls.TO_PUBKEY_OPS):
pubkey = ops[0][1]
script = ScriptPubKey.P2PKH_script(hash160(pubkey))
return script
@classmethod @classmethod
def pay_to(cls, handlers, script): def pay_to(cls, handlers, script):
'''Parse a script, invoke the appropriate handler and '''Parse a script, invoke the appropriate handler and

27
query.py

@ -26,15 +26,17 @@ def count_entries(db):
utxos += 1 utxos += 1
print("UTXO count:", utxos) print("UTXO count:", utxos)
hash168 = 0 hashX = 0
for key in db.iterator(prefix=b'h', include_value=False): for key in db.iterator(prefix=b'h', include_value=False):
hash168 += 1 hashX += 1
print("Hash168 count:", hash168) print("HashX count:", hashX)
hist = 0 hist = 0
for key in db.iterator(prefix=b'H', include_value=False): hist_len = 0
for key, value in db.iterator(prefix=b'H'):
hist += 1 hist += 1
print("History rows:", hist) hist_len += len(value) // 4
print("History rows {:,d} entries {:,d}", hist, hist_len)
def main(): def main():
@ -52,27 +54,20 @@ def main():
limit = 10 limit = 10
for addr in sys.argv[argc:]: for addr in sys.argv[argc:]:
print('Address: ', addr) print('Address: ', addr)
hash168 = coin.address_to_hash168(addr) hashX = coin.address_to_hashX(addr)
hist = 0
hist_len = 0
for key, value in bp.db.iterator(prefix=b'H'+hash168):
hist += 1
hist_len += len(value) // 4
print("History: {:,d} rows with {:,d} entries".format(hist, hist_len))
n = None n = None
for n, (tx_hash, height) in enumerate(bp.get_history(hash168, limit)): for n, (tx_hash, height) in enumerate(bp.get_history(hashX, limit)):
print('History #{:d}: hash: {} height: {:d}' print('History #{:d}: hash: {} height: {:d}'
.format(n + 1, hash_to_str(tx_hash), height)) .format(n + 1, hash_to_str(tx_hash), height))
n = None n = None
for n, utxo in enumerate(bp.get_utxos(hash168, limit)): for n, utxo in enumerate(bp.get_utxos(hashX, limit)):
print('UTXOs #{:d}: hash: {} pos: {:d} height: {:d} value: {:d}' print('UTXOs #{:d}: hash: {} pos: {:d} height: {:d} value: {:d}'
.format(n + 1, hash_to_str(utxo.tx_hash), .format(n + 1, hash_to_str(utxo.tx_hash),
utxo.tx_pos, utxo.height, utxo.value)) utxo.tx_pos, utxo.height, utxo.value))
if n is None: if n is None:
print('No UTXOs') print('No UTXOs')
balance = bp.get_balance(hash168) balance = bp.get_balance(hashX)
print('Balance: {} {}'.format(coin.decimal_value(balance), print('Balance: {} {}'.format(coin.decimal_value(balance),
coin.SHORTNAME)) coin.SHORTNAME))

124
server/block_processor.py

@ -172,8 +172,7 @@ class BlockProcessor(server.db.DB):
self.caught_up_event = asyncio.Event() self.caught_up_event = asyncio.Event()
# Meta # Meta
self.utxo_MB = env.utxo_MB self.cache_MB = env.cache_MB
self.hist_MB = env.hist_MB
self.next_cache_check = 0 self.next_cache_check = 0
# Headers and tx_hashes have one entry per block # Headers and tx_hashes have one entry per block
@ -194,10 +193,8 @@ class BlockProcessor(server.db.DB):
# Log state # Log state
if self.first_sync: if self.first_sync:
self.logger.info('flushing UTXO cache at {:,d} MB' self.logger.info('flushing DB cache at {:,d} MB'
.format(self.utxo_MB)) .format(self.cache_MB))
self.logger.info('flushing history cache at {:,d} MB'
.format(self.hist_MB))
async def main_loop(self, touched): async def main_loop(self, touched):
'''Main loop for block processing.''' '''Main loop for block processing.'''
@ -242,7 +239,7 @@ class BlockProcessor(server.db.DB):
touched.clear() touched.clear()
if time.time() > self.next_cache_check: if time.time() > self.next_cache_check:
self.check_cache_size() self.check_cache_size()
self.next_cache_check = time.time() + 60 self.next_cache_check = time.time() + 30
if not self.first_sync: if not self.first_sync:
s = '' if len(blocks) == 1 else 's' s = '' if len(blocks) == 1 else 's'
@ -391,8 +388,8 @@ class BlockProcessor(server.db.DB):
flush_id = pack('>H', self.flush_count) flush_id = pack('>H', self.flush_count)
for hash168, hist in self.history.items(): for hashX, hist in self.history.items():
key = b'H' + hash168 + flush_id key = b'H' + hashX + flush_id
batch.put(key, hist.tobytes()) batch.put(key, hist.tobytes())
if self.first_sync: if self.first_sync:
@ -415,10 +412,10 @@ class BlockProcessor(server.db.DB):
self.tx_hashes = [] self.tx_hashes = []
self.headers = [] self.headers = []
def backup_flush(self, hash168s): def backup_flush(self, hashXs):
'''Like flush() but when backing up. All UTXOs are flushed. '''Like flush() but when backing up. All UTXOs are flushed.
hash168s - sequence of hash168s which were touched by backing hashXs - sequence of hashXs which were touched by backing
up. Searched for history entries to remove after the backup up. Searched for history entries to remove after the backup
height. height.
''' '''
@ -430,7 +427,7 @@ class BlockProcessor(server.db.DB):
with self.db.write_batch() as batch: with self.db.write_batch() as batch:
# Flush state last as it reads the wall time. # Flush state last as it reads the wall time.
self.backup_history(batch, hash168s) self.backup_history(batch, hashXs)
self.flush_utxos(batch) self.flush_utxos(batch)
self.flush_state(batch) self.flush_state(batch)
@ -444,10 +441,10 @@ class BlockProcessor(server.db.DB):
self.last_flush - flush_start, self.last_flush - flush_start,
self.height, self.tx_count)) self.height, self.tx_count))
def backup_history(self, batch, hash168s): def backup_history(self, batch, hashXs):
nremoves = 0 nremoves = 0
for hash168 in sorted(hash168s): for hashX in sorted(hashXs):
prefix = b'H' + hash168 prefix = b'H' + hashX
deletes = [] deletes = []
puts = {} puts = {}
for key, hist in self.db.iterator(prefix=prefix, reverse=True): for key, hist in self.db.iterator(prefix=prefix, reverse=True):
@ -472,17 +469,15 @@ class BlockProcessor(server.db.DB):
assert not self.tx_hashes assert not self.tx_hashes
self.logger.info('backing up removed {:,d} history entries from ' self.logger.info('backing up removed {:,d} history entries from '
'{:,d} addresses'.format(nremoves, len(hash168s))) '{:,d} addresses'.format(nremoves, len(hashXs)))
def check_cache_size(self): def check_cache_size(self):
'''Flush a cache if it gets too big.''' '''Flush a cache if it gets too big.'''
# Good average estimates based on traversal of subobjects and # Good average estimates based on traversal of subobjects and
# requesting size from Python (see deep_getsizeof). For # requesting size from Python (see deep_getsizeof).
# whatever reason Python O/S mem usage is typically +30% or one_MB = 1000*1000
# more, so we scale our already bloated object sizes. utxo_cache_size = len(self.utxo_cache) * 205
one_MB = int(1048576 / 1.3) db_deletes_size = len(self.db_deletes) * 57
utxo_cache_size = len(self.utxo_cache) * 187
db_deletes_size = len(self.db_deletes) * 61
hist_cache_size = len(self.history) * 180 + self.history_size * 4 hist_cache_size = len(self.history) * 180 + self.history_size * 4
tx_hash_size = (self.tx_count - self.fs_tx_count) * 74 tx_hash_size = (self.tx_count - self.fs_tx_count) * 74
utxo_MB = (db_deletes_size + utxo_cache_size) // one_MB utxo_MB = (db_deletes_size + utxo_cache_size) // one_MB
@ -493,9 +488,10 @@ class BlockProcessor(server.db.DB):
.format(self.height, self.daemon.cached_height(), .format(self.height, self.daemon.cached_height(),
utxo_MB, hist_MB)) utxo_MB, hist_MB))
# Flush if a cache is too big # Flush history if it takes up over 20% of cache memory.
if utxo_MB >= self.utxo_MB or hist_MB >= self.hist_MB: # Flush UTXOs once they take up 80% of cache memory.
self.flush(utxo_MB >= self.utxo_MB) if utxo_MB + hist_MB >= self.cache_MB or hist_MB >= self.cache_MB // 5:
self.flush(utxo_MB >= self.cache_MB * 4 // 5)
def fs_advance_block(self, header, tx_hashes, txs): def fs_advance_block(self, header, tx_hashes, txs):
'''Update unflushed FS state for a new block.''' '''Update unflushed FS state for a new block.'''
@ -525,15 +521,15 @@ class BlockProcessor(server.db.DB):
history = self.history history = self.history
history_size = self.history_size history_size = self.history_size
tx_num = self.tx_count tx_num = self.tx_count
script_hash168 = self.coin.hash168_from_script() script_hashX = self.coin.hashX_from_script
s_pack = pack s_pack = pack
put_utxo = self.utxo_cache.__setitem__ put_utxo = self.utxo_cache.__setitem__
spend_utxo = self.spend_utxo spend_utxo = self.spend_utxo
undo_info_append = undo_info.append undo_info_append = undo_info.append
for tx, tx_hash in zip(txs, tx_hashes): for tx, tx_hash in zip(txs, tx_hashes):
hash168s = set() hashXs = set()
add_hash168 = hash168s.add add_hashX = hashXs.add
tx_numb = s_pack('<I', tx_num) tx_numb = s_pack('<I', tx_num)
# Spend the inputs # Spend the inputs
@ -541,21 +537,21 @@ class BlockProcessor(server.db.DB):
for txin in tx.inputs: for txin in tx.inputs:
cache_value = spend_utxo(txin.prev_hash, txin.prev_idx) cache_value = spend_utxo(txin.prev_hash, txin.prev_idx)
undo_info_append(cache_value) undo_info_append(cache_value)
add_hash168(cache_value[:21]) add_hashX(cache_value[:-12])
# Add the new UTXOs # Add the new UTXOs
for idx, txout in enumerate(tx.outputs): for idx, txout in enumerate(tx.outputs):
# Get the hash168. Ignore unspendable outputs # Get the hashX. Ignore unspendable outputs
hash168 = script_hash168(txout.pk_script) hashX = script_hashX(txout.pk_script)
if hash168: if hashX:
add_hash168(hash168) add_hashX(hashX)
put_utxo(tx_hash + s_pack('<H', idx), put_utxo(tx_hash + s_pack('<H', idx),
hash168 + tx_numb + s_pack('<Q', txout.value)) hashX + tx_numb + s_pack('<Q', txout.value))
for hash168 in hash168s: for hashX in hashXs:
history[hash168].append(tx_num) history[hashX].append(tx_num)
history_size += len(hash168s) history_size += len(hashXs)
touched.update(hash168s) touched.update(hashXs)
tx_num += 1 tx_num += 1
self.tx_count = tx_num self.tx_count = tx_num
@ -605,7 +601,8 @@ class BlockProcessor(server.db.DB):
s_pack = pack s_pack = pack
put_utxo = self.utxo_cache.__setitem__ put_utxo = self.utxo_cache.__setitem__
spend_utxo = self.spend_utxo spend_utxo = self.spend_utxo
script_hash168 = self.coin.hash168_from_script() script_hashX = self.coin.hashX_from_script
undo_entry_len = 12 + self.coin.HASHX_LEN
rtxs = reversed(txs) rtxs = reversed(txs)
rtx_hashes = reversed(tx_hashes) rtx_hashes = reversed(tx_hashes)
@ -614,19 +611,19 @@ class BlockProcessor(server.db.DB):
for idx, txout in enumerate(tx.outputs): for idx, txout in enumerate(tx.outputs):
# Spend the TX outputs. Be careful with unspendable # Spend the TX outputs. Be careful with unspendable
# outputs - we didn't save those in the first place. # outputs - we didn't save those in the first place.
hash168 = script_hash168(txout.pk_script) hashX = script_hashX(txout.pk_script)
if hash168: if hashX:
cache_value = spend_utxo(tx_hash, idx) cache_value = spend_utxo(tx_hash, idx)
touched.add(cache_value[:21]) touched.add(cache_value[:-12])
# Restore the inputs # Restore the inputs
if not tx.is_coinbase: if not tx.is_coinbase:
for txin in reversed(tx.inputs): for txin in reversed(tx.inputs):
n -= 33 n -= undo_entry_len
undo_item = undo_info[n:n + 33] undo_item = undo_info[n:n + undo_entry_len]
put_utxo(txin.prev_hash + s_pack('<H', txin.prev_idx), put_utxo(txin.prev_hash + s_pack('<H', txin.prev_idx),
undo_item) undo_item)
touched.add(undo_item[:21]) touched.add(undo_item[:-12])
assert n == 0 assert n == 0
self.tx_count -= len(txs) self.tx_count -= len(txs)
@ -642,12 +639,13 @@ class BlockProcessor(server.db.DB):
binary keys and values. binary keys and values.
Key: TX_HASH + TX_IDX (32 + 2 = 34 bytes) Key: TX_HASH + TX_IDX (32 + 2 = 34 bytes)
Value: HASH168 + TX_NUM + VALUE (21 + 4 + 8 = 33 bytes) Value: HASHX + TX_NUM + VALUE (11 + 4 + 8 = 23 bytes)
That's 67 bytes of raw data. Python dictionary overhead means That's 57 bytes of raw data in-memory. Python dictionary overhead
each entry actually uses about 187 bytes of memory. So over 5 means each entry actually uses about 205 bytes of memory. So
million UTXOs can fit in 1GB of RAM. There are approximately 42 almost 5 million UTXOs can fit in 1GB of RAM. There are
million UTXOs on bitcoin mainnet at height 433,000. approximately 42 million UTXOs on bitcoin mainnet at height
433,000.
Semantics: Semantics:
@ -670,11 +668,11 @@ class BlockProcessor(server.db.DB):
To this end we maintain two "tables", one for each point above: To this end we maintain two "tables", one for each point above:
1. Key: b'u' + address_hash168 + tx_idx + tx_num 1. Key: b'u' + address_hashX + tx_idx + tx_num
Value: the UTXO value as a 64-bit unsigned integer Value: the UTXO value as a 64-bit unsigned integer
2. Key: b'h' + compressed_tx_hash + tx_idx + tx_num 2. Key: b'h' + compressed_tx_hash + tx_idx + tx_num
Value: hash168 Value: hashX
The compressed tx hash is just the first few bytes of the hash of The compressed tx hash is just the first few bytes of the hash of
the tx in which the UTXO was created. As this is not unique there the tx in which the UTXO was created. As this is not unique there
@ -700,12 +698,12 @@ class BlockProcessor(server.db.DB):
# Spend it from the DB. # Spend it from the DB.
# Key: b'h' + compressed_tx_hash + tx_idx + tx_num # Key: b'h' + compressed_tx_hash + tx_idx + tx_num
# Value: hash168 # Value: hashX
prefix = b'h' + tx_hash[:4] + idx_packed prefix = b'h' + tx_hash[:4] + idx_packed
candidates = {db_key: hash168 for db_key, hash168 candidates = {db_key: hashX for db_key, hashX
in self.db.iterator(prefix=prefix)} in self.db.iterator(prefix=prefix)}
for hdb_key, hash168 in candidates.items(): for hdb_key, hashX in candidates.items():
tx_num_packed = hdb_key[-4:] tx_num_packed = hdb_key[-4:]
if len(candidates) > 1: if len(candidates) > 1:
@ -715,15 +713,15 @@ class BlockProcessor(server.db.DB):
assert hash is not None # Should always be found assert hash is not None # Should always be found
continue continue
# Key: b'u' + address_hash168 + tx_idx + tx_num # Key: b'u' + address_hashX + tx_idx + tx_num
# Value: the UTXO value as a 64-bit unsigned integer # Value: the UTXO value as a 64-bit unsigned integer
udb_key = b'u' + hash168 + hdb_key[-6:] udb_key = b'u' + hashX + hdb_key[-6:]
utxo_value_packed = self.db.get(udb_key) utxo_value_packed = self.db.get(udb_key)
if utxo_value_packed: if utxo_value_packed:
# Remove both entries for this UTXO # Remove both entries for this UTXO
self.db_deletes.append(hdb_key) self.db_deletes.append(hdb_key)
self.db_deletes.append(udb_key) self.db_deletes.append(udb_key)
return hash168 + tx_num_packed + utxo_value_packed return hashX + tx_num_packed + utxo_value_packed
raise ChainError('UTXO {} / {:,d} not found in "h" table' raise ChainError('UTXO {} / {:,d} not found in "h" table'
.format(hash_to_str(tx_hash), tx_idx)) .format(hash_to_str(tx_hash), tx_idx))
@ -743,11 +741,11 @@ class BlockProcessor(server.db.DB):
batch_put = batch.put batch_put = batch.put
for cache_key, cache_value in self.utxo_cache.items(): for cache_key, cache_value in self.utxo_cache.items():
# suffix = tx_num + tx_idx # suffix = tx_idx + tx_num
hash168 = cache_value[:21] hashX = cache_value[:-12]
suffix = cache_key[-2:] + cache_value[21:25] suffix = cache_key[-2:] + cache_value[-12:-8]
batch_put(b'h' + cache_key[:4] + suffix, hash168) batch_put(b'h' + cache_key[:4] + suffix, hashX)
batch_put(b'u' + hash168 + suffix, cache_value[25:]) batch_put(b'u' + hashX + suffix, cache_value[-8:])
if self.first_sync: if self.first_sync:
self.logger.info('flushed {:,d} blocks with {:,d} txs, {:,d} UTXO ' self.logger.info('flushed {:,d} blocks with {:,d} txs, {:,d} UTXO '

26
server/controller.py

@ -73,20 +73,20 @@ class Controller(util.LoggedClass):
env.max_send = max(350000, env.max_send) env.max_send = max(350000, env.max_send)
self.setup_bands() self.setup_bands()
async def mempool_transactions(self, hash168): async def mempool_transactions(self, hashX):
'''Generate (hex_hash, tx_fee, unconfirmed) tuples for mempool '''Generate (hex_hash, tx_fee, unconfirmed) tuples for mempool
entries for the hash168. entries for the hashX.
unconfirmed is True if any txin is unconfirmed. unconfirmed is True if any txin is unconfirmed.
''' '''
return await self.mempool.transactions(hash168) return await self.mempool.transactions(hashX)
def mempool_value(self, hash168): def mempool_value(self, hashX):
'''Return the unconfirmed amount in the mempool for hash168. '''Return the unconfirmed amount in the mempool for hashX.
Can be positive or negative. Can be positive or negative.
''' '''
return self.mempool.value(hash168) return self.mempool.value(hashX)
def sent_tx(self, tx_hash): def sent_tx(self, tx_hash):
'''Call when a TX is sent. Tells mempool to prioritize it.''' '''Call when a TX is sent. Tells mempool to prioritize it.'''
@ -255,8 +255,8 @@ class Controller(util.LoggedClass):
# Invalidate caches # Invalidate caches
hc = self.history_cache hc = self.history_cache
for hash168 in set(hc).intersection(touched): for hashX in set(hc).intersection(touched):
del hc[hash168] del hc[hashX]
if self.bp.db_height != self.height: if self.bp.db_height != self.height:
self.height = self.bp.db_height self.height = self.bp.db_height
self.header_cache.clear() self.header_cache.clear()
@ -286,10 +286,10 @@ class Controller(util.LoggedClass):
self.header_cache[height] = header self.header_cache[height] = header
return header return header
async def async_get_history(self, hash168): async def async_get_history(self, hashX):
'''Get history asynchronously to reduce latency.''' '''Get history asynchronously to reduce latency.'''
if hash168 in self.history_cache: if hashX in self.history_cache:
return self.history_cache[hash168] return self.history_cache[hashX]
def job(): def job():
# History DoS limit. Each element of history is about 99 # History DoS limit. Each element of history is about 99
@ -297,11 +297,11 @@ class Controller(util.LoggedClass):
# on bloated history requests, and uses a smaller divisor # on bloated history requests, and uses a smaller divisor
# so large requests are logged before refusing them. # so large requests are logged before refusing them.
limit = self.env.max_send // 97 limit = self.env.max_send // 97
return list(self.bp.get_history(hash168, limit=limit)) return list(self.bp.get_history(hashX, limit=limit))
loop = asyncio.get_event_loop() loop = asyncio.get_event_loop()
history = await loop.run_in_executor(None, job) history = await loop.run_in_executor(None, job)
self.history_cache[hash168] = history self.history_cache[hashX] = history
return history return history
async def shutdown(self): async def shutdown(self):

51
server/db.py

@ -31,7 +31,7 @@ class DB(LoggedClass):
it was shutdown uncleanly. it was shutdown uncleanly.
''' '''
DB_VERSIONS = [3] DB_VERSIONS = [4]
class MissingUTXOError(Exception): class MissingUTXOError(Exception):
'''Raised if a mempool tx input UTXO couldn't be found.''' '''Raised if a mempool tx input UTXO couldn't be found.'''
@ -319,7 +319,7 @@ class DB(LoggedClass):
assert isinstance(limit, int) and limit >= 0 assert isinstance(limit, int) and limit >= 0
return limit return limit
def get_history(self, hash168, limit=1000): def get_history(self, hashX, limit=1000):
'''Generator that returns an unpruned, sorted list of (tx_hash, '''Generator that returns an unpruned, sorted list of (tx_hash,
height) tuples of confirmed transactions that touched the address, height) tuples of confirmed transactions that touched the address,
earliest in the blockchain first. Includes both spending and earliest in the blockchain first. Includes both spending and
@ -327,7 +327,7 @@ class DB(LoggedClass):
Set limit to None to get them all. Set limit to None to get them all.
''' '''
limit = self._resolve_limit(limit) limit = self._resolve_limit(limit)
prefix = b'H' + hash168 prefix = b'H' + hashX
for key, hist in self.db.iterator(prefix=prefix): for key, hist in self.db.iterator(prefix=prefix):
a = array.array('I') a = array.array('I')
a.frombytes(hist) a.frombytes(hist)
@ -337,20 +337,20 @@ class DB(LoggedClass):
yield self.fs_tx_hash(tx_num) yield self.fs_tx_hash(tx_num)
limit -= 1 limit -= 1
def get_balance(self, hash168): def get_balance(self, hashX):
'''Returns the confirmed balance of an address.''' '''Returns the confirmed balance of an address.'''
return sum(utxo.value for utxo in self.get_utxos(hash168, limit=None)) return sum(utxo.value for utxo in self.get_utxos(hashX, limit=None))
def get_utxos(self, hash168, limit=1000): def get_utxos(self, hashX, limit=1000):
'''Generator that yields all UTXOs for an address sorted in no '''Generator that yields all UTXOs for an address sorted in no
particular order. By default yields at most 1000 entries. particular order. By default yields at most 1000 entries.
Set limit to None to get them all. Set limit to None to get them all.
''' '''
limit = self._resolve_limit(limit) limit = self._resolve_limit(limit)
s_unpack = unpack s_unpack = unpack
# Key: b'u' + address_hash168 + tx_idx + tx_num # Key: b'u' + address_hashX + tx_idx + tx_num
# Value: the UTXO value as a 64-bit unsigned integer # Value: the UTXO value as a 64-bit unsigned integer
prefix = b'u' + hash168 prefix = b'u' + hashX
for db_key, db_value in self.db.iterator(prefix=prefix): for db_key, db_value in self.db.iterator(prefix=prefix):
if limit == 0: if limit == 0:
return return
@ -360,56 +360,43 @@ class DB(LoggedClass):
tx_hash, height = self.fs_tx_hash(tx_num) tx_hash, height = self.fs_tx_hash(tx_num)
yield UTXO(tx_num, tx_pos, tx_hash, height, value) yield UTXO(tx_num, tx_pos, tx_hash, height, value)
def get_utxo_hash168(self, tx_hash, index): def db_hashX(self, tx_hash, idx_packed):
'''Returns the hash168 for a UTXO. '''Return (hashX, tx_num_packed) for the given TXO.
Used only for electrum client command-line requests.
'''
hash168 = None
if 0 <= index <= 65535:
idx_packed = pack('<H', index)
hash168, tx_num_packed = self.db_hash168(tx_hash, idx_packed)
return hash168
def db_hash168(self, tx_hash, idx_packed):
'''Return (hash168, tx_num_packed) for the given TXO.
Both are None if not found.''' Both are None if not found.'''
# Key: b'h' + compressed_tx_hash + tx_idx + tx_num # Key: b'h' + compressed_tx_hash + tx_idx + tx_num
# Value: hash168 # Value: hashX
prefix = b'h' + tx_hash[:4] + idx_packed prefix = b'h' + tx_hash[:4] + idx_packed
# Find which entry, if any, the TX_HASH matches. # Find which entry, if any, the TX_HASH matches.
for db_key, hash168 in self.db.iterator(prefix=prefix): for db_key, hashX in self.db.iterator(prefix=prefix):
assert len(hash168) == 21
tx_num_packed = db_key[-4:] tx_num_packed = db_key[-4:]
tx_num, = unpack('<I', tx_num_packed) tx_num, = unpack('<I', tx_num_packed)
hash, height = self.fs_tx_hash(tx_num) hash, height = self.fs_tx_hash(tx_num)
if hash == tx_hash: if hash == tx_hash:
return hash168, tx_num_packed return hashX, tx_num_packed
return None, None return None, None
def db_utxo_lookup(self, tx_hash, tx_idx): def db_utxo_lookup(self, tx_hash, tx_idx):
'''Given a prevout return a (hash168, value) pair. '''Given a prevout return a (hashX, value) pair.
Raises MissingUTXOError if the UTXO is not found. Used by the Raises MissingUTXOError if the UTXO is not found. Used by the
mempool code. mempool code.
''' '''
idx_packed = pack('<H', tx_idx) idx_packed = pack('<H', tx_idx)
hash168, tx_num_packed = self.db_hash168(tx_hash, idx_packed) hashX, tx_num_packed = self.db_hashX(tx_hash, idx_packed)
if not hash168: if not hashX:
# This can happen when the daemon is a block ahead of us # This can happen when the daemon is a block ahead of us
# and has mempool txs spending outputs from that new block # and has mempool txs spending outputs from that new block
raise self.MissingUTXOError raise self.MissingUTXOError
# Key: b'u' + address_hash168 + tx_idx + tx_num # Key: b'u' + address_hashX + tx_idx + tx_num
# Value: the UTXO value as a 64-bit unsigned integer # Value: the UTXO value as a 64-bit unsigned integer
key = b'u' + hash168 + idx_packed + tx_num_packed key = b'u' + hashX + idx_packed + tx_num_packed
db_value = self.db.get(key) db_value = self.db.get(key)
if not db_value: if not db_value:
raise self.DBError('UTXO {} / {:,d} in one table only' raise self.DBError('UTXO {} / {:,d} in one table only'
.format(hash_to_str(tx_hash), tx_idx)) .format(hash_to_str(tx_hash), tx_idx))
value, = unpack('<Q', db_value) value, = unpack('<Q', db_value)
return hash168, value return hashX, value

10
server/env.py

@ -22,12 +22,12 @@ class Env(LoggedClass):
def __init__(self): def __init__(self):
super().__init__() super().__init__()
self.obsolete(['UTXO_MB', 'HIST_MB'])
coin_name = self.default('COIN', 'Bitcoin') coin_name = self.default('COIN', 'Bitcoin')
network = self.default('NETWORK', 'mainnet') network = self.default('NETWORK', 'mainnet')
self.coin = Coin.lookup_coin_class(coin_name, network) self.coin = Coin.lookup_coin_class(coin_name, network)
self.db_dir = self.required('DB_DIRECTORY') self.db_dir = self.required('DB_DIRECTORY')
self.utxo_MB = self.integer('UTXO_MB', 1000) self.cache_MB = self.integer('CACHE_MB', 1250)
self.hist_MB = self.integer('HIST_MB', 300)
self.host = self.default('HOST', 'localhost') self.host = self.default('HOST', 'localhost')
self.reorg_limit = self.integer('REORG_LIMIT', self.coin.REORG_LIMIT) self.reorg_limit = self.integer('REORG_LIMIT', self.coin.REORG_LIMIT)
self.daemon_url = self.required('DAEMON_URL') self.daemon_url = self.required('DAEMON_URL')
@ -88,3 +88,9 @@ class Env(LoggedClass):
except: except:
raise self.Error('cannot convert envvar {} value {} to an integer' raise self.Error('cannot convert envvar {} value {} to an integer'
.format(envvar, value)) .format(envvar, value))
def obsolete(self, envvars):
bad = [envvar for envvar in envvars if environ.get(envvar)]
if bad:
raise self.Error('remove obsolete environment variables {}'
.format(bad))

66
server/mempool.py

@ -27,9 +27,9 @@ class MemPool(util.LoggedClass):
To that end we maintain the following maps: To that end we maintain the following maps:
tx_hash -> (txin_pairs, txout_pairs) tx_hash -> (txin_pairs, txout_pairs)
hash168 -> set of all tx hashes in which the hash168 appears hashX -> set of all tx hashes in which the hashX appears
A pair is a (hash168, value) tuple. tx hashes are hex strings. A pair is a (hashX, value) tuple. tx hashes are hex strings.
''' '''
def __init__(self, daemon, coin, db): def __init__(self, daemon, coin, db):
@ -42,7 +42,7 @@ class MemPool(util.LoggedClass):
self.prioritized = set() self.prioritized = set()
self.stop = False self.stop = False
self.txs = {} self.txs = {}
self.hash168s = defaultdict(set) # None can be a key self.hashXs = defaultdict(set) # None can be a key
def prioritize(self, tx_hash): def prioritize(self, tx_hash):
'''Prioritize processing the given hash. This is important during '''Prioritize processing the given hash. This is important during
@ -56,7 +56,7 @@ class MemPool(util.LoggedClass):
unfetched. Add new ones to unprocessed. unfetched. Add new ones to unprocessed.
''' '''
txs = self.txs txs = self.txs
hash168s = self.hash168s hashXs = self.hashXs
touched = self.touched touched = self.touched
hashes = self.daemon.cached_mempool_hashes() hashes = self.daemon.cached_mempool_hashes()
@ -67,13 +67,13 @@ class MemPool(util.LoggedClass):
item = txs.pop(hex_hash) item = txs.pop(hex_hash)
if item: if item:
txin_pairs, txout_pairs = item txin_pairs, txout_pairs = item
tx_hash168s = set(hash168 for hash168, value in txin_pairs) tx_hashXs = set(hashX for hashX, value in txin_pairs)
tx_hash168s.update(hash168 for hash168, value in txout_pairs) tx_hashXs.update(hashX for hashX, value in txout_pairs)
for hash168 in tx_hash168s: for hashX in tx_hashXs:
hash168s[hash168].remove(hex_hash) hashXs[hashX].remove(hex_hash)
if not hash168s[hash168]: if not hashXs[hashX]:
del hash168s[hash168] del hashXs[hashX]
touched.update(tx_hash168s) touched.update(tx_hashXs)
new = hashes.difference(txs) new = hashes.difference(txs)
unfetched.update(new) unfetched.update(new)
@ -114,7 +114,7 @@ class MemPool(util.LoggedClass):
now = time.time() now = time.time()
if now >= next_log and loops: if now >= next_log and loops:
self.logger.info('{:,d} txs touching {:,d} addresses' self.logger.info('{:,d} txs touching {:,d} addresses'
.format(len(txs), len(self.hash168s))) .format(len(txs), len(self.hashXs)))
next_log = now + 150 next_log = now + 150
try: try:
@ -168,14 +168,14 @@ class MemPool(util.LoggedClass):
result, deferred = await loop.run_in_executor(None, job) result, deferred = await loop.run_in_executor(None, job)
pending.extend(deferred) pending.extend(deferred)
hash168s = self.hash168s hashXs = self.hashXs
touched = self.touched touched = self.touched
for hex_hash, in_out_pairs in result.items(): for hex_hash, in_out_pairs in result.items():
if hex_hash in txs: if hex_hash in txs:
txs[hex_hash] = in_out_pairs txs[hex_hash] = in_out_pairs
for hash168, value in itertools.chain(*in_out_pairs): for hashX, value in itertools.chain(*in_out_pairs):
touched.add(hash168) touched.add(hashX)
hash168s[hash168].add(hex_hash) hashXs[hashX].add(hex_hash)
return process return process
@ -199,7 +199,7 @@ class MemPool(util.LoggedClass):
variables it doesn't own. Atomic reads of self.txs that do variables it doesn't own. Atomic reads of self.txs that do
not depend on the result remaining the same are fine. not depend on the result remaining the same are fine.
''' '''
script_hash168 = self.coin.hash168_from_script() script_hashX = self.coin.hashX_from_script
db_utxo_lookup = self.db.db_utxo_lookup db_utxo_lookup = self.db.db_utxo_lookup
txs = self.txs txs = self.txs
@ -209,8 +209,8 @@ class MemPool(util.LoggedClass):
continue continue
tx = Deserializer(raw_tx).read_tx() tx = Deserializer(raw_tx).read_tx()
# Convert the tx outputs into (hash168, value) pairs # Convert the tx outputs into (hashX, value) pairs
txout_pairs = [(script_hash168(txout.pk_script), txout.value) txout_pairs = [(script_hashX(txout.pk_script), txout.value)
for txout in tx.outputs] for txout in tx.outputs]
# Convert the tx inputs to ([prev_hex_hash, prev_idx) pairs # Convert the tx inputs to ([prev_hex_hash, prev_idx) pairs
@ -261,17 +261,17 @@ class MemPool(util.LoggedClass):
return result, deferred return result, deferred
async def transactions(self, hash168): async def transactions(self, hashX):
'''Generate (hex_hash, tx_fee, unconfirmed) tuples for mempool '''Generate (hex_hash, tx_fee, unconfirmed) tuples for mempool
entries for the hash168. entries for the hashX.
unconfirmed is True if any txin is unconfirmed. unconfirmed is True if any txin is unconfirmed.
''' '''
# hash168s is a defaultdict # hashXs is a defaultdict
if not hash168 in self.hash168s: if not hashX in self.hashXs:
return [] return []
hex_hashes = self.hash168s[hash168] hex_hashes = self.hashXs[hashX]
raw_txs = await self.daemon.getrawtransactions(hex_hashes) raw_txs = await self.daemon.getrawtransactions(hex_hashes)
result = [] result = []
for hex_hash, raw_tx in zip(hex_hashes, raw_txs): for hex_hash, raw_tx in zip(hex_hashes, raw_txs):
@ -279,23 +279,23 @@ class MemPool(util.LoggedClass):
if not item or not raw_tx: if not item or not raw_tx:
continue continue
txin_pairs, txout_pairs = item txin_pairs, txout_pairs = item
tx_fee = (sum(v for hash168, v in txin_pairs) tx_fee = (sum(v for hashX, v in txin_pairs)
- sum(v for hash168, v in txout_pairs)) - sum(v for hashX, v in txout_pairs))
tx = Deserializer(raw_tx).read_tx() tx = Deserializer(raw_tx).read_tx()
unconfirmed = any(txin.prev_hash in self.txs for txin in tx.inputs) unconfirmed = any(txin.prev_hash in self.txs for txin in tx.inputs)
result.append((hex_hash, tx_fee, unconfirmed)) result.append((hex_hash, tx_fee, unconfirmed))
return result return result
def value(self, hash168): def value(self, hashX):
'''Return the unconfirmed amount in the mempool for hash168. '''Return the unconfirmed amount in the mempool for hashX.
Can be positive or negative. Can be positive or negative.
''' '''
value = 0 value = 0
# hash168s is a defaultdict # hashXs is a defaultdict
if hash168 in self.hash168s: if hashX in self.hashXs:
for hex_hash in self.hash168s[hash168]: for hex_hash in self.hashXs[hashX]:
txin_pairs, txout_pairs = self.txs[hex_hash] txin_pairs, txout_pairs = self.txs[hex_hash]
value -= sum(v for h168, v in txin_pairs if h168 == hash168) value -= sum(v for h168, v in txin_pairs if h168 == hashX)
value += sum(v for h168, v in txout_pairs if h168 == hash168) value += sum(v for h168, v in txout_pairs if h168 == hashX)
return value return value

95
server/protocol.py

@ -14,6 +14,7 @@ import traceback
from lib.hash import sha256, double_sha256, hash_to_str, hex_str_to_hash from lib.hash import sha256, double_sha256, hash_to_str, hex_str_to_hash
from lib.jsonrpc import JSONRPC from lib.jsonrpc import JSONRPC
from lib.tx import Deserializer
from server.daemon import DaemonError from server.daemon import DaemonError
from server.version import VERSION from server.version import VERSION
@ -138,17 +139,17 @@ class Session(JSONRPC):
raise self.RPCError('parameter should be a transaction hash: {}' raise self.RPCError('parameter should be a transaction hash: {}'
.format(param)) .format(param))
def param_to_hash168(self, param): def param_to_hashX(self, param):
if isinstance(param, str): if isinstance(param, str):
try: try:
return self.coin.address_to_hash168(param) return self.coin.address_to_hashX(param)
except: except:
pass pass
raise self.RPCError('param {} is not a valid address'.format(param)) raise self.RPCError('param {} is not a valid address'.format(param))
def params_to_hash168(self, params): def params_to_hashX(self, params):
if len(params) == 1: if len(params) == 1:
return self.param_to_hash168(params[0]) return self.param_to_hashX(params[0])
raise self.RPCError('params {} should contain a single address' raise self.RPCError('params {} should contain a single address'
.format(params)) .format(params))
@ -162,7 +163,7 @@ class ElectrumX(Session):
self.subscribe_height = False self.subscribe_height = False
self.notified_height = None self.notified_height = None
self.max_subs = self.env.max_session_subs self.max_subs = self.env.max_session_subs
self.hash168s = set() self.hashX_subs = {}
rpcs = [ rpcs = [
('blockchain', ('blockchain',
'address.get_balance address.get_history address.get_mempool ' 'address.get_balance address.get_history address.get_mempool '
@ -179,7 +180,7 @@ class ElectrumX(Session):
for suffix in suffixes.split()} for suffix in suffixes.split()}
def sub_count(self): def sub_count(self):
return len(self.hash168s) return len(self.hashX_subs)
async def notify(self, height, touched): async def notify(self, height, touched):
'''Notify the client about changes in height and touched addresses. '''Notify the client about changes in height and touched addresses.
@ -202,11 +203,10 @@ class ElectrumX(Session):
) )
self.encode_and_send_payload(payload) self.encode_and_send_payload(payload)
hash168_to_address = self.coin.hash168_to_address matches = touched.intersection(self.hashX_subs)
matches = self.hash168s.intersection(touched) for hashX in matches:
for hash168 in matches: address = self.hashX_subs[hashX]
address = hash168_to_address(hash168) status = await self.address_status(hashX)
status = await self.address_status(hash168)
payload = self.notification_payload( payload = self.notification_payload(
'blockchain.address.subscribe', (address, status)) 'blockchain.address.subscribe', (address, status))
self.encode_and_send_payload(payload) self.encode_and_send_payload(payload)
@ -222,12 +222,12 @@ class ElectrumX(Session):
'''Used as response to a headers subscription request.''' '''Used as response to a headers subscription request.'''
return self.manager.electrum_header(self.height()) return self.manager.electrum_header(self.height())
async def address_status(self, hash168): async def address_status(self, hashX):
'''Returns status as 32 bytes.''' '''Returns status as 32 bytes.'''
# Note history is ordered and mempool unordered in electrum-server # Note history is ordered and mempool unordered in electrum-server
# For mempool, height is -1 if unconfirmed txins, otherwise 0 # For mempool, height is -1 if unconfirmed txins, otherwise 0
history = await self.manager.async_get_history(hash168) history = await self.manager.async_get_history(hashX)
mempool = await self.manager.mempool_transactions(hash168) mempool = await self.manager.mempool_transactions(hashX)
status = ''.join('{}:{:d}:'.format(hash_to_str(tx_hash), height) status = ''.join('{}:{:d}:'.format(hash_to_str(tx_hash), height)
for tx_hash, height in history) for tx_hash, height in history)
@ -262,20 +262,20 @@ class ElectrumX(Session):
return {"block_height": height, "merkle": merkle_branch, "pos": pos} return {"block_height": height, "merkle": merkle_branch, "pos": pos}
async def unconfirmed_history(self, hash168): async def unconfirmed_history(self, hashX):
# Note unconfirmed history is unordered in electrum-server # Note unconfirmed history is unordered in electrum-server
# Height is -1 if unconfirmed txins, otherwise 0 # Height is -1 if unconfirmed txins, otherwise 0
mempool = await self.manager.mempool_transactions(hash168) mempool = await self.manager.mempool_transactions(hashX)
return [{'tx_hash': tx_hash, 'height': -unconfirmed, 'fee': fee} return [{'tx_hash': tx_hash, 'height': -unconfirmed, 'fee': fee}
for tx_hash, fee, unconfirmed in mempool] for tx_hash, fee, unconfirmed in mempool]
async def get_history(self, hash168): async def get_history(self, hashX):
# Note history is ordered but unconfirmed is unordered in e-s # Note history is ordered but unconfirmed is unordered in e-s
history = await self.manager.async_get_history(hash168) history = await self.manager.async_get_history(hashX)
conf = [{'tx_hash': hash_to_str(tx_hash), 'height': height} conf = [{'tx_hash': hash_to_str(tx_hash), 'height': height}
for tx_hash, height in history] for tx_hash, height in history]
return conf + await self.unconfirmed_history(hash168) return conf + await self.unconfirmed_history(hashX)
def get_chunk(self, index): def get_chunk(self, index):
'''Return header chunk as hex. Index is a non-negative integer.''' '''Return header chunk as hex. Index is a non-negative integer.'''
@ -285,55 +285,55 @@ class ElectrumX(Session):
count = min(next_height - start_height, chunk_size) count = min(next_height - start_height, chunk_size)
return self.bp.read_headers(start_height, count).hex() return self.bp.read_headers(start_height, count).hex()
async def get_utxos(self, hash168): async def get_utxos(self, hashX):
'''Get UTXOs asynchronously to reduce latency.''' '''Get UTXOs asynchronously to reduce latency.'''
def job(): def job():
return list(self.bp.get_utxos(hash168, limit=None)) return list(self.bp.get_utxos(hashX, limit=None))
loop = asyncio.get_event_loop() loop = asyncio.get_event_loop()
return await loop.run_in_executor(None, job) return await loop.run_in_executor(None, job)
async def get_balance(self, hash168): async def get_balance(self, hashX):
utxos = await self.get_utxos(hash168) utxos = await self.get_utxos(hashX)
confirmed = sum(utxo.value for utxo in utxos) confirmed = sum(utxo.value for utxo in utxos)
unconfirmed = self.manager.mempool_value(hash168) unconfirmed = self.manager.mempool_value(hashX)
return {'confirmed': confirmed, 'unconfirmed': unconfirmed} return {'confirmed': confirmed, 'unconfirmed': unconfirmed}
async def list_unspent(self, hash168): async def list_unspent(self, hashX):
return [{'tx_hash': hash_to_str(utxo.tx_hash), 'tx_pos': utxo.tx_pos, return [{'tx_hash': hash_to_str(utxo.tx_hash), 'tx_pos': utxo.tx_pos,
'height': utxo.height, 'value': utxo.value} 'height': utxo.height, 'value': utxo.value}
for utxo in sorted(await self.get_utxos(hash168))] for utxo in sorted(await self.get_utxos(hashX))]
# --- blockchain commands # --- blockchain commands
async def address_get_balance(self, params): async def address_get_balance(self, params):
hash168 = self.params_to_hash168(params) hashX = self.params_to_hashX(params)
return await self.get_balance(hash168) return await self.get_balance(hashX)
async def address_get_history(self, params): async def address_get_history(self, params):
hash168 = self.params_to_hash168(params) hashX = self.params_to_hashX(params)
return await self.get_history(hash168) return await self.get_history(hashX)
async def address_get_mempool(self, params): async def address_get_mempool(self, params):
hash168 = self.params_to_hash168(params) hashX = self.params_to_hashX(params)
return await self.unconfirmed_history(hash168) return await self.unconfirmed_history(hashX)
async def address_get_proof(self, params): async def address_get_proof(self, params):
hash168 = self.params_to_hash168(params) hashX = self.params_to_hashX(params)
raise self.RPCError('get_proof is not yet implemented') raise self.RPCError('get_proof is not yet implemented')
async def address_listunspent(self, params): async def address_listunspent(self, params):
hash168 = self.params_to_hash168(params) hashX = self.params_to_hashX(params)
return await self.list_unspent(hash168) return await self.list_unspent(hashX)
async def address_subscribe(self, params): async def address_subscribe(self, params):
hash168 = self.params_to_hash168(params) hashX = self.params_to_hashX(params)
if len(self.hash168s) >= self.max_subs: if len(self.hashX_subs) >= self.max_subs:
raise self.RPCError('your address subscription limit {:,d} reached' raise self.RPCError('your address subscription limit {:,d} reached'
.format(self.max_subs)) .format(self.max_subs))
result = await self.address_status(hash168) result = await self.address_status(hashX)
# add_subscription can raise so call it before adding # add_subscription can raise so call it before adding
self.manager.new_subscription() self.manager.new_subscription()
self.hash168s.add(hash168) self.hashX_subs[hashX] = params[0]
return result return result
async def block_get_chunk(self, params): async def block_get_chunk(self, params):
@ -414,14 +414,23 @@ class ElectrumX(Session):
'and height') 'and height')
async def utxo_get_address(self, params): async def utxo_get_address(self, params):
'''Returns the address for a TXO.
Used only for electrum client command-line requests. We no
longer index by address, so need to request the raw
transaction. So it works for any TXO not just UTXOs.
'''
if len(params) == 2: if len(params) == 2:
tx_hash = self.param_to_tx_hash(params[0]) tx_hash = self.param_to_tx_hash(params[0])
index = self.param_to_non_negative_integer(params[1]) index = self.param_to_non_negative_integer(params[1])
tx_hash = hex_str_to_hash(tx_hash) raw_tx = await self.daemon_request('getrawtransaction', tx_hash)
hash168 = self.bp.get_utxo_hash168(tx_hash, index) if not raw_tx:
if hash168: return None
return self.coin.hash168_to_address(hash168) raw_tx = bytes.fromhex(raw_tx)
tx = Deserializer(raw_tx).read_tx()
if index >= len(tx.outputs):
return None return None
return self.coin.address_from_script(tx.outputs[index].pk_script)
raise self.RPCError('params should contain a transaction hash ' raise self.RPCError('params should contain a transaction hash '
'and index') 'and index')

Loading…
Cancel
Save