
Merge branch 'nmarley-pep8' into develop

master
Neil Booth 8 years ago
parent
commit 33cdfa4fc8
Changed files (number of changed lines in parentheses):

  1. electrumx_rpc.py (2)
  2. electrumx_server.py (5)
  3. lib/coins.py (2)
  4. lib/tx.py (9)
  5. lib/util.py (1)
  6. query.py (1)
  7. server/block_processor.py (2)
  8. server/controller.py (6)
  9. server/mempool.py (10)
  10. server/storage.py (1)
  11. tests/test_storage.py (4)
  12. tests/test_util.py (3)
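All of the hunks below are PEP 8 style fixes: negated membership tests, comparisons with None, spacing inside literals and around calls, and blank-line conventions. As a rough, hedged sketch of how such a cleanup can be checked (this is not part of the commit), the pycodestyle package (formerly distributed as pep8) exposes a small Python API:

    # Sketch only: assumes pycodestyle is installed; the file list is illustrative.
    import pycodestyle

    style = pycodestyle.StyleGuide()
    report = style.check_files(['lib/tx.py', 'server/mempool.py'])
    print('PEP 8 violations:', report.total_errors)

Fewer reported violations after this merge would be the expected outcome; the exact count depends on the checker's configuration.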

electrumx_rpc.py (2)

@@ -71,7 +71,7 @@ def rpc_send_and_wait(port, method, params, timeout=15):
 def main():
     '''Send the RPC command to the server and print the result.'''
-    parser = argparse.ArgumentParser('Send electrumx an RPC command' )
+    parser = argparse.ArgumentParser('Send electrumx an RPC command')
     parser.add_argument('-p', '--port', metavar='port_num', type=int,
                         help='RPC port number')
     parser.add_argument('command', nargs=1, default=[],

electrumx_server.py (5)

@@ -25,6 +25,7 @@ SUPPRESS_MESSAGES = [
     'Fatal write error on socket transport',
 ]
 def main_loop():
     '''Start the server.'''
     if os.geteuid() == 0:

@@ -32,7 +33,7 @@ def main_loop():
                         'account and use that')
     loop = asyncio.get_event_loop()
-    #loop.set_debug(True)
+    # loop.set_debug(True)
     def on_signal(signame):
         '''Call on receipt of a signal to cleanly shutdown.'''

@@ -43,7 +44,7 @@ def main_loop():
     def on_exception(loop, context):
         '''Suppress spurious messages it appears we cannot control.'''
         message = context.get('message')
-        if not message in SUPPRESS_MESSAGES:
+        if message not in SUPPRESS_MESSAGES:
             if not ('task' in context and
                     'accept_connection2()' in repr(context.get('task'))):
                 loop.default_exception_handler(context)
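The rewrite above is PEP 8's E713 (the same change appears later in server/controller.py and server/mempool.py): `not x in y` parses as `not (x in y)`, so the two spellings are equivalent, but `x not in y` states the intent directly. A minimal illustration, echoing the values from the hunk:

    # E713: equivalent tests; PEP 8 prefers the 'not in' spelling.
    SUPPRESS_MESSAGES = ['Fatal write error on socket transport']
    message = 'some other error'

    assert (not message in SUPPRESS_MESSAGES) == (message not in SUPPRESS_MESSAGES)
    assert message not in SUPPRESS_MESSAGES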

lib/coins.py (2)

@@ -498,7 +498,7 @@ class DashTestnet(Dash):
     TX_PER_BLOCK = 1
     RPC_PORT = 19998
     IRC_PREFIX = "d_"
-    PEER_DEFAULT_PORTS = {'t':'51001', 's':'51002'}
+    PEER_DEFAULT_PORTS = {'t': '51001', 's': '51002'}
     PEERS = [
         'electrum.dash.siampm.com s t',
     ]
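The dictionary fix above is PEP 8's E231 (missing whitespace after ':'); the same rule is applied to the dict comprehension in server/mempool.py further down. Only the spacing changes, not the value:

    # E231: a space follows ':' (and ',') inside literals; the value is identical.
    PEER_DEFAULT_PORTS = {'t': '51001', 's': '51002'}
    assert PEER_DEFAULT_PORTS == {'t':'51001', 's':'51002'}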

lib/tx.py (9)

@@ -43,6 +43,7 @@ class Tx(namedtuple("Tx", "version inputs outputs locktime")):
     # FIXME: add hash as a cached property?
 class TxInput(namedtuple("TxInput", "prev_hash prev_idx script sequence")):
     '''Class representing a transaction input.'''

@@ -117,10 +118,10 @@ class Deserializer(object):
     def _read_input(self):
         return TxInput(
             self._read_nbytes(32),  # prev_hash
             self._read_le_uint32(), # prev_idx
             self._read_varbytes(),  # script
             self._read_le_uint32()  # sequence
         )
     def _read_outputs(self):
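Several hunks in this commit (electrumx_server.py and lib/tx.py above, lib/util.py, query.py and server/storage.py below) add exactly one line without any visible text change, and tests/test_util.py drops one line the same way. These are almost certainly blank-line adjustments for PEP 8's E302/E303/E305 rules (two blank lines around top-level definitions), which the flattened diff cannot show; likewise, the four TxInput argument lines and the server/block_processor.py hunk count as changed even though no character-level difference survives, which points to whitespace-only edits such as inline-comment alignment. A small sketch of the blank-line convention, reusing the two function names from lib/util.py:

    # PEP 8 blank-line rules (E302/E305): two blank lines separate top-level
    # definitions from surrounding code.  Stub bodies stand in for the real ones.
    def deep_getsizeof(obj):
        ...


    def subclasses(base_class, strict=True):
        ...


    print(subclasses.__name__)  # module-level code also follows two blank lines (E305)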

lib/util.py (1)

@@ -127,6 +127,7 @@ def deep_getsizeof(obj):
     return size(obj)
 def subclasses(base_class, strict=True):
     '''Return a list of subclasses of base_class in its module.'''
     def select(obj):

query.py (1)

@@ -71,5 +71,6 @@ def main():
         print('Balance: {} {}'.format(coin.decimal_value(balance),
                                       coin.SHORTNAME))
 if __name__ == '__main__':
     main()

server/block_processor.py (2)

@@ -735,7 +735,7 @@ class BlockProcessor(server.db.DB):
         for cache_key, cache_value in self.utxo_cache.items():
             # suffix = tx_idx + tx_num
             hashX = cache_value[:-12]
             suffix = cache_key[-2:] + cache_value[-12:-8]
             batch_put(b'h' + cache_key[:4] + suffix, hashX)
             batch_put(b'u' + hashX + suffix, cache_value[-8:])
         self.utxo_cache = {}

server/controller.py (6)

@@ -167,7 +167,7 @@ class Controller(util.LoggedClass):
     def enqueue_session(self, session):
         # Might have disconnected whilst waiting
-        if not session in self.sessions:
+        if session not in self.sessions:
             return
         priority = self.session_priority(session)
         item = (priority, self.next_queue_id, session)

@@ -283,7 +283,7 @@ class Controller(util.LoggedClass):
            future.cancel()
         # Wait for all futures to finish
-        while not all (future.done() for future in self.futures):
+        while not all(future.done() for future in self.futures):
             await asyncio.sleep(0.1)
         # Finally shut down the block processor and executor

@@ -336,7 +336,7 @@ class Controller(util.LoggedClass):
         '''
         self.state = self.LISTENING
-        env= self.env
+        env = self.env
         if env.tcp_port is not None:
             await self.start_server('TCP', env.host, env.tcp_port)
         if env.ssl_port is not None:

server/mempool.py (10)

@@ -93,8 +93,8 @@ class MemPool(util.LoggedClass):
         process_some = self.async_process_some(unfetched, fetch_size // 2)
         await self.daemon.mempool_refresh_event.wait()
-        self.logger.info ('beginning processing of daemon mempool. '
-                          'This can take some time...')
+        self.logger.info('beginning processing of daemon mempool. '
+                         'This can take some time...')
         next_log = 0
         loops = -1  # Zero during initial catchup

@@ -187,7 +187,7 @@ class MemPool(util.LoggedClass):
         # Skip hashes the daemon has dropped. Either they were
         # evicted or they got in a block.
-        return {hh:raw for hh, raw in zip(hex_hashes, raw_txs) if raw}
+        return {hh: raw for hh, raw in zip(hex_hashes, raw_txs) if raw}
     def process_raw_txs(self, raw_tx_map, pending):
         '''Process the dictionary of raw transactions and return a dictionary

@@ -204,7 +204,7 @@ class MemPool(util.LoggedClass):
         # Deserialize each tx and put it in our priority queue
         for tx_hash, raw_tx in raw_tx_map.items():
-            if not tx_hash in txs:
+            if tx_hash not in txs:
                 continue
             tx, _tx_hash = deserializer(raw_tx).read_tx()

@@ -267,7 +267,7 @@ class MemPool(util.LoggedClass):
         unconfirmed is True if any txin is unconfirmed.
         '''
         # hashXs is a defaultdict
-        if not hashX in self.hashXs:
+        if hashX not in self.hashXs:
             return []
         deserializer = self.coin.deserializer()

server/storage.py (1)

@@ -12,6 +12,7 @@ from functools import partial
 import lib.util as util
 def db_class(name):
     '''Returns a DB engine class.'''
     for db_class in util.subclasses(Storage):

tests/test_storage.py (4)

@@ -44,8 +44,8 @@ def test_batch(db):
 def test_iterator(db):
     """
-    The iterator should contain all key/value pairs starting with prefix ordered
-    by key.
+    The iterator should contain all key/value pairs starting with prefix
+    ordered by key.
     """
     for i in range(5):
         db.put(b"abc" + str.encode(str(i)), str.encode(str(i)))

tests/test_util.py (3)

@@ -19,7 +19,6 @@ def test_cachedproperty():
             cls.CALL_COUNT += 1
             return cls.CALL_COUNT
    t = Target()
    assert t.prop == t.prop == 1
    assert Target.cls_prop == Target.cls_prop == 1

@@ -56,4 +55,4 @@ def test_chunks():
 def test_increment_byte_string():
     assert util.increment_byte_string(b'1') == b'2'
     assert util.increment_byte_string(b'\x01\x01') == b'\x01\x02'
-    assert util.increment_byte_string(b'\xff\xff') == None
+    assert util.increment_byte_string(b'\xff\xff') is None
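The final change is PEP 8's E711: comparisons against None should use `is` / `is not` rather than `==`, because `==` dispatches to the object's `__eq__` and can be overridden, while an identity test cannot. A tiny illustration, separate from the test suite:

    # E711: '==' can be fooled by a custom __eq__; 'is' checks identity.
    class AlwaysEqual:
        def __eq__(self, other):
            return True

    obj = AlwaysEqual()
    assert obj == None       # True, misleadingly (and flagged as E711)
    assert obj is not None   # the identity test gives the honest answer

For increment_byte_string(b'\xff\xff'), which the test expects to return None, both spellings happen to pass; `is None` is simply the idiomatic form.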
