Browse Source

Merge branch 'pep8' of https://github.com/nmarley/electrumx into nmarley-pep8

master
Neil Booth 8 years ago
parent
commit
9bd7b5a9f9
  1. 2
      electrumx_rpc.py
  2. 5
      electrumx_server.py
  3. 2
      lib/coins.py
  4. 4
      lib/jsonrpc.py
  5. 9
      lib/tx.py
  6. 1
      lib/util.py
  7. 1
      query.py
  8. 2
      server/block_processor.py
  9. 6
      server/controller.py
  10. 1
      server/daemon.py
  11. 1
      server/db.py
  12. 3
      server/env.py
  13. 10
      server/mempool.py
  14. 1
      server/storage.py
  15. 4
      tests/test_storage.py
  16. 3
      tests/test_util.py

2
electrumx_rpc.py

@@ -74,7 +74,7 @@ def rpc_send_and_wait(port, method, params, timeout=15):
def main():
'''Send the RPC command to the server and print the result.'''
parser = argparse.ArgumentParser('Send electrumx an RPC command' )
parser = argparse.ArgumentParser('Send electrumx an RPC command')
parser.add_argument('-p', '--port', metavar='port_num', type=int,
help='RPC port number')
parser.add_argument('command', nargs=1, default=[],

5
electrumx_server.py

@@ -25,6 +25,7 @@ SUPPRESS_MESSAGES = [
'Fatal write error on socket transport',
]
def main_loop():
'''Start the server.'''
if os.geteuid() == 0:
@@ -32,7 +33,7 @@ def main_loop():
'account and use that')
loop = asyncio.get_event_loop()
#loop.set_debug(True)
# loop.set_debug(True)
def on_signal(signame):
'''Call on receipt of a signal to cleanly shutdown.'''
@@ -43,7 +44,7 @@ def main_loop():
def on_exception(loop, context):
'''Suppress spurious messages it appears we cannot control.'''
message = context.get('message')
if not message in SUPPRESS_MESSAGES:
if message not in SUPPRESS_MESSAGES:
if not ('task' in context and
'accept_connection2()' in repr(context.get('task'))):
loop.default_exception_handler(context)

2
lib/coins.py

@@ -477,7 +477,7 @@ class DashTestnet(Dash):
TX_PER_BLOCK = 1
RPC_PORT = 19998
IRC_PREFIX = "d_"
PEER_DEFAULT_PORTS = {'t':'51001', 's':'51002'}
PEER_DEFAULT_PORTS = {'t': '51001', 's': '51002'}
PEERS = [
'electrum.dash.siampm.com s t',
]

4
lib/jsonrpc.py

@@ -87,7 +87,7 @@ class JSONRPCv1(JSONRPC):
def is_request(cls, payload):
'''Returns True if the payload (which has a method) is a request.
False means it is a notification.'''
return payload.get('id') != None
return payload.get('id') is not None
class JSONRPCv2(JSONRPC):
@@ -296,7 +296,7 @@ class JSONSessionBase(util.LoggedClass):
'''Extract and return the ID from the payload.
Raises an RPCError if it is missing or invalid.'''
if not 'id' in payload:
if 'id' not in payload:
raise RPCError('missing id', JSONRPC.INVALID_REQUEST)
id_ = payload['id']

9
lib/tx.py

@@ -24,6 +24,7 @@ class Tx(namedtuple("Tx", "version inputs outputs locktime")):
# FIXME: add hash as a cached property?
class TxInput(namedtuple("TxInput", "prev_hash prev_idx script sequence")):
'''Class representing a transaction input.'''
@@ -98,10 +99,10 @@ class Deserializer(object):
def _read_input(self):
return TxInput(
self._read_nbytes(32), # prev_hash
self._read_le_uint32(), # prev_idx
self._read_varbytes(), # script
self._read_le_uint32() # sequence
self._read_nbytes(32), # prev_hash
self._read_le_uint32(), # prev_idx
self._read_varbytes(), # script
self._read_le_uint32() # sequence
)
def _read_outputs(self):

1
lib/util.py

@@ -109,6 +109,7 @@ def deep_getsizeof(obj):
return size(obj)
def subclasses(base_class, strict=True):
'''Return a list of subclasses of base_class in its module.'''
def select(obj):

1
query.py

@@ -71,5 +71,6 @@ def main():
print('Balance: {} {}'.format(coin.decimal_value(balance),
coin.SHORTNAME))
if __name__ == '__main__':
main()

2
server/block_processor.py

@@ -735,7 +735,7 @@ class BlockProcessor(server.db.DB):
for cache_key, cache_value in self.utxo_cache.items():
# suffix = tx_idx + tx_num
hashX = cache_value[:-12]
suffix = cache_key[-2:] + cache_value[-12:-8]
suffix = cache_key[-2:] + cache_value[-12:-8]
batch_put(b'h' + cache_key[:4] + suffix, hashX)
batch_put(b'u' + hashX + suffix, cache_value[-8:])
self.utxo_cache = {}

6
server/controller.py

@@ -170,7 +170,7 @@ class Controller(util.LoggedClass):
def enqueue_session(self, session):
# Might have disconnected whilst waiting
if not session in self.sessions:
if session not in self.sessions:
return
priority = self.session_priority(session)
item = (priority, self.next_queue_id, session)
@@ -274,7 +274,7 @@ class Controller(util.LoggedClass):
future.cancel()
# Wait for all futures to finish
while not all (future.done() for future in self.futures):
while not all(future.done() for future in self.futures):
await asyncio.sleep(0.1)
# Finally shut down the block processor and executor
@@ -329,7 +329,7 @@ class Controller(util.LoggedClass):
'''
self.state = self.LISTENING
env= self.env
env = self.env
if env.tcp_port is not None:
await self.start_server('TCP', env.host, env.tcp_port)
if env.ssl_port is not None:

1
server/daemon.py

@@ -20,6 +20,7 @@ import lib.util as util
class DaemonError(Exception):
'''Raised when the daemon returns an error in its results.'''
class Daemon(util.LoggedClass):
'''Handles connections to a daemon at the given URL.'''

1
server/db.py

@@ -23,6 +23,7 @@ from server.version import VERSION
UTXO = namedtuple("UTXO", "tx_num tx_pos tx_hash height value")
class DB(util.LoggedClass):
'''Simple wrapper of the backend database for querying.

3
server/env.py

@@ -67,7 +67,7 @@ class Env(LoggedClass):
''
)
self.tor_identity = NetIdentity(
self.default('REPORT_HOST_TOR', ''), # must be a string
self.default('REPORT_HOST_TOR', ''), # must be a string
self.integer('REPORT_TCP_PORT_TOR',
self.identity.tcp_port
if self.identity.tcp_port else
@@ -85,7 +85,6 @@ class Env(LoggedClass):
if self.identity.tcp_port == self.identity.ssl_port:
raise self.Error('IRC TCP and SSL ports are the same')
def default(self, envvar, default):
return environ.get(envvar, default)

10
server/mempool.py

@@ -93,8 +93,8 @@ class MemPool(util.LoggedClass):
process_some = self.async_process_some(unfetched, fetch_size // 2)
await self.daemon.mempool_refresh_event.wait()
self.logger.info ('beginning processing of daemon mempool. '
'This can take some time...')
self.logger.info('beginning processing of daemon mempool. '
'This can take some time...')
next_log = 0
loops = -1 # Zero during initial catchup
@@ -187,7 +187,7 @@ class MemPool(util.LoggedClass):
# Skip hashes the daemon has dropped. Either they were
# evicted or they got in a block.
return {hh:raw for hh, raw in zip(hex_hashes, raw_txs) if raw}
return {hh: raw for hh, raw in zip(hex_hashes, raw_txs) if raw}
def process_raw_txs(self, raw_tx_map, pending):
'''Process the dictionary of raw transactions and return a dictionary
@@ -204,7 +204,7 @@ class MemPool(util.LoggedClass):
# Deserialize each tx and put it in our priority queue
for tx_hash, raw_tx in raw_tx_map.items():
if not tx_hash in txs:
if tx_hash not in txs:
continue
tx, _tx_hash = deserializer(raw_tx).read_tx()
@@ -267,7 +267,7 @@ class MemPool(util.LoggedClass):
unconfirmed is True if any txin is unconfirmed.
'''
# hashXs is a defaultdict
if not hashX in self.hashXs:
if hashX not in self.hashXs:
return []
deserializer = self.coin.deserializer()

1
server/storage.py

@@ -12,6 +12,7 @@ from functools import partial
import lib.util as util
def db_class(name):
'''Returns a DB engine class.'''
for db_class in util.subclasses(Storage):

4
tests/test_storage.py

@@ -44,8 +44,8 @@ def test_batch(db):
def test_iterator(db):
"""
The iterator should contain all key/value pairs starting with prefix ordered
by key.
The iterator should contain all key/value pairs starting with prefix
ordered by key.
"""
for i in range(5):
db.put(b"abc" + str.encode(str(i)), str.encode(str(i)))

3
tests/test_util.py

@@ -19,7 +19,6 @@ def test_cachedproperty():
cls.CALL_COUNT += 1
return cls.CALL_COUNT
t = Target()
assert t.prop == t.prop == 1
assert Target.cls_prop == Target.cls_prop == 1
@@ -56,4 +55,4 @@ def test_chunks():
def test_increment_byte_string():
assert util.increment_byte_string(b'1') == b'2'
assert util.increment_byte_string(b'\x01\x01') == b'\x01\x02'
assert util.increment_byte_string(b'\xff\xff') == None
assert util.increment_byte_string(b'\xff\xff') is None

Loading…
Cancel
Save