
Follow-up prior

- remove sybils member and old sybil detection
- make BLACKLIST_URL a coin default
- tweak logging
patch-2
Neil Booth, 6 years ago
commit 03d263fd40
Changed files:

  1. docs/environment.rst (3 lines changed)
  2. electrumx/lib/coins.py (2 lines changed)
  3. electrumx/server/env.py (5 lines changed)
  4. electrumx/server/peers.py (37 lines changed)

docs/environment.rst

@@ -347,7 +347,8 @@ some of this.
 .. envvar:: BLACKLIST_URL

-  URL to retrieve a list of blacklisted peers.
+  URL to retrieve a list of blacklisted peers. If not set, a coin-
+  specific default is used.

electrumx/lib/coins.py

@@ -84,6 +84,7 @@ class Coin(object):
     # Peer discovery
     PEER_DEFAULT_PORTS = {'t': '50001', 's': '50002'}
     PEERS = []
+    BLACKLIST_URL = None

     @classmethod
     def lookup_coin_class(cls, name, net):
@@ -422,6 +423,7 @@ class BitcoinSegwit(BitcoinMixin, Coin):
     TX_COUNT = 318337769
     TX_COUNT_HEIGHT = 524213
     TX_PER_BLOCK = 1400
+    BLACKLIST_URL = 'https://electrum.org/blacklist.json'
     PEERS = [
         'btc.smsys.me s995',
         'E-X.not.fyi s t',

electrumx/server/env.py

@@ -75,10 +75,7 @@ class Env(EnvBase):
         self.bandwidth_limit = self.integer('BANDWIDTH_LIMIT', 2000000)
         self.session_timeout = self.integer('SESSION_TIMEOUT', 600)
         self.drop_client = self.custom("DROP_CLIENT", None, re.compile)
-        self.blacklist_url = self.default('BLACKLIST_URL', None)
-        # temporary default
-        if self.blacklist_url is None and self.coin.NAME == 'BitcoinSegwit':
-            self.blacklist_url = 'https://electrum.org/blacklist.json'
+        self.blacklist_url = self.default('BLACKLIST_URL', self.coin.BLACKLIST_URL)

         # Identities
         clearnet_identity = self.clearnet_identity()
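
Taken together with the coins.py hunk above, the resolution order for the blacklist URL is: an explicit BLACKLIST_URL environment variable wins, otherwise the coin class supplies its default (None for the base Coin, the electrum.org list for BitcoinSegwit). Below is a minimal standalone sketch of that fallback, using hypothetical stand-in classes rather than the real electrumx code; resolve_blacklist_url is an illustrative helper, not part of the codebase.

    import os

    # Hypothetical stand-ins for the classes in electrumx/lib/coins.py.
    class Coin:
        BLACKLIST_URL = None  # no blacklist unless a coin opts in

    class BitcoinSegwit(Coin):
        BLACKLIST_URL = 'https://electrum.org/blacklist.json'

    def resolve_blacklist_url(coin):
        # Approximates self.default('BLACKLIST_URL', self.coin.BLACKLIST_URL):
        # the environment variable, when set, overrides the coin default.
        return os.environ.get('BLACKLIST_URL', coin.BLACKLIST_URL)

    print(resolve_blacklist_url(BitcoinSegwit))  # coin default unless overridden
    print(resolve_blacklist_url(Coin))           # None, so the refresh loop returns early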

electrumx/server/peers.py

@@ -79,7 +79,6 @@ class PeerManager(object):
         self.group = TaskGroup()
         # refreshed
         self.blacklist = set()
-        self.sybils = set()

     def _my_clearnet_peer(self):
         '''Returns the clearnet peer representing this server, if any.'''
@@ -134,27 +133,19 @@
                             for real_name in self.env.coin.PEERS)
         await self._note_peers(imported_peers, limit=None)

-    def _is_allowed(self, peer):
-        if peer.host in self.blacklist:
-            return False
-        if '*.' + '.'.join(peer.host.split('.')[-2:]) in self.blacklist:
-            return False
-        return True
-
     async def _refresh_blacklist(self):
-        session = aiohttp.ClientSession()
         url = self.env.blacklist_url
-        if url is None:
+        if not url:
             return
         while True:
+            session = aiohttp.ClientSession()
             try:
                 async with session.get(url) as response:
                     r = await response.text()
                 self.blacklist = set(json.loads(r))
-                self.logger.info('blacklist retrieved from "%s": %d'
-                                 % (url, len(self.blacklist)))
+                self.logger.info(f'blacklist from {url} has {len(self.blacklist)} entries')
             except Exception as e:
-                self.logger.info('could not retrieve blacklist, "%s"' % url)
+                self.logger.error(f'could not retrieve blacklist from {url}: {e}')
             await sleep(600)

     async def _detect_proxy(self):
@@ -180,8 +171,7 @@
             self.logger.info('no proxy detected, will try later')
             await sleep(900)

-    async def _note_peers(self, peers, limit=2, check_ports=False, check_matches=False,
-                          source=None):
+    async def _note_peers(self, peers, limit=2, check_ports=False, source=None):
         '''Add a limited number of peers that are not already present.'''
         new_peers = []
         known = []
@@ -200,9 +190,6 @@
             else:
                 new_peers.append(peer)

-        if check_matches and len(self.peers) >= 6 and len(known) <= len(self.peers) // 2:
-            return False
-
         if new_peers:
             source = source or new_peers[0].source
             if limit:
@@ -335,8 +322,7 @@
             # Process reported peers if remote peer is good
             peers = peers_task.result()
-            if await self._note_peers(peers, check_matches=not peer.is_tor):
-                self.sybils.add(peer.host)
+            await self._note_peers(peers)

             features = self._features_to_register(peer, peers)
             if features:
@@ -506,11 +492,12 @@
                       not peer.bad and peer.is_public]
         onion_peers = []

+        def is_blacklisted(host):
+            return any(item in self.blacklist
+                       for item in (host, '*.' + '.'.join(host.split('.')[-2:])))
+
         if not is_peer:
-            recent = filter(self._is_allowed, recent)
-            recent = [peer for peer in self.peers
-                      if self._is_allowed(peer)
-                      and peer.host in self.sybils]
+            recent = [peer for peer in recent if not is_blacklisted(peer.host)]

         # Always report ourselves if valid (even if not public)
         peers = set(myself for myself in self.myselves
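
Outside the diff, a small standalone sketch of the check the new is_blacklisted closure performs (the same rule the removed _is_allowed method applied, inverted): a host is dropped if it appears verbatim in the blacklist, or if a wildcard entry covers its last two labels. The hostnames below are made up for illustration.

    def is_blacklisted(blacklist, host):
        # Match either the exact host or a wildcard built from the host's
        # last two dot-separated labels, e.g. '*.badpool.org'.
        wildcard = '*.' + '.'.join(host.split('.')[-2:])
        return host in blacklist or wildcard in blacklist

    blacklist = {'sybil.example.net', '*.badpool.org'}
    print(is_blacklisted(blacklist, 'sybil.example.net'))   # True: exact entry
    print(is_blacklisted(blacklist, 'node1.badpool.org'))   # True: wildcard entry
    print(is_blacklisted(blacklist, 'peer.example.org'))    # False: not listed

The sybil-based filtering that consulted self.sybils is removed entirely, matching the "remove sybils member and old sybil detection" bullet in the commit message.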
