Browse Source

pytest: Integrate with known/allowed failures

Moved the flagging for allowed failures into the factory getter, and
renamed into `may_fail`. Also stopped the teardown of a node from
throwing an exception if we are allowed to exit non-cleanly.

Signed-off-by: Christian Decker <decker.christian@gmail.com>
ppa-0.6.1
Christian Decker 7 years ago
committed by Rusty Russell
parent
commit
0b350d2f5f
  1. 12
      tests/test_lightningd.py
  2. 12
      tests/utils.py

12
tests/test_lightningd.py

@@ -87,7 +87,7 @@ class NodeFactory(object):
self.nodes = []
self.executor = executor
def get_node(self, disconnect=None, options=None):
def get_node(self, disconnect=None, options=None, may_fail=False):
node_id = self.next_id
self.next_id += 1
@@ -107,7 +107,7 @@ class NodeFactory(object):
daemon.cmd_line.append(options)
rpc = LightningRpc(socket_path, self.executor)
node = utils.LightningNode(daemon, rpc, bitcoind, self.executor)
node = utils.LightningNode(daemon, rpc, bitcoind, self.executor, may_fail=may_fail)
self.nodes.append(node)
if VALGRIND:
node.daemon.cmd_line = [
@@ -164,7 +164,7 @@ class BaseLightningDTests(unittest.TestCase):
return 1 if errors else 0
def getCrashLog(self, node):
if node.known_fail:
if node.may_fail:
return None, None
try:
crashlog = os.path.join(node.daemon.lightning_dir, 'crash.log')
@@ -731,7 +731,7 @@ class LightningDTests(BaseLightningDTests):
def test_penalty_inhtlc(self):
"""Test penalty transaction with an incoming HTLC"""
# We suppress each one after first commit; HTLC gets added not fulfilled.
l1 = self.node_factory.get_node(disconnect=['_WIRE_COMMITMENT_SIGNED'])
l1 = self.node_factory.get_node(disconnect=['_WIRE_COMMITMENT_SIGNED'], may_fail=True)
l2 = self.node_factory.get_node(disconnect=['_WIRE_COMMITMENT_SIGNED'])
l1.rpc.connect('localhost', l2.info['port'], l2.info['id'])
@@ -770,7 +770,6 @@ class LightningDTests(BaseLightningDTests):
l2.daemon.wait_for_log('-> ONCHAIND_CHEATED')
# FIXME: l1 should try to stumble along!
l1.allow_failure()
# l2 should spend all of the outputs (except to-us).
# Could happen in any order, depending on commitment tx.
@@ -790,7 +789,7 @@ class LightningDTests(BaseLightningDTests):
def test_penalty_outhtlc(self):
"""Test penalty transaction with an outgoing HTLC"""
# First we need to get funds to l2, so suppress after second.
l1 = self.node_factory.get_node(disconnect=['_WIRE_COMMITMENT_SIGNED*3'])
l1 = self.node_factory.get_node(disconnect=['_WIRE_COMMITMENT_SIGNED*3'], may_fail=True)
l2 = self.node_factory.get_node(disconnect=['_WIRE_COMMITMENT_SIGNED*3'])
l1.rpc.connect('localhost', l2.info['port'], l2.info['id'])
@@ -832,7 +831,6 @@ class LightningDTests(BaseLightningDTests):
l2.daemon.wait_for_log('-> ONCHAIND_CHEATED')
# FIXME: l1 should try to stumble along!
l1.allow_failure()
# l2 should spend all of the outputs (except to-us).
# Could happen in any order, depending on commitment tx.

12
tests/utils.py

@@ -252,12 +252,12 @@ class LightningD(TailableProc):
return self.proc.returncode
class LightningNode(object):
def __init__(self, daemon, rpc, btc, executor):
def __init__(self, daemon, rpc, btc, executor, may_fail=False):
self.rpc = rpc
self.daemon = daemon
self.bitcoin = btc
self.executor = executor
self.known_fail = False
self.may_fail = may_fail
# Use batch if you're doing more than one async.
def connect(self, remote_node, capacity, async=False):
@@ -323,12 +323,6 @@ class LightningNode(object):
db.close()
return result
# FIXME: we should flag daemon on startup, suppress error
def allow_failure(self):
"""Note that a daemon has (deliberately) crashed, so we don't fail
on cleanup"""
self.known_fail = True
def stop(self, timeout=10):
""" Attempt to do a clean shutdown, but kill if it hangs
"""
@@ -346,7 +340,7 @@ class LightningNode(object):
if rc is None:
rc = self.daemon.stop()
if rc != 0:
if rc != 0 and not self.may_fail:
raise ValueError("Node did not exit cleanly, rc={}".format(rc))
else:
return rc

Loading…
Cancel
Save