It might have already happened, and anyway, we wait for the actual state below.

```
# make database snapshot of l2
l2.stop()
l2_db_path = os.path.join(l2.daemon.lightning_dir, chainparams['name'], 'lightningd.sqlite3')
l2_db_path_bak = os.path.join(l2.daemon.lightning_dir, chainparams['name'], 'lightningd.sqlite3.bak')
copyfile(l2_db_path, l2_db_path_bak)
l2.start()
sync_blockheight(bitcoind, [l2])

# push some money from l3->l2, so that the commit counter advances
l2.rpc.connect(l3.info['id'], 'localhost', l3.port)
>       l2.daemon.wait_for_log('now ACTIVE')

tests/test_closing.py:908:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

contrib/pyln-testing/pyln/testing/utils.py:288: in wait_for_log
    return self.wait_for_logs([regex], timeout)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <pyln.testing.utils.LightningD object at 0x7f0c145a32d0>
regexs = ['now ACTIVE'], timeout = 60

    def wait_for_logs(self, regexs, timeout=TIMEOUT):
        """Look for `regexs` in the logs.

        We tail the stdout of the process and look for each regex in `regexs`,
        starting from last of the previous waited-for log entries (if any).  We
        fail if the timeout is exceeded or if the underlying process exits
        before all the `regexs` were found.

        If timeout is None, no time-out is applied.
        """
        logging.debug("Waiting for {} in the logs".format(regexs))
        exs = [re.compile(r) for r in regexs]
        start_time = time.time()
        pos = self.logsearch_start
        while True:
            if timeout is not None and time.time() > start_time + timeout:
                print("Time-out: can't find {} in logs".format(exs))
                for r in exs:
                    if self.is_in_log(r):
                        print("({} was previously in logs!)".format(r))
>               raise TimeoutError('Unable to find "{}" in logs.'.format(exs))
E               TimeoutError: Unable to find "[re.compile('now ACTIVE')]" in logs.

contrib/pyln-testing/pyln/testing/utils.py:264: TimeoutError
```

Signed-off-by: Rusty Russell <rusty@rustcorp.com.au>
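The race here is that `wait_for_log` only scans the log from the last waited-for position, so a 'now ACTIVE' line that was printed earlier is never matched. A minimal sketch of the race-free alternative the message alludes to, polling the channel state over RPC instead of grepping the log; this is illustrative rather than the literal one-line diff, and it assumes pyln-testing's `wait_for`/`only_one` helpers and the `listpeers` channel `state` field:

```python
# Illustrative sketch (not the actual diff): poll the channel state via RPC
# instead of waiting for a 'now ACTIVE' log line that may already have been
# emitted before the log tail position.
from pyln.testing.utils import wait_for, only_one

l2.rpc.connect(l3.info['id'], 'localhost', l3.port)

# Keep polling listpeers until the channel reports CHANNELD_NORMAL; wait_for
# re-evaluates the lambda until it is true or the test timeout expires.
wait_for(lambda: only_one(only_one(
    l2.rpc.listpeers(l3.info['id'])['peers'])['channels'])['state'] == 'CHANNELD_NORMAL')
```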
Rusty Russell
4 years ago
1 changed file with 1 addition and 1 deletion