
Update compaction test

Version -> 1.4.4pre1
Branch: patch-2
Neil Booth, 7 years ago
commit e88b8c0dec
3 changed files:
  1. server/block_processor.py (1 line changed)
  2. server/version.py (2 lines changed)
  3. tests/server/test_compaction.py (56 lines changed)

server/block_processor.py (1 line changed)

@@ -14,7 +14,6 @@ import asyncio
 import logging
 from struct import pack, unpack
 import time
-from collections import defaultdict
 from functools import partial
 from server.daemon import DaemonError

server/version.py (2 lines changed)

@@ -1 +1 @@
-VERSION = 'ElectrumX 1.4.3'
+VERSION = 'ElectrumX 1.4.4p1'

tests/server/test_compaction.py (56 lines changed)

@@ -1,4 +1,4 @@
-# Test of compaction code in server/db.py
+# Test of compaction code in server/history.py
 import array
 from collections import defaultdict
@@ -11,26 +11,25 @@ from server.env import Env
 from server.db import DB
-def create_histories(db, hashX_count=100):
+def create_histories(history, hashX_count=100):
     '''Creates a bunch of random transaction histories, and write them
     to disk in a series of small flushes.'''
     hashXs = [urandom(HASHX_LEN) for n in range(hashX_count)]
     mk_array = lambda : array.array('I')
     histories = {hashX : mk_array() for hashX in hashXs}
-    this_history = defaultdict(mk_array)
+    unflushed = history.unflushed
     tx_num = 0
     while hashXs:
         hash_indexes = set(random.randrange(len(hashXs))
                            for n in range(1 + random.randrange(4)))
         for index in hash_indexes:
             histories[hashXs[index]].append(tx_num)
-            this_history[hashXs[index]].append(tx_num)
+            unflushed[hashXs[index]].append(tx_num)
         tx_num += 1
         # Occasionally flush and drop a random hashX if non-empty
         if random.random() < 0.1:
-            db.flush_history(this_history)
-            this_history.clear()
+            history.flush()
             index = random.randrange(0, len(hashXs))
             if histories[hashXs[index]]:
                 del hashXs[index]
@@ -38,9 +37,9 @@ def create_histories(db, hashX_count=100):
     return histories
-def check_hashX_compaction(db):
-    db.max_hist_row_entries = 40
-    row_size = db.max_hist_row_entries * 4
+def check_hashX_compaction(history):
+    history.max_hist_row_entries = 40
+    row_size = history.max_hist_row_entries * 4
     full_hist = array.array('I', range(100)).tobytes()
     hashX = urandom(HASHX_LEN)
     pairs = ((1, 20), (26, 50), (56, 30))
@@ -57,8 +56,8 @@ def check_hashX_compaction(db):
     write_items = []
     keys_to_delete = set()
-    write_size = db._compact_hashX(hashX, hist_map, hist_list,
-                                   write_items, keys_to_delete)
+    write_size = history._compact_hashX(hashX, hist_map, hist_list,
+                                        write_items, keys_to_delete)
     # Check results for sanity
     assert write_size == len(full_hist)
     assert len(write_items) == 3
@@ -75,8 +74,8 @@ def check_hashX_compaction(db):
     hist_list = [value for key, value in write_items]
     write_items.clear()
     keys_to_delete.clear()
-    write_size = db._compact_hashX(hashX, hist_map, hist_list,
-                                   write_items, keys_to_delete)
+    write_size = history._compact_hashX(hashX, hist_map, hist_list,
+                                        write_items, keys_to_delete)
     assert write_size == 0
     assert len(write_items) == 0
     assert len(keys_to_delete) == 0
@@ -84,8 +83,8 @@ def check_hashX_compaction(db):
     # Check re-compaction adding a single tx writes the one row
     hist_list[-1] += array.array('I', [100]).tobytes()
-    write_size = db._compact_hashX(hashX, hist_map, hist_list,
-                                   write_items, keys_to_delete)
+    write_size = history._compact_hashX(hashX, hist_map, hist_list,
+                                        write_items, keys_to_delete)
     assert write_size == len(hist_list[-1])
     assert write_items == [(hashX + pack('>H', 2), hist_list[-1])]
     assert len(keys_to_delete) == 1
@@ -93,22 +92,21 @@ def check_hashX_compaction(db):
     assert len(hist_map) == len(pairs)
-def check_written(db, histories):
+def check_written(history, histories):
     for hashX, hist in histories.items():
-        db_hist = array.array('I', db.history.get_txnums(hashX, limit=None))
+        db_hist = array.array('I', history.get_txnums(hashX, limit=None))
         assert hist == db_hist
-def compact_history(db):
+def compact_history(history):
     '''Synchronously compact the DB history.'''
-    db.first_sync = False
-    db.comp_cursor = 0
+    history.comp_cursor = 0
-    db.comp_flush_count = max(db.comp_flush_count, 1)
+    history.comp_flush_count = max(history.comp_flush_count, 1)
     limit = 5 * 1000
     write_size = 0
-    while db.comp_cursor != -1:
-        write_size += db._compact_history(limit)
+    while history.comp_cursor != -1:
+        write_size += history._compact_history(limit)
     assert write_size != 0
 def run_test(db_dir):
@@ -117,14 +115,14 @@ def run_test(db_dir):
     environ['DAEMON_URL'] = ''
     environ['COIN'] = 'BitcoinCash'
     env = Env()
-    db = DB(env)
+    history = DB(env).history
     # Test abstract compaction
-    check_hashX_compaction(db)
+    check_hashX_compaction(history)
     # Now test in with random data
-    histories = create_histories(db)
-    check_written(db, histories)
-    compact_history(db)
-    check_written(db, histories)
+    histories = create_histories(history)
+    check_written(history, histories)
+    compact_history(history)
+    check_written(history, histories)
 def test_compaction(tmpdir):
     db_dir = str(tmpdir)
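
For anyone wanting to exercise the updated test locally: test_compaction(tmpdir) is an ordinary pytest test built on the built-in tmpdir fixture, so the module can be run on its own. A minimal sketch, assuming pytest is installed and the command is run from the repository root so the server and lib packages are importable (the helper script below is illustrative, not part of the commit):

# Run only the compaction test module; equivalent to
# `pytest tests/server/test_compaction.py` on the command line.
import pytest

pytest.main(['tests/server/test_compaction.py'])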
