Browse Source

Update compaction test

Version -> 1.4.4p1
patch-2
Neil Booth 7 years ago
parent
commit
e88b8c0dec
  1. 1
      server/block_processor.py
  2. 2
      server/version.py
  3. 56
      tests/server/test_compaction.py

1
server/block_processor.py

@@ -14,7 +14,6 @@ import asyncio
import logging import logging
from struct import pack, unpack from struct import pack, unpack
import time import time
from collections import defaultdict
from functools import partial from functools import partial
from server.daemon import DaemonError from server.daemon import DaemonError

2
server/version.py

@@ -1 +1 @@
VERSION = 'ElectrumX 1.4.3' VERSION = 'ElectrumX 1.4.4p1'

56
tests/server/test_compaction.py

@@ -1,4 +1,4 @@
# Test of compaction code in server/db.py # Test of compaction code in server/history.py
import array import array
from collections import defaultdict from collections import defaultdict
@@ -11,26 +11,25 @@ from server.env import Env
from server.db import DB from server.db import DB
def create_histories(db, hashX_count=100): def create_histories(history, hashX_count=100):
'''Creates a bunch of random transaction histories, and write them '''Creates a bunch of random transaction histories, and write them
to disk in a series of small flushes.''' to disk in a series of small flushes.'''
hashXs = [urandom(HASHX_LEN) for n in range(hashX_count)] hashXs = [urandom(HASHX_LEN) for n in range(hashX_count)]
mk_array = lambda : array.array('I') mk_array = lambda : array.array('I')
histories = {hashX : mk_array() for hashX in hashXs} histories = {hashX : mk_array() for hashX in hashXs}
this_history = defaultdict(mk_array) unflushed = history.unflushed
tx_num = 0 tx_num = 0
while hashXs: while hashXs:
hash_indexes = set(random.randrange(len(hashXs)) hash_indexes = set(random.randrange(len(hashXs))
for n in range(1 + random.randrange(4))) for n in range(1 + random.randrange(4)))
for index in hash_indexes: for index in hash_indexes:
histories[hashXs[index]].append(tx_num) histories[hashXs[index]].append(tx_num)
this_history[hashXs[index]].append(tx_num) unflushed[hashXs[index]].append(tx_num)
tx_num += 1 tx_num += 1
# Occasionally flush and drop a random hashX if non-empty # Occasionally flush and drop a random hashX if non-empty
if random.random() < 0.1: if random.random() < 0.1:
db.flush_history(this_history) history.flush()
this_history.clear()
index = random.randrange(0, len(hashXs)) index = random.randrange(0, len(hashXs))
if histories[hashXs[index]]: if histories[hashXs[index]]:
del hashXs[index] del hashXs[index]
@@ -38,9 +37,9 @@ def create_histories(db, hashX_count=100):
return histories return histories
def check_hashX_compaction(db): def check_hashX_compaction(history):
db.max_hist_row_entries = 40 history.max_hist_row_entries = 40
row_size = db.max_hist_row_entries * 4 row_size = history.max_hist_row_entries * 4
full_hist = array.array('I', range(100)).tobytes() full_hist = array.array('I', range(100)).tobytes()
hashX = urandom(HASHX_LEN) hashX = urandom(HASHX_LEN)
pairs = ((1, 20), (26, 50), (56, 30)) pairs = ((1, 20), (26, 50), (56, 30))
@@ -57,8 +56,8 @@ def check_hashX_compaction(db):
write_items = [] write_items = []
keys_to_delete = set() keys_to_delete = set()
write_size = db._compact_hashX(hashX, hist_map, hist_list, write_size = history._compact_hashX(hashX, hist_map, hist_list,
write_items, keys_to_delete) write_items, keys_to_delete)
# Check results for sanity # Check results for sanity
assert write_size == len(full_hist) assert write_size == len(full_hist)
assert len(write_items) == 3 assert len(write_items) == 3
@@ -75,8 +74,8 @@ def check_hashX_compaction(db):
hist_list = [value for key, value in write_items] hist_list = [value for key, value in write_items]
write_items.clear() write_items.clear()
keys_to_delete.clear() keys_to_delete.clear()
write_size = db._compact_hashX(hashX, hist_map, hist_list, write_size = history._compact_hashX(hashX, hist_map, hist_list,
write_items, keys_to_delete) write_items, keys_to_delete)
assert write_size == 0 assert write_size == 0
assert len(write_items) == 0 assert len(write_items) == 0
assert len(keys_to_delete) == 0 assert len(keys_to_delete) == 0
@@ -84,8 +83,8 @@ def check_hashX_compaction(db):
# Check re-compaction adding a single tx writes the one row # Check re-compaction adding a single tx writes the one row
hist_list[-1] += array.array('I', [100]).tobytes() hist_list[-1] += array.array('I', [100]).tobytes()
write_size = db._compact_hashX(hashX, hist_map, hist_list, write_size = history._compact_hashX(hashX, hist_map, hist_list,
write_items, keys_to_delete) write_items, keys_to_delete)
assert write_size == len(hist_list[-1]) assert write_size == len(hist_list[-1])
assert write_items == [(hashX + pack('>H', 2), hist_list[-1])] assert write_items == [(hashX + pack('>H', 2), hist_list[-1])]
assert len(keys_to_delete) == 1 assert len(keys_to_delete) == 1
@@ -93,22 +92,21 @@ def check_hashX_compaction(db):
assert len(hist_map) == len(pairs) assert len(hist_map) == len(pairs)
def check_written(db, histories): def check_written(history, histories):
for hashX, hist in histories.items(): for hashX, hist in histories.items():
db_hist = array.array('I', db.history.get_txnums(hashX, limit=None)) db_hist = array.array('I', history.get_txnums(hashX, limit=None))
assert hist == db_hist assert hist == db_hist
def compact_history(db): def compact_history(history):
'''Synchronously compact the DB history.''' '''Synchronously compact the DB history.'''
db.first_sync = False history.comp_cursor = 0
db.comp_cursor = 0
db.comp_flush_count = max(db.comp_flush_count, 1) history.comp_flush_count = max(history.comp_flush_count, 1)
limit = 5 * 1000 limit = 5 * 1000
write_size = 0 write_size = 0
while db.comp_cursor != -1: while history.comp_cursor != -1:
write_size += db._compact_history(limit) write_size += history._compact_history(limit)
assert write_size != 0 assert write_size != 0
def run_test(db_dir): def run_test(db_dir):
@@ -117,14 +115,14 @@ def run_test(db_dir):
environ['DAEMON_URL'] = '' environ['DAEMON_URL'] = ''
environ['COIN'] = 'BitcoinCash' environ['COIN'] = 'BitcoinCash'
env = Env() env = Env()
db = DB(env) history = DB(env).history
# Test abstract compaction # Test abstract compaction
check_hashX_compaction(db) check_hashX_compaction(history)
# Now test in with random data # Now test in with random data
histories = create_histories(db) histories = create_histories(history)
check_written(db, histories) check_written(history, histories)
compact_history(db) compact_history(history)
check_written(db, histories) check_written(history, histories)
def test_compaction(tmpdir): def test_compaction(tmpdir):
db_dir = str(tmpdir) db_dir = str(tmpdir)

Loading…
Cancel
Save