Browse Source

server: Initial database redesign (#35)

- Compacted the evolutions.
- Created the domain types.
- Added foreign keys.
- Added support for the P2WPKH address format.
prometheus-integration
Alexis Hernandez 7 years ago
parent
commit
2eb9c010dd
  1. 23
      server/app/com/xsn/explorer/data/anorm/dao/TransactionPostgresDAO.scala
  2. 47
      server/conf/evolutions/default/1.sql
  3. 44
      server/conf/evolutions/default/2.sql
  4. 51
      server/conf/evolutions/default/3.sql
  5. 54
      server/conf/evolutions/default/4.sql
  6. 20
      server/conf/evolutions/default/5.sql
  7. 13
      server/conf/evolutions/default/6.sql
  8. 16
      server/test/com/xsn/explorer/data/BlockPostgresDataHandlerSpec.scala
  9. 40
      server/test/com/xsn/explorer/data/TransactionPostgresDataHandlerSpec.scala
  10. 6
      server/test/com/xsn/explorer/processors/BlockOpsSpec.scala

23
server/app/com/xsn/explorer/data/anorm/dao/TransactionPostgresDAO.scala

@ -49,22 +49,37 @@ class TransactionPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderi
* NOTE: Ensure the connection has an open transaction.
*/
def deleteBy(blockhash: Blockhash)(implicit conn: Connection): List[Transaction] = {
  // Snapshot the transactions belonging to the block before touching any rows,
  // so we can verify afterwards that the DELETE removed exactly this set.
  val expectedTransactions = SQL(
    """
      |SELECT txid, blockhash, time, size
      |FROM transactions
      |WHERE blockhash = {blockhash}
    """.stripMargin
  ).on(
    'blockhash -> blockhash.string
  ).as(parseTransaction.*).flatten

  // Delete the inputs/outputs first (they reference transactions via foreign keys)
  // and keep them so the caller receives fully populated transactions.
  val result = expectedTransactions.map { tx =>
    val inputs = deleteInputs(tx.id)
    val outputs = deleteOutputs(tx.id)
    tx.copy(inputs = inputs, outputs = outputs)
  }

  val deletedTransactions = SQL(
    """
      |DELETE FROM transactions
      |WHERE blockhash = {blockhash}
      |RETURNING txid, blockhash, time, size
    """.stripMargin
  ).on(
    'blockhash -> blockhash.string
  ).as(parseTransaction.*).flatten

  // Consistency check: the DELETE must remove exactly the rows selected above;
  // a mismatch aborts the enclosing database transaction (see NOTE above).
  Option(deletedTransactions)
    .filter(_.size == expectedTransactions.size)
    .map(_ => result)
    .getOrElse { throw new RuntimeException("Failed to delete transactions consistently") } // this should not happen
}
def getBy(

47
server/conf/evolutions/default/1.sql

@ -1,32 +1,31 @@
# --- !Ups

-- custom types are defined to simplify the validations and simplify updates to the validations.
CREATE DOMAIN BLOCKHASH_TYPE AS TEXT
CHECK (
  VALUE ~ '^[a-f0-9]{64}$'
);

CREATE DOMAIN TXID_TYPE AS TEXT
CHECK (
  VALUE ~ '^[a-f0-9]{64}$'
);

-- 34 characters covers legacy base58 addresses; 42 characters covers the
-- P2WPKH address format.
CREATE DOMAIN ADDRESS_TYPE AS TEXT
CHECK (
  VALUE ~ '(^[A-Za-z0-9]{34}$)|(^[A-Za-z0-9]{42}$)'
);

CREATE DOMAIN NON_NEGATIVE_INT_TYPE AS INT
CHECK (
  VALUE >= 0
);

# --- !Downs

DROP DOMAIN NON_NEGATIVE_INT_TYPE;
DROP DOMAIN ADDRESS_TYPE;
DROP DOMAIN TXID_TYPE;
DROP DOMAIN BLOCKHASH_TYPE;

44
server/conf/evolutions/default/2.sql

@ -1,32 +1,32 @@
# --- !Ups

-- the next_blockhash doesn't have a foreign key on purpose, it can cause issues while deleting blocks.
CREATE TABLE blocks(
  blockhash BLOCKHASH_TYPE NOT NULL,
  previous_blockhash BLOCKHASH_TYPE NULL,
  next_blockhash BLOCKHASH_TYPE NULL,
  merkle_root TEXT NOT NULL,
  tpos_contract TXID_TYPE NULL,
  size NON_NEGATIVE_INT_TYPE NOT NULL,
  height NON_NEGATIVE_INT_TYPE NOT NULL,
  version INT NOT NULL,
  time BIGINT NOT NULL,
  median_time BIGINT NOT NULL,
  nonce INT NOT NULL,
  bits TEXT NOT NULL,
  chainwork TEXT NOT NULL,
  difficulty DECIMAL(30, 20) NOT NULL,
  -- constraints
  CONSTRAINT blocks_blockhash_pk PRIMARY KEY (blockhash),
  CONSTRAINT blocks_height_unique UNIQUE (height),
  CONSTRAINT blocks_previous_blockhash_fk FOREIGN KEY (previous_blockhash) REFERENCES blocks (blockhash)
);

CREATE INDEX blocks_time_index ON blocks USING BTREE (time);

# --- !Downs

DROP INDEX blocks_time_index;
DROP TABLE blocks;

51
server/conf/evolutions/default/3.sql

@ -1,49 +1,30 @@
# --- !Ups

-- we pre-compute the balances while storing transactions in order to perform
-- simpler queries while requiring addresses and the available amounts.
CREATE TABLE balances(
  address ADDRESS_TYPE NOT NULL,
  received DECIMAL(30, 15) NOT NULL,
  spent DECIMAL(30, 15) NOT NULL,
  -- constraints
  CONSTRAINT balances_address_pk PRIMARY KEY (address)
);

-- the available amount is derived, so index the expression instead of storing a column.
CREATE INDEX balances_available_index ON balances USING BTREE ((received - spent));

-- there are certain addresses that we need to hide from the public,
-- like the one used for the coin swap.
CREATE TABLE hidden_addresses(
  address ADDRESS_TYPE NOT NULL,
  -- constraints
  CONSTRAINT hidden_addresses_address_pk PRIMARY KEY (address)
);

# --- !Downs

DROP TABLE hidden_addresses;
DROP INDEX balances_available_index;
DROP TABLE balances;

54
server/conf/evolutions/default/4.sql

@ -1,14 +1,56 @@
# --- !Ups

CREATE TABLE transactions(
  txid TXID_TYPE NOT NULL,
  blockhash BLOCKHASH_TYPE NOT NULL,
  time BIGINT NOT NULL,
  size NON_NEGATIVE_INT_TYPE NOT NULL,
  -- constraints
  CONSTRAINT transactions_txid_pk PRIMARY KEY (txid),
  CONSTRAINT transactions_blockhash_fk FOREIGN KEY (blockhash) REFERENCES blocks (blockhash)
);

CREATE INDEX transactions_blockhash_index ON transactions USING BTREE (blockhash);
CREATE INDEX transactions_time_index ON transactions USING BTREE (time);

CREATE TABLE transaction_inputs(
  txid TXID_TYPE NOT NULL,
  index NON_NEGATIVE_INT_TYPE NOT NULL,
  value DECIMAL(30, 15) NULL,
  address ADDRESS_TYPE NULL,
  -- constraints
  CONSTRAINT transaction_inputs_txid_index_pk PRIMARY KEY (txid, index),
  CONSTRAINT transaction_inputs_txid_fk FOREIGN KEY (txid) REFERENCES transactions (txid)
);

CREATE INDEX transaction_inputs_address_index ON transaction_inputs USING BTREE (address);

CREATE TABLE transaction_outputs(
  txid TXID_TYPE NOT NULL,
  index NON_NEGATIVE_INT_TYPE NOT NULL,
  value DECIMAL(30, 15) NOT NULL,
  address ADDRESS_TYPE NULL,
  tpos_owner_address ADDRESS_TYPE NULL,
  tpos_merchant_address ADDRESS_TYPE NULL,
  -- constraints
  CONSTRAINT transaction_outputs_txid_index_pk PRIMARY KEY (txid, index),
  CONSTRAINT transaction_outputs_txid_fk FOREIGN KEY (txid) REFERENCES transactions (txid)
);

CREATE INDEX transaction_outputs_address_index ON transaction_outputs USING BTREE (address);

# --- !Downs

DROP INDEX transaction_outputs_address_index;
DROP TABLE transaction_outputs;
DROP INDEX transaction_inputs_address_index;
DROP TABLE transaction_inputs;
DROP INDEX transactions_blockhash_index;
DROP INDEX transactions_time_index;
DROP TABLE transactions;

20
server/conf/evolutions/default/5.sql

@ -1,20 +0,0 @@
# --- !Ups
-- replace the stored `available` column with an expression index on
-- (received - spent), so the balance no longer needs to be kept in sync.
DROP INDEX balances_available_index;
ALTER TABLE balances
DROP COLUMN available;
CREATE INDEX balances_available_index ON balances ((received - spent));
# --- !Downs
DROP INDEX balances_available_index;
-- in case this down is applied, the table needs to be rebuilt.
ALTER TABLE balances
ADD COLUMN available DECIMAL(30, 15) NOT NULL DEFAULT 0;
CREATE INDEX balances_available_index ON balances (available);

13
server/conf/evolutions/default/6.sql

@ -1,13 +0,0 @@
# --- !Ups
-- speed up address lookups on transaction inputs and outputs.
CREATE INDEX transaction_inputs_address_index ON transaction_inputs (address);
CREATE INDEX transaction_outputs_address_index ON transaction_outputs (address);
# --- !Downs
DROP INDEX transaction_outputs_address_index;
DROP INDEX transaction_inputs_address_index;

16
server/test/com/xsn/explorer/data/BlockPostgresDataHandlerSpec.scala

@ -22,6 +22,7 @@ class BlockPostgresDataHandlerSpec extends PostgresDataHandlerSpec with BeforeAn
"add a new block" in {
// PoS block
val block = BlockLoader.get("1ca318b7a26ed67ca7c8c9b5069d653ba224bf86989125d1dfbb0973b7d6a5e0")
.copy(previousBlockhash = None)
val result = dataHandler.insert(block)
result.isGood mustEqual true
@ -30,6 +31,7 @@ class BlockPostgresDataHandlerSpec extends PostgresDataHandlerSpec with BeforeAn
"fail on existing blockhash" in {
val block = BlockLoader.get("1ca318b7a26ed67ca7c8c9b5069d653ba224bf86989125d1dfbb0973b7d6a5e0")
.copy(previousBlockhash = None)
dataHandler.insert(block).isGood mustEqual true
val newBlock = BlockLoader.get("25762bf01143f7fe34912c926e0b95528b082c6323de35516de0fc321f5d8058")
@ -40,6 +42,7 @@ class BlockPostgresDataHandlerSpec extends PostgresDataHandlerSpec with BeforeAn
"fail on existing height" in {
val block = BlockLoader.get("1ca318b7a26ed67ca7c8c9b5069d653ba224bf86989125d1dfbb0973b7d6a5e0")
.copy(previousBlockhash = None)
dataHandler.insert(block).isGood mustEqual true
val newBlock = BlockLoader.get("25762bf01143f7fe34912c926e0b95528b082c6323de35516de0fc321f5d8058")
@ -52,6 +55,7 @@ class BlockPostgresDataHandlerSpec extends PostgresDataHandlerSpec with BeforeAn
"getBy blockhash" should {
"return a block" in {
val block = BlockLoader.get("1ca318b7a26ed67ca7c8c9b5069d653ba224bf86989125d1dfbb0973b7d6a5e0")
.copy(previousBlockhash = None)
dataHandler.insert(block)
@ -71,8 +75,9 @@ class BlockPostgresDataHandlerSpec extends PostgresDataHandlerSpec with BeforeAn
"getBy height" should {
"return a block" in {
val block = BlockLoader.get("1ca318b7a26ed67ca7c8c9b5069d653ba224bf86989125d1dfbb0973b7d6a5e0")
.copy(previousBlockhash = None)
dataHandler.insert(block)
dataHandler.insert(block).isGood mustEqual true
val result = dataHandler.getBy(block.height)
result.isGood mustEqual true
@ -90,7 +95,8 @@ class BlockPostgresDataHandlerSpec extends PostgresDataHandlerSpec with BeforeAn
"delete" should {
"delete a block" in {
val block = BlockLoader.get("1ca318b7a26ed67ca7c8c9b5069d653ba224bf86989125d1dfbb0973b7d6a5e0")
dataHandler.insert(block)
.copy(previousBlockhash = None)
dataHandler.insert(block).isGood mustEqual true
val result = dataHandler.delete(block.hash)
result.isGood mustEqual true
@ -110,10 +116,11 @@ class BlockPostgresDataHandlerSpec extends PostgresDataHandlerSpec with BeforeAn
clearDatabase()
val block0 = BlockLoader.get("00000c822abdbb23e28f79a49d29b41429737c6c7e15df40d1b1f1b35907ae34")
.copy(previousBlockhash = None)
val block1 = BlockLoader.get("000003fb382f6892ae96594b81aa916a8923c70701de4e7054aac556c7271ef7")
val block2 = BlockLoader.get("000004645e2717b556682e3c642a4c6e473bf25c653ff8e8c114a3006040ffb8")
List(block1, block2, block0).foreach(dataHandler.insert)
List(block0, block1, block2).map(dataHandler.insert).foreach(_.isGood mustEqual true)
val result = dataHandler.getLatestBlock()
result.isGood mustEqual true
@ -133,10 +140,11 @@ class BlockPostgresDataHandlerSpec extends PostgresDataHandlerSpec with BeforeAn
clearDatabase()
val block0 = BlockLoader.get("00000c822abdbb23e28f79a49d29b41429737c6c7e15df40d1b1f1b35907ae34")
.copy(previousBlockhash = None)
val block1 = BlockLoader.get("000003fb382f6892ae96594b81aa916a8923c70701de4e7054aac556c7271ef7")
val block2 = BlockLoader.get("000004645e2717b556682e3c642a4c6e473bf25c653ff8e8c114a3006040ffb8")
List(block1, block2, block0).map(dataHandler.insert).foreach(_.isGood mustEqual true)
List(block0, block1, block2).map(dataHandler.insert).foreach(_.isGood mustEqual true)
val result = dataHandler.getFirstBlock()
result.isGood mustEqual true

40
server/test/com/xsn/explorer/data/TransactionPostgresDataHandlerSpec.scala

@ -1,21 +1,43 @@
package com.xsn.explorer.data
import com.alexitc.playsonify.models._
import com.xsn.explorer.data.anorm.TransactionPostgresDataHandler
import com.xsn.explorer.data.anorm.dao.TransactionPostgresDAO
import com.xsn.explorer.data.anorm.dao.{BlockPostgresDAO, TransactionPostgresDAO}
import com.xsn.explorer.data.anorm.interpreters.FieldOrderingSQLInterpreter
import com.xsn.explorer.data.anorm.{BlockPostgresDataHandler, TransactionPostgresDataHandler}
import com.xsn.explorer.data.common.PostgresDataHandlerSpec
import com.xsn.explorer.errors.TransactionNotFoundError
import com.xsn.explorer.helpers.DataHelper._
import com.xsn.explorer.models._
import com.xsn.explorer.models.fields.TransactionField
import com.xsn.explorer.models.{Size, Transaction, TransactionWithValues}
import com.xsn.explorer.models.rpc.Block
import org.scalactic.{Bad, Good}
import org.scalatest.BeforeAndAfter
class TransactionPostgresDataHandlerSpec extends PostgresDataHandlerSpec {
class TransactionPostgresDataHandlerSpec extends PostgresDataHandlerSpec with BeforeAndAfter {
lazy val dataHandler = new TransactionPostgresDataHandler(database, new TransactionPostgresDAO(new FieldOrderingSQLInterpreter))
lazy val blockDataHandler = new BlockPostgresDataHandler(database, new BlockPostgresDAO)
val defaultOrdering = FieldOrdering(TransactionField.Time, OrderingCondition.DescendingOrder)
val block = Block(
hash = createBlockhash("ad92f0dcea2fdaa357aac6eab00695cf07b487e34113598909f625c24629c981"),
previousBlockhash = None,
nextBlockhash = None,
merkleRoot = createBlockhash("ad9320dcea2fdaa357aac6eab00695cf07b487e34113598909f625c24629c981"),
transactions = List.empty,
confirmations = Confirmations(0),
size = Size(10),
height = Height(0),
version = 0,
time = 0,
medianTime = 0,
nonce = 0,
bits = "abcdef",
chainwork = "abcdef",
difficulty = 12.2,
tposContract = None
)
val inputs = List(
Transaction.Input(0, None, None),
Transaction.Input(1, Some(BigDecimal(100)), Some(createAddress("XxQ7j37LfuXgsLd5DZAwFKhT3s2ZMkW85F")))
@ -33,12 +55,16 @@ class TransactionPostgresDataHandlerSpec extends PostgresDataHandlerSpec {
val transaction = Transaction(
createTransactionId("99c51e4fe89466faa734d6207a7ef6115fa1dd33f7156b006fafc6bb85a79eb8"),
createBlockhash("ad92f0dcea2fdaa357aac6eab00695cf07b487e34113598909f625c24629c981"),
block.hash,
12312312L,
Size(1000),
inputs,
outputs)
before {
blockDataHandler.insert(block)
}
"upsert" should {
"add a new transaction" in {
val result = dataHandler.upsert(transaction)
@ -47,7 +73,6 @@ class TransactionPostgresDataHandlerSpec extends PostgresDataHandlerSpec {
"update an existing transaction" in {
val newTransaction = transaction.copy(
blockhash = createBlockhash("99c51e4fe89466faa734d6207a7ef6115fa1dd32f7156b006fafc6bb85a79eb8"),
time = 2313121L,
size = Size(2000))
@ -76,6 +101,7 @@ class TransactionPostgresDataHandlerSpec extends PostgresDataHandlerSpec {
dataHandler.upsert(transaction).isGood mustEqual true
val result = dataHandler.deleteBy(transaction.blockhash)
println(result)
result.isGood mustEqual true
result.get mustEqual List(transaction)
}
@ -100,7 +126,7 @@ class TransactionPostgresDataHandlerSpec extends PostgresDataHandlerSpec {
val transaction = Transaction(
createTransactionId("92c51e4fe89466faa734d6207a7ef6115fa1dd33f7156b006fafc6bb85a79eb8"),
createBlockhash("ad22f0dcea2fdaa357aac6eab00695cf07b487e34113598909f625c24629c981"),
block.hash,
12312312L,
Size(1000),
inputs,

6
server/test/com/xsn/explorer/processors/BlockOpsSpec.scala

@ -35,15 +35,15 @@ class BlockOpsSpec extends PostgresDataHandlerSpec with ScalaFutures with Before
"createBlock" should {
"create a new block" in {
whenReady(blockOps.createBlock(block1, List.empty)) { result =>
whenReady(blockOps.createBlock(block1.copy(previousBlockhash = None), List.empty)) { result =>
result mustEqual Good(BlockOps.Result.BlockCreated)
}
}
"replace a block if the height already exist" in {
whenReady(blockOps.createBlock(block1, List.empty)) { _.isGood mustEqual true }
whenReady(blockOps.createBlock(block1.copy(previousBlockhash = None), List.empty)) { _.isGood mustEqual true }
whenReady(blockOps.createBlock(block2.copy(height = block1.height), List.empty)) { result =>
whenReady(blockOps.createBlock(block2.copy(height = block1.height, previousBlockhash = None), List.empty)) { result =>
result mustEqual Good(BlockOps.Result.BlockReplacedByHeight)
}
}

Loading…
Cancel
Save