
server: Split the TransactionPostgresDAO into several files

prometheus-integration
Alexis Hernandez, 6 years ago
commit 668153d6c4
9 changed files (changed line count in parentheses):

1. server/app/com/xsn/explorer/data/anorm/TransactionPostgresDataHandler.scala (5)
2. server/app/com/xsn/explorer/data/anorm/dao/AddressTransactionDetailsPostgresDAO.scala (83)
3. server/app/com/xsn/explorer/data/anorm/dao/TransactionInputPostgresDAO.scala (75)
4. server/app/com/xsn/explorer/data/anorm/dao/TransactionOutputPostgresDAO.scala (128)
5. server/app/com/xsn/explorer/data/anorm/dao/TransactionPostgresDAO.scala (292)
6. server/test/com/xsn/explorer/data/LedgerPostgresDataHandlerSpec.scala (13)
7. server/test/com/xsn/explorer/data/TransactionPostgresDataHandlerSpec.scala (3)
8. server/test/com/xsn/explorer/helpers/DataHandlerObjects.scala (17)
9. server/test/com/xsn/explorer/services/LedgerSynchronizerServiceSpec.scala (19)

server/app/com/xsn/explorer/data/anorm/TransactionPostgresDataHandler.scala
@@ -4,7 +4,7 @@ import com.alexitc.playsonify.core.ApplicationResult
 import com.alexitc.playsonify.models.ordering.{FieldOrdering, OrderingCondition}
 import com.alexitc.playsonify.models.pagination.{Limit, PaginatedQuery, PaginatedResult}
 import com.xsn.explorer.data.TransactionBlockingDataHandler
-import com.xsn.explorer.data.anorm.dao.TransactionPostgresDAO
+import com.xsn.explorer.data.anorm.dao.{TransactionOutputPostgresDAO, TransactionPostgresDAO}
 import com.xsn.explorer.models._
 import com.xsn.explorer.models.fields.TransactionField
 import javax.inject.Inject
@@ -13,6 +13,7 @@ import play.api.db.Database
 class TransactionPostgresDataHandler @Inject() (
     override val database: Database,
+    transactionOutputDAO: TransactionOutputPostgresDAO,
     transactionPostgresDAO: TransactionPostgresDAO)
     extends TransactionBlockingDataHandler
     with AnormPostgresDataHandler {
@@ -43,7 +44,7 @@ class TransactionPostgresDataHandler @Inject() (
   }

   override def getUnspentOutputs(address: Address): ApplicationResult[List[Transaction.Output]] = withConnection { implicit conn =>
-    val result = transactionPostgresDAO.getUnspentOutputs(address)
+    val result = transactionOutputDAO.getUnspentOutputs(address)
     Good(result)
   }
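The handler now receives the output DAO directly, so getUnspentOutputs no longer routes through TransactionPostgresDAO. A minimal wiring sketch without Guice, using only the constructors this commit defines (the wire function and its db parameter are hypothetical stand-ins for the injected dependencies):

import com.alexitc.playsonify.sql.FieldOrderingSQLInterpreter
import com.xsn.explorer.data.anorm.TransactionPostgresDataHandler
import com.xsn.explorer.data.anorm.dao._
import play.api.db.Database

// Build the DAO graph by hand; Guice does the same through @Inject.
def wire(db: Database): TransactionPostgresDataHandler = {
  val outputDAO = new TransactionOutputPostgresDAO
  val transactionDAO = new TransactionPostgresDAO(
    new TransactionInputPostgresDAO,
    outputDAO,
    new AddressTransactionDetailsPostgresDAO,
    new FieldOrderingSQLInterpreter)
  new TransactionPostgresDataHandler(db, outputDAO, transactionDAO)
}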

server/app/com/xsn/explorer/data/anorm/dao/AddressTransactionDetailsPostgresDAO.scala
@@ -0,0 +1,83 @@
package com.xsn.explorer.data.anorm.dao

import java.sql.Connection

import anorm._
import com.xsn.explorer.data.anorm.parsers.TransactionParsers._
import com.xsn.explorer.models.{AddressTransactionDetails, Transaction, TransactionId}

class AddressTransactionDetailsPostgresDAO {

  def batchInsertDetails(transaction: Transaction)(implicit conn: Connection): Option[Unit] = {
    val received = transaction
        .outputs
        .groupBy(_.address)
        .mapValues { outputs => outputs.map(_.value).sum }
        .map { case (address, value) => AddressTransactionDetails(address, transaction.id, time = transaction.time, received = value) }

    val sent = transaction
        .inputs
        .groupBy(_.address)
        .mapValues { inputs => inputs.map(_.value).sum }
        .map { case (address, value) => AddressTransactionDetails(address, transaction.id, time = transaction.time, sent = value) }

    val details = (received ++ sent)
        .groupBy(_.address)
        .mapValues {
          case head :: list => list.foldLeft(head) { (acc, current) =>
            current.copy(received = current.received + acc.received, sent = current.sent + acc.sent)
          }
        }
        .values

    batchInsertDetails(details.toList)
  }

  def batchInsertDetails(details: List[AddressTransactionDetails])(implicit conn: Connection): Option[Unit] = {
    details match {
      case Nil => Some(())
      case _ =>
        val params = details.map { d =>
          List(
            'address -> d.address.string: NamedParameter,
            'txid -> d.txid.string: NamedParameter,
            'received -> d.received: NamedParameter,
            'sent -> d.sent: NamedParameter,
            'time -> d.time: NamedParameter)
        }

        val batch = BatchSql(
          """
            |INSERT INTO address_transaction_details
            |  (address, txid, received, sent, time)
            |VALUES
            |  ({address}, {txid}, {received}, {sent}, {time})
          """.stripMargin,
          params.head,
          params.tail: _*
        )

        val success = batch.execute().forall(_ == 1)
        if (success) {
          Some(())
        } else {
          None
        }
    }
  }

  def deleteDetails(txid: TransactionId)(implicit conn: Connection): List[AddressTransactionDetails] = {
    val result = SQL(
      """
        |DELETE FROM address_transaction_details
        |WHERE txid = {txid}
        |RETURNING address, txid, received, sent, time
      """.stripMargin
    ).on(
      'txid -> txid.string
    ).as(parseAddressTransactionDetails.*)

    result
  }
}
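For intuition, batchInsertDetails(transaction) folds the per-address totals before writing, so an address that appears in both the inputs and the outputs ends up as a single row carrying both sums. A standalone sketch of the merge step on invented values, with a simplified Details standing in for AddressTransactionDetails:

case class Details(address: String, received: BigDecimal = 0, sent: BigDecimal = 0)

// Hypothetical per-address totals from the two groupBy passes above.
val received = List(Details("X1", received = 10), Details("X2", received = 3))
val sent = List(Details("X1", sent = 4))

val merged = (received ++ sent)
  .groupBy(_.address)
  .mapValues { case head :: tail =>
    tail.foldLeft(head) { (acc, d) =>
      d.copy(received = d.received + acc.received, sent = d.sent + acc.sent)
    }
  }
  .values
  .toList
// merged contains Details("X1", 10, 4) and Details("X2", 3, 0)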

server/app/com/xsn/explorer/data/anorm/dao/TransactionInputPostgresDAO.scala
@@ -0,0 +1,75 @@
package com.xsn.explorer.data.anorm.dao

import java.sql.Connection

import anorm._
import com.xsn.explorer.data.anorm.parsers.TransactionParsers._
import com.xsn.explorer.models.{Address, Transaction, TransactionId}

class TransactionInputPostgresDAO {

  def batchInsertInputs(
      inputs: List[(TransactionId, Transaction.Input)])(
      implicit conn: Connection): Option[List[(TransactionId, Transaction.Input)]] = {

    inputs match {
      case Nil => Some(inputs)
      case _ =>
        val params = inputs.map { case (txid, input) =>
          List(
            'txid -> txid.string: NamedParameter,
            'index -> input.index: NamedParameter,
            'from_txid -> input.fromTxid.string: NamedParameter,
            'from_output_index -> input.fromOutputIndex: NamedParameter,
            'value -> input.value: NamedParameter,
            'address -> input.address.string: NamedParameter)
        }

        val batch = BatchSql(
          """
            |INSERT INTO transaction_inputs
            |  (txid, index, from_txid, from_output_index, value, address)
            |VALUES
            |  ({txid}, {index}, {from_txid}, {from_output_index}, {value}, {address})
          """.stripMargin,
          params.head,
          params.tail: _*
        )

        val success = batch.execute().forall(_ == 1)
        if (success) {
          Some(inputs)
        } else {
          None
        }
    }
  }

  def deleteInputs(txid: TransactionId)(implicit conn: Connection): List[Transaction.Input] = {
    SQL(
      """
        |DELETE FROM transaction_inputs
        |WHERE txid = {txid}
        |RETURNING txid, index, from_txid, from_output_index, value, address
      """.stripMargin
    ).on(
      'txid -> txid.string
    ).as(parseTransactionInput.*).flatten
  }

  def getInputs(txid: TransactionId, address: Address)(implicit conn: Connection): List[Transaction.Input] = {
    SQL(
      """
        |SELECT txid, index, from_txid, from_output_index, value, address
        |FROM transaction_inputs
        |WHERE txid = {txid} AND
        |      address = {address}
      """.stripMargin
    ).on(
      'txid -> txid.string,
      'address -> address.string
    ).as(parseTransactionInput.*).flatten
  }
}
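A usage sketch: the DAO expects each input paired with the id of the transaction that contains it, and it runs on whatever connection is in scope. This assumes a Play db: play.api.db.Database and a parsed tx: Transaction, both hypothetical here:

db.withTransaction { implicit conn =>
  val inputDAO = new TransactionInputPostgresDAO
  val pairs = tx.inputs.map(tx.id -> _) // key each input by its containing transaction
  inputDAO.batchInsertInputs(pairs)
    .getOrElse(sys.error("input batch failed")) // throwing makes withTransaction roll back
}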

server/app/com/xsn/explorer/data/anorm/dao/TransactionOutputPostgresDAO.scala
@@ -0,0 +1,128 @@
package com.xsn.explorer.data.anorm.dao

import java.sql.Connection

import anorm._
import com.xsn.explorer.data.anorm.parsers.TransactionParsers._
import com.xsn.explorer.models.{Address, Transaction, TransactionId}

class TransactionOutputPostgresDAO {

  def getUnspentOutputs(address: Address)(implicit conn: Connection): List[Transaction.Output] = {
    SQL(
      """
        |SELECT txid, index, value, address, hex_script, tpos_owner_address, tpos_merchant_address
        |FROM transaction_outputs
        |WHERE address = {address} AND
        |      spent_on IS NULL AND
        |      value > 0
      """.stripMargin
    ).on(
      'address -> address.string
    ).as(parseTransactionOutput.*).flatten
  }

  def batchInsertOutputs(
      outputs: List[Transaction.Output])(
      implicit conn: Connection): Option[List[Transaction.Output]] = {

    outputs match {
      case Nil => Some(outputs)
      case _ =>
        val params = outputs.map { output =>
          List(
            'txid -> output.txid.string: NamedParameter,
            'index -> output.index: NamedParameter,
            'value -> output.value: NamedParameter,
            'address -> output.address.string: NamedParameter,
            'hex_script -> output.script.string: NamedParameter,
            'tpos_owner_address -> output.tposOwnerAddress.map(_.string): NamedParameter,
            'tpos_merchant_address -> output.tposMerchantAddress.map(_.string): NamedParameter)
        }

        val batch = BatchSql(
          """
            |INSERT INTO transaction_outputs
            |  (txid, index, value, address, hex_script, tpos_owner_address, tpos_merchant_address)
            |VALUES
            |  ({txid}, {index}, {value}, {address}, {hex_script}, {tpos_owner_address}, {tpos_merchant_address})
          """.stripMargin,
          params.head,
          params.tail: _*
        )

        val success = batch.execute().forall(_ == 1)
        if (success) {
          Some(outputs)
        } else {
          None
        }
    }
  }

  def deleteOutputs(txid: TransactionId)(implicit conn: Connection): List[Transaction.Output] = {
    val result = SQL(
      """
        |DELETE FROM transaction_outputs
        |WHERE txid = {txid}
        |RETURNING txid, index, hex_script, value, address, tpos_owner_address, tpos_merchant_address
      """.stripMargin
    ).on(
      'txid -> txid.string
    ).as(parseTransactionOutput.*)

    result.flatten
  }

  def getOutputs(txid: TransactionId, address: Address)(implicit conn: Connection): List[Transaction.Output] = {
    SQL(
      """
        |SELECT txid, index, hex_script, value, address, tpos_owner_address, tpos_merchant_address
        |FROM transaction_outputs
        |WHERE txid = {txid} AND
        |      address = {address}
      """.stripMargin
    ).on(
      'txid -> txid.string,
      'address -> address.string
    ).as(parseTransactionOutput.*).flatten
  }

  def batchSpend(txid: TransactionId, inputs: List[Transaction.Input])(implicit conn: Connection): Option[Unit] = {
    inputs match {
      case Nil => Option(())
      case _ =>
        val txidArray = inputs
            .map { input => s"'${input.fromTxid.string}'" }
            .mkString("[", ",", "]")

        val indexArray = inputs.map(_.fromOutputIndex).mkString("[", ",", "]")

        // Note: the TransactionId must meet a safe format, this approach speeds up the inserts
        val result = SQL(
          s"""
            |UPDATE transaction_outputs t
            |SET spent_on = tmp.spent_on
            |FROM (
            |  WITH CTE AS (
            |    SELECT '${txid.string}' AS spent_on
            |  )
            |  SELECT spent_on, txid, index
            |  FROM CTE CROSS JOIN (SELECT
            |    UNNEST(array$indexArray) AS index,
            |    UNNEST(array$txidArray) AS txid) x
            |) AS tmp
            |WHERE t.txid = tmp.txid AND
            |      t.index = tmp.index
          """.stripMargin
        ).executeUpdate()

        if (result == inputs.size) {
          Option(())
        } else {
          None
        }
    }
  }
}
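batchSpend trades parameter binding for string interpolation so that all spent outputs are marked in a single statement: the from-txids and output indexes become PostgreSQL array literals that are UNNESTed into a derived table driving the UPDATE. Roughly the SQL produced for a spending txid of 'ccc' and inputs referencing ('aaa', 0) and ('bbb', 2) — ids shortened for illustration:

// Illustration only: what the interpolated statement looks like for the sample inputs above.
val generated =
  """UPDATE transaction_outputs t
    |SET spent_on = tmp.spent_on
    |FROM (
    |  WITH CTE AS (
    |    SELECT 'ccc' AS spent_on
    |  )
    |  SELECT spent_on, txid, index
    |  FROM CTE CROSS JOIN (SELECT
    |    UNNEST(array[0,2]) AS index,
    |    UNNEST(array['aaa','bbb']) AS txid) x
    |) AS tmp
    |WHERE t.txid = tmp.txid AND
    |      t.index = tmp.index""".stripMargin

As the in-code note points out, this is only safe because TransactionId is constrained to a fixed format; interpolating arbitrary strings here would be a SQL injection vector.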

server/app/com/xsn/explorer/data/anorm/dao/TransactionPostgresDAO.scala
@@ -11,7 +11,11 @@ import com.xsn.explorer.models._
 import com.xsn.explorer.models.fields.TransactionField
 import javax.inject.Inject

-class TransactionPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLInterpreter) {
+class TransactionPostgresDAO @Inject() (
+    transactionInputDAO: TransactionInputPostgresDAO,
+    transactionOutputDAO: TransactionOutputPostgresDAO,
+    addressTransactionDetailsDAO: AddressTransactionDetailsPostgresDAO,
+    fieldOrderingSQLInterpreter: FieldOrderingSQLInterpreter) {

   /**
    * NOTE: Ensure the connection has an open transaction.
@@ -19,10 +23,10 @@ class TransactionPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLInterpreter) {
   def upsert(index: Int, transaction: Transaction)(implicit conn: Connection): Option[Transaction] = {
     for {
       partialTx <- upsertTransaction(index, transaction)
-      _ <- batchInsertOutputs(transaction.outputs)
-      _ <- batchInsertInputs(transaction.inputs.map(transaction.id -> _))
-      _ <- batchSpend(transaction.id, transaction.inputs)
-      _ <- batchInsertDetails(transaction)
+      _ <- transactionOutputDAO.batchInsertOutputs(transaction.outputs)
+      _ <- transactionInputDAO.batchInsertInputs(transaction.inputs.map(transaction.id -> _))
+      _ <- transactionOutputDAO.batchSpend(transaction.id, transaction.inputs)
+      _ <- addressTransactionDetailsDAO.batchInsertDetails(transaction)
     } yield partialTx.copy(inputs = transaction.inputs, outputs = transaction.outputs)
   }

@@ -31,15 +35,15 @@ class TransactionPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLInterpreter) {
       r <- batchInsert(transactions)

       outputs = transactions.flatMap(_.outputs)
-      _ <- batchInsertOutputs(outputs)
+      _ <- transactionOutputDAO.batchInsertOutputs(outputs)

       inputs = transactions.flatMap { tx => tx.inputs.map(tx.id -> _) }
-      _ <- batchInsertInputs(inputs)
+      _ <- transactionInputDAO.batchInsertInputs(inputs)
     } yield {
       val extra = for {
         tx <- transactions
-        _ <- batchInsertDetails(tx)
-        _ <- batchSpend(tx.id, tx.inputs)
+        _ <- addressTransactionDetailsDAO.batchInsertDetails(tx)
+        _ <- transactionOutputDAO.batchSpend(tx.id, tx.inputs)
       } yield tx

       assert(extra.size == transactions.size, "Not all transactions were inserted properly")
@@ -98,9 +102,9 @@ class TransactionPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLInterpreter) {
     ).as(parseTransaction.*).flatten

     val result = expectedTransactions.map { tx =>
-      val inputs = deleteInputs(tx.id)
-      val outputs = deleteOutputs(tx.id)
-      val _ = deleteDetails(tx.id)
+      val inputs = transactionInputDAO.deleteInputs(tx.id)
+      val outputs = transactionOutputDAO.deleteOutputs(tx.id)
+      val _ = addressTransactionDetailsDAO.deleteDetails(tx.id)
       tx.copy(inputs = inputs, outputs = outputs)
     }

@@ -143,8 +147,8 @@ class TransactionPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLInterpreter) {
     for {
       tx <- transactions
     } yield {
-      val inputs = getInputs(tx.id, address)
-      val outputs = getOutputs(tx.id, address)
+      val inputs = transactionInputDAO.getInputs(tx.id, address)
+      val outputs = transactionOutputDAO.getOutputs(tx.id, address)
       tx.copy(inputs = inputs, outputs = outputs)
     }
   }
@@ -192,8 +196,8 @@ class TransactionPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLInterpreter) {
     for {
       tx <- transactions
     } yield {
-      val inputs = getInputs(tx.id, address)
-      val outputs = getOutputs(tx.id, address)
+      val inputs = transactionInputDAO.getInputs(tx.id, address)
+      val outputs = transactionOutputDAO.getOutputs(tx.id, address)
       tx.copy(inputs = inputs, outputs = outputs)
     }
   }
@@ -318,20 +322,6 @@ class TransactionPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLInterpreter) {
     ).as(parseTransactionWithValues.*).flatten
   }

-  def getUnspentOutputs(address: Address)(implicit conn: Connection): List[Transaction.Output] = {
-    SQL(
-      """
-        |SELECT txid, index, value, address, hex_script, tpos_owner_address, tpos_merchant_address
-        |FROM transaction_outputs
-        |WHERE address = {address} AND
-        |      spent_on IS NULL AND
-        |      value > 0
-      """.stripMargin
-    ).on(
-      'address -> address.string
-    ).as(parseTransactionOutput.*).flatten
-  }
-
   private def upsertTransaction(index: Int, transaction: Transaction)(implicit conn: Connection): Option[Transaction] = {
     SQL(
       """
@@ -355,248 +345,6 @@ class TransactionPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLInterpreter) {
     ).as(parseTransaction.singleOpt).flatten
   }

-  private def batchInsertInputs(
-      inputs: List[(TransactionId, Transaction.Input)])(
-      implicit conn: Connection): Option[List[(TransactionId, Transaction.Input)]] = {
-
-    inputs match {
-      case Nil => Some(inputs)
-      case _ =>
-        val params = inputs.map { case (txid, input) =>
-          List(
-            'txid -> txid.string: NamedParameter,
-            'index -> input.index: NamedParameter,
-            'from_txid -> input.fromTxid.string: NamedParameter,
-            'from_output_index -> input.fromOutputIndex: NamedParameter,
-            'value -> input.value: NamedParameter,
-            'address -> input.address.string: NamedParameter)
-        }
-
-        val batch = BatchSql(
-          """
-            |INSERT INTO transaction_inputs
-            |  (txid, index, from_txid, from_output_index, value, address)
-            |VALUES
-            |  ({txid}, {index}, {from_txid}, {from_output_index}, {value}, {address})
-          """.stripMargin,
-          params.head,
-          params.tail: _*
-        )
-
-        val success = batch.execute().forall(_ == 1)
-        if (success) {
-          Some(inputs)
-        } else {
-          None
-        }
-    }
-  }
-
-  private def batchInsertOutputs(
-      outputs: List[Transaction.Output])(
-      implicit conn: Connection): Option[List[Transaction.Output]] = {
-
-    outputs match {
-      case Nil => Some(outputs)
-      case _ =>
-        val params = outputs.map { output =>
-          List(
-            'txid -> output.txid.string: NamedParameter,
-            'index -> output.index: NamedParameter,
-            'value -> output.value: NamedParameter,
-            'address -> output.address.string: NamedParameter,
-            'hex_script -> output.script.string: NamedParameter,
-            'tpos_owner_address -> output.tposOwnerAddress.map(_.string): NamedParameter,
-            'tpos_merchant_address -> output.tposMerchantAddress.map(_.string): NamedParameter)
-        }
-
-        val batch = BatchSql(
-          """
-            |INSERT INTO transaction_outputs
-            |  (txid, index, value, address, hex_script, tpos_owner_address, tpos_merchant_address)
-            |VALUES
-            |  ({txid}, {index}, {value}, {address}, {hex_script}, {tpos_owner_address}, {tpos_merchant_address})
-          """.stripMargin,
-          params.head,
-          params.tail: _*
-        )
-
-        val success = batch.execute().forall(_ == 1)
-        if (success) {
-          Some(outputs)
-        } else {
-          None
-        }
-    }
-  }
-
-  private def deleteInputs(txid: TransactionId)(implicit conn: Connection): List[Transaction.Input] = {
-    SQL(
-      """
-        |DELETE FROM transaction_inputs
-        |WHERE txid = {txid}
-        |RETURNING txid, index, from_txid, from_output_index, value, address
-      """.stripMargin
-    ).on(
-      'txid -> txid.string
-    ).as(parseTransactionInput.*).flatten
-  }
-
-  private def deleteOutputs(txid: TransactionId)(implicit conn: Connection): List[Transaction.Output] = {
-    val result = SQL(
-      """
-        |DELETE FROM transaction_outputs
-        |WHERE txid = {txid}
-        |RETURNING txid, index, hex_script, value, address, tpos_owner_address, tpos_merchant_address
-      """.stripMargin
-    ).on(
-      'txid -> txid.string
-    ).as(parseTransactionOutput.*)
-
-    result.flatten
-  }
-
-  private def batchInsertDetails(transaction: Transaction)(implicit conn: Connection): Option[Unit] = {
-    val received = transaction
-        .outputs
-        .groupBy(_.address)
-        .mapValues { outputs => outputs.map(_.value).sum }
-        .map { case (address, value) => AddressTransactionDetails(address, transaction.id, time = transaction.time, received = value) }
-
-    val sent = transaction
-        .inputs
-        .groupBy(_.address)
-        .mapValues { inputs => inputs.map(_.value).sum }
-        .map { case (address, value) => AddressTransactionDetails(address, transaction.id, time = transaction.time, sent = value) }
-
-    val details = (received ++ sent)
-        .groupBy(_.address)
-        .mapValues {
-          case head :: list => list.foldLeft(head) { (acc, current) =>
-            current.copy(received = current.received + acc.received, sent = current.sent + acc.sent)
-          }
-        }
-        .values
-
-    batchInsertDetails(details.toList)
-  }
-
-  private def batchInsertDetails(details: List[AddressTransactionDetails])(implicit conn: Connection): Option[Unit] = {
-    details match {
-      case Nil => Some(())
-      case _ =>
-        val params = details.map { d =>
-          List(
-            'address -> d.address.string: NamedParameter,
-            'txid -> d.txid.string: NamedParameter,
-            'received -> d.received: NamedParameter,
-            'sent -> d.sent: NamedParameter,
-            'time -> d.time: NamedParameter)
-        }
-
-        val batch = BatchSql(
-          """
-            |INSERT INTO address_transaction_details
-            |  (address, txid, received, sent, time)
-            |VALUES
-            |  ({address}, {txid}, {received}, {sent}, {time})
-          """.stripMargin,
-          params.head,
-          params.tail: _*
-        )
-
-        val success = batch.execute().forall(_ == 1)
-        if (success) {
-          Some(())
-        } else {
-          None
-        }
-    }
-  }
-
-  private def deleteDetails(txid: TransactionId)(implicit conn: Connection): List[AddressTransactionDetails] = {
-    val result = SQL(
-      """
-        |DELETE FROM address_transaction_details
-        |WHERE txid = {txid}
-        |RETURNING address, txid, received, sent, time
-      """.stripMargin
-    ).on(
-      'txid -> txid.string
-    ).as(parseAddressTransactionDetails.*)
-
-    result
-  }
-
-  private def getInputs(txid: TransactionId, address: Address)(implicit conn: Connection): List[Transaction.Input] = {
-    SQL(
-      """
-        |SELECT txid, index, from_txid, from_output_index, value, address
-        |FROM transaction_inputs
-        |WHERE txid = {txid} AND
-        |      address = {address}
-      """.stripMargin
-    ).on(
-      'txid -> txid.string,
-      'address -> address.string
-    ).as(parseTransactionInput.*).flatten
-  }
-
-  private def getOutputs(txid: TransactionId, address: Address)(implicit conn: Connection): List[Transaction.Output] = {
-    SQL(
-      """
-        |SELECT txid, index, hex_script, value, address, tpos_owner_address, tpos_merchant_address
-        |FROM transaction_outputs
-        |WHERE txid = {txid} AND
-        |      address = {address}
-      """.stripMargin
-    ).on(
-      'txid -> txid.string,
-      'address -> address.string
-    ).as(parseTransactionOutput.*).flatten
-  }
-
-  private def batchSpend(txid: TransactionId, inputs: List[Transaction.Input])(implicit conn: Connection): Option[Unit] = {
-    inputs match {
-      case Nil => Option(())
-      case _ =>
-        val txidArray = inputs
-            .map { input => s"'${input.fromTxid.string}'" }
-            .mkString("[", ",", "]")
-
-        val indexArray = inputs.map(_.fromOutputIndex).mkString("[", ",", "]")
-
-        // Note: the TransactionId must meet a safe format, this approach speeds up the inserts
-        val result = SQL(
-          s"""
-            |UPDATE transaction_outputs t
-            |SET spent_on = tmp.spent_on
-            |FROM (
-            |  WITH CTE AS (
-            |    SELECT '${txid.string}' AS spent_on
-            |  )
-            |  SELECT spent_on, txid, index
-            |  FROM CTE CROSS JOIN (SELECT
-            |    UNNEST(array$indexArray) AS index,
-            |    UNNEST(array$txidArray) AS txid) x
-            |) AS tmp
-            |WHERE t.txid = tmp.txid AND
-            |      t.index = tmp.index
-          """.stripMargin
-        ).executeUpdate()
-
-        if (result == inputs.size) {
-          Option(())
-        } else {
-          None
-        }
-    }
-  }
-
   private def toSQL(condition: OrderingCondition): String = condition match {
     case OrderingCondition.AscendingOrder => "ASC"
     case OrderingCondition.DescendingOrder => "DESC"
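After the split, upsert is pure orchestration: insert the transaction row, then delegate outputs, inputs, spend-marking, and per-address details to the extracted DAOs, all on the caller's connection. A usage sketch, assuming db: Database, a parsed tx: Transaction, and a transactionDAO built as in the wiring sketch near the top (the index value is hypothetical; it orders transactions, e.g. within a block):

db.withTransaction { implicit conn =>
  // Option result: any failed step short-circuits the for-comprehension inside upsert.
  transactionDAO.upsert(index = 0, tx) match {
    case Some(stored) => stored // the row plus its inputs and outputs
    case None         => sys.error("upsert failed") // throwing rolls back the whole transaction
  }
}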

server/test/com/xsn/explorer/data/LedgerPostgresDataHandlerSpec.scala
@@ -1,10 +1,8 @@
 package com.xsn.explorer.data

-import com.alexitc.playsonify.sql.FieldOrderingSQLInterpreter
-import com.xsn.explorer.data.anorm.LedgerPostgresDataHandler
-import com.xsn.explorer.data.anorm.dao.{AggregatedAmountPostgresDAO, BalancePostgresDAO, BlockPostgresDAO, TransactionPostgresDAO}
 import com.xsn.explorer.data.common.PostgresDataHandlerSpec
 import com.xsn.explorer.errors.{PreviousBlockMissingError, RepeatedBlockHeightError}
+import com.xsn.explorer.helpers.DataHandlerObjects._
 import com.xsn.explorer.helpers.{BlockLoader, TransactionLoader}
 import com.xsn.explorer.models.Transaction
 import com.xsn.explorer.models.rpc.Block
@@ -13,12 +11,7 @@ import org.scalatest.BeforeAndAfter
 class LedgerPostgresDataHandlerSpec extends PostgresDataHandlerSpec with BeforeAndAfter {

-  lazy val dataHandler = new LedgerPostgresDataHandler(
-    database,
-    new BlockPostgresDAO(new FieldOrderingSQLInterpreter),
-    new TransactionPostgresDAO(new FieldOrderingSQLInterpreter),
-    new BalancePostgresDAO(new FieldOrderingSQLInterpreter),
-    new AggregatedAmountPostgresDAO)
+  lazy val dataHandler = createLedgerDataHandler(database)

   val blockList = List(
     BlockLoader.get("00000c822abdbb23e28f79a49d29b41429737c6c7e15df40d1b1f1b35907ae34"),
@@ -71,7 +64,7 @@ class LedgerPostgresDataHandlerSpec extends PostgresDataHandlerSpec with BeforeAndAfter {
       dataHandler.pop()
       fail()
     } catch {
-      case _ => ()
+      case _: Throwable => ()
     }
   }

server/test/com/xsn/explorer/data/TransactionPostgresDataHandlerSpec.scala
@@ -2,7 +2,6 @@ package com.xsn.explorer.data
 import com.alexitc.playsonify.models.ordering.{FieldOrdering, OrderingCondition}
 import com.alexitc.playsonify.models.pagination._
-import com.xsn.explorer.data.anorm.TransactionPostgresDataHandler
 import com.xsn.explorer.data.common.PostgresDataHandlerSpec
 import com.xsn.explorer.errors.{BlockNotFoundError, TransactionNotFoundError}
 import com.xsn.explorer.helpers.DataHandlerObjects._
@@ -18,7 +17,7 @@ class TransactionPostgresDataHandlerSpec extends PostgresDataHandlerSpec with BeforeAndAfter {
   import DataGenerator._

-  lazy val dataHandler = new TransactionPostgresDataHandler(database, transactionPostgresDAO)
+  lazy val dataHandler = createTransactionDataHandler(database)
   lazy val ledgerDataHandler = createLedgerDataHandler(database)
   lazy val blockDataHandler = createBlockDataHandler(database)

server/test/com/xsn/explorer/helpers/DataHandlerObjects.scala
@@ -1,14 +1,21 @@
 package com.xsn.explorer.helpers

 import com.alexitc.playsonify.sql.FieldOrderingSQLInterpreter
-import com.xsn.explorer.data.anorm.dao.{AggregatedAmountPostgresDAO, BalancePostgresDAO, BlockPostgresDAO, TransactionPostgresDAO}
-import com.xsn.explorer.data.anorm.{BlockPostgresDataHandler, LedgerPostgresDataHandler}
+import com.xsn.explorer.data.anorm.dao._
+import com.xsn.explorer.data.anorm.{BlockPostgresDataHandler, LedgerPostgresDataHandler, TransactionPostgresDataHandler}
 import play.api.db.Database

 trait DataHandlerObjects {

   lazy val fieldOrderingSQLInterpreter = new FieldOrderingSQLInterpreter
-  lazy val transactionPostgresDAO = new TransactionPostgresDAO(fieldOrderingSQLInterpreter)
+  lazy val transactionInputDAO = new TransactionInputPostgresDAO
+  lazy val transactionOutputDAO = new TransactionOutputPostgresDAO
+  lazy val addressTransactionDetailsDAO = new AddressTransactionDetailsPostgresDAO
+  lazy val transactionPostgresDAO = new TransactionPostgresDAO(
+    transactionInputDAO,
+    transactionOutputDAO,
+    addressTransactionDetailsDAO,
+    fieldOrderingSQLInterpreter)
   lazy val blockPostgresDAO = new BlockPostgresDAO(fieldOrderingSQLInterpreter)
   lazy val balancePostgresDAO = new BalancePostgresDAO(fieldOrderingSQLInterpreter)
   lazy val aggregatedAmountPostgresDAO = new AggregatedAmountPostgresDAO
@@ -25,6 +32,10 @@ trait DataHandlerObjects {
   def createBlockDataHandler(database: Database) = {
     new BlockPostgresDataHandler(database, blockPostgresDAO)
   }
+
+  def createTransactionDataHandler(database: Database) = {
+    new TransactionPostgresDataHandler(database, transactionOutputDAO, transactionPostgresDAO)
+  }
 }

 object DataHandlerObjects extends DataHandlerObjects
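With these factories, specs no longer build DAO graphs inline; the spec diffs in this commit all switch to them. A minimal sketch (MySpec is hypothetical; database comes from PostgresDataHandlerSpec):

// Either mix in the trait...
class MySpec extends PostgresDataHandlerSpec with DataHandlerObjects {
  lazy val dataHandler = createTransactionDataHandler(database)
}

// ...or import from the companion object, as the updated specs do:
// import com.xsn.explorer.helpers.DataHandlerObjects._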

server/test/com/xsn/explorer/services/LedgerSynchronizerServiceSpec.scala
@@ -1,13 +1,11 @@
 package com.xsn.explorer.services

 import com.alexitc.playsonify.core.FutureApplicationResult
-import com.alexitc.playsonify.sql.FieldOrderingSQLInterpreter
 import com.alexitc.playsonify.validators.PaginatedQueryValidator
-import com.xsn.explorer.data.anorm.dao.{AggregatedAmountPostgresDAO, BalancePostgresDAO, BlockPostgresDAO, TransactionPostgresDAO}
-import com.xsn.explorer.data.anorm.{BlockPostgresDataHandler, LedgerPostgresDataHandler, TransactionPostgresDataHandler}
 import com.xsn.explorer.data.async.{BlockFutureDataHandler, LedgerFutureDataHandler, TransactionFutureDataHandler}
 import com.xsn.explorer.data.common.PostgresDataHandlerSpec
 import com.xsn.explorer.errors.BlockNotFoundError
+import com.xsn.explorer.helpers.DataHandlerObjects._
 import com.xsn.explorer.helpers._
 import com.xsn.explorer.models.rpc.Block
 import com.xsn.explorer.models.{Blockhash, Height}
@@ -20,18 +18,9 @@ import scala.concurrent.Future
 class LedgerSynchronizerServiceSpec extends PostgresDataHandlerSpec with BeforeAndAfter with ScalaFutures {

-  lazy val dataHandler = new LedgerPostgresDataHandler(
-    database,
-    new BlockPostgresDAO(new FieldOrderingSQLInterpreter),
-    new TransactionPostgresDAO(new FieldOrderingSQLInterpreter),
-    new BalancePostgresDAO(new FieldOrderingSQLInterpreter),
-    new AggregatedAmountPostgresDAO)
-  lazy val transactionDataHandler = new TransactionPostgresDataHandler(
-    database,
-    new TransactionPostgresDAO(new FieldOrderingSQLInterpreter))
-  lazy val blockDataHandler = new BlockPostgresDataHandler(database, new BlockPostgresDAO(new FieldOrderingSQLInterpreter))
+  lazy val dataHandler = createLedgerDataHandler(database)
+  lazy val transactionDataHandler = createTransactionDataHandler(database)
+  lazy val blockDataHandler = createBlockDataHandler(database)

   val blockList = List(
     BlockLoader.get("00000c822abdbb23e28f79a49d29b41429737c6c7e15df40d1b1f1b35907ae34"),
