From 668153d6c4ad4e40a40649d7025934e44facfdff Mon Sep 17 00:00:00 2001
From: Alexis Hernandez
Date: Sat, 19 Jan 2019 23:25:13 -0700
Subject: [PATCH] server: Split the TransactionPostgresDAO into several files

---
 .../TransactionPostgresDataHandler.scala      |   5 +-
 ...AddressTransactionDetailsPostgresDAO.scala |  83 +++++
 .../dao/TransactionInputPostgresDAO.scala     |  75 +++++
 .../dao/TransactionOutputPostgresDAO.scala    | 128 ++++++++
 .../anorm/dao/TransactionPostgresDAO.scala    | 292 ++----------------
 .../data/LedgerPostgresDataHandlerSpec.scala  |  13 +-
 .../TransactionPostgresDataHandlerSpec.scala  |   3 +-
 .../explorer/helpers/DataHandlerObjects.scala |  17 +-
 .../LedgerSynchronizerServiceSpec.scala       |  19 +-
 9 files changed, 331 insertions(+), 304 deletions(-)
 create mode 100644 server/app/com/xsn/explorer/data/anorm/dao/AddressTransactionDetailsPostgresDAO.scala
 create mode 100644 server/app/com/xsn/explorer/data/anorm/dao/TransactionInputPostgresDAO.scala
 create mode 100644 server/app/com/xsn/explorer/data/anorm/dao/TransactionOutputPostgresDAO.scala

diff --git a/server/app/com/xsn/explorer/data/anorm/TransactionPostgresDataHandler.scala b/server/app/com/xsn/explorer/data/anorm/TransactionPostgresDataHandler.scala
index 6c572ef..92ad73e 100644
--- a/server/app/com/xsn/explorer/data/anorm/TransactionPostgresDataHandler.scala
+++ b/server/app/com/xsn/explorer/data/anorm/TransactionPostgresDataHandler.scala
@@ -4,7 +4,7 @@ import com.alexitc.playsonify.core.ApplicationResult
 import com.alexitc.playsonify.models.ordering.{FieldOrdering, OrderingCondition}
 import com.alexitc.playsonify.models.pagination.{Limit, PaginatedQuery, PaginatedResult}
 import com.xsn.explorer.data.TransactionBlockingDataHandler
-import com.xsn.explorer.data.anorm.dao.TransactionPostgresDAO
+import com.xsn.explorer.data.anorm.dao.{TransactionOutputPostgresDAO, TransactionPostgresDAO}
 import com.xsn.explorer.models._
 import com.xsn.explorer.models.fields.TransactionField
 import javax.inject.Inject
@@ -13,6 +13,7 @@ import play.api.db.Database
 
 class TransactionPostgresDataHandler @Inject() (
     override val database: Database,
+    transactionOutputDAO: TransactionOutputPostgresDAO,
     transactionPostgresDAO: TransactionPostgresDAO)
     extends TransactionBlockingDataHandler
     with AnormPostgresDataHandler {
@@ -43,7 +44,7 @@ class TransactionPostgresDataHandler @Inject() (
   }
 
   override def getUnspentOutputs(address: Address): ApplicationResult[List[Transaction.Output]] = withConnection { implicit conn =>
-    val result = transactionPostgresDAO.getUnspentOutputs(address)
+    val result = transactionOutputDAO.getUnspentOutputs(address)
 
     Good(result)
   }
diff --git a/server/app/com/xsn/explorer/data/anorm/dao/AddressTransactionDetailsPostgresDAO.scala b/server/app/com/xsn/explorer/data/anorm/dao/AddressTransactionDetailsPostgresDAO.scala
new file mode 100644
index 0000000..756a094
--- /dev/null
+++ b/server/app/com/xsn/explorer/data/anorm/dao/AddressTransactionDetailsPostgresDAO.scala
@@ -0,0 +1,83 @@
+package com.xsn.explorer.data.anorm.dao
+
+import java.sql.Connection
+
+import anorm._
+import com.xsn.explorer.data.anorm.parsers.TransactionParsers._
+import com.xsn.explorer.models.{AddressTransactionDetails, Transaction, TransactionId}
+
+class AddressTransactionDetailsPostgresDAO {
+
+  def batchInsertDetails(transaction: Transaction)(implicit conn: Connection): Option[Unit] = {
+    val received = transaction
+      .outputs
+      .groupBy(_.address)
+      .mapValues { outputs => outputs.map(_.value).sum }
+      .map { case (address, value) => AddressTransactionDetails(address, transaction.id, time = transaction.time, received = value) }
+
+    val sent = transaction
+      .inputs
+      .groupBy(_.address)
+      .mapValues { inputs => inputs.map(_.value).sum }
+      .map { case (address, value) => AddressTransactionDetails(address, transaction.id, time = transaction.time, sent = value) }
+
+    val details = (received ++ sent)
+      .groupBy(_.address)
+      .mapValues {
+        case head :: list => list.foldLeft(head) { (acc, current) =>
+          current.copy(received = current.received + acc.received, sent = current.sent + acc.sent)
+        }
+      }
+      .values
+
+    batchInsertDetails(details.toList)
+  }
+
+  def batchInsertDetails(details: List[AddressTransactionDetails])(implicit conn: Connection): Option[Unit] = {
+    details match {
+      case Nil => Some(())
+      case _ =>
+        val params = details.map { d =>
+          List(
+            'address -> d.address.string: NamedParameter,
+            'txid -> d.txid.string: NamedParameter,
+            'received -> d.received: NamedParameter,
+            'sent -> d.sent: NamedParameter,
+            'time -> d.time: NamedParameter)
+        }
+
+        val batch = BatchSql(
+          """
+            |INSERT INTO address_transaction_details
+            |  (address, txid, received, sent, time)
+            |VALUES
+            |  ({address}, {txid}, {received}, {sent}, {time})
+          """.stripMargin,
+          params.head,
+          params.tail: _*
+        )
+
+        val success = batch.execute().forall(_ == 1)
+
+        if (success) {
+          Some(())
+        } else {
+          None
+        }
+    }
+  }
+
+  def deleteDetails(txid: TransactionId)(implicit conn: Connection): List[AddressTransactionDetails] = {
+    val result = SQL(
+      """
+        |DELETE FROM address_transaction_details
+        |WHERE txid = {txid}
+        |RETURNING address, txid, received, sent, time
+      """.stripMargin
+    ).on(
+      'txid -> txid.string
+    ).as(parseAddressTransactionDetails.*)
+
+    result
+  }
+}
diff --git a/server/app/com/xsn/explorer/data/anorm/dao/TransactionInputPostgresDAO.scala b/server/app/com/xsn/explorer/data/anorm/dao/TransactionInputPostgresDAO.scala
new file mode 100644
index 0000000..b13f64e
--- /dev/null
+++ b/server/app/com/xsn/explorer/data/anorm/dao/TransactionInputPostgresDAO.scala
@@ -0,0 +1,75 @@
+package com.xsn.explorer.data.anorm.dao
+
+import java.sql.Connection
+
+import anorm._
+import com.xsn.explorer.data.anorm.parsers.TransactionParsers._
+import com.xsn.explorer.models.{Address, Transaction, TransactionId}
+
+class TransactionInputPostgresDAO {
+
+  def batchInsertInputs(
+      inputs: List[(TransactionId, Transaction.Input)])(
+      implicit conn: Connection): Option[List[(TransactionId, Transaction.Input)]] = {
+
+    inputs match {
+      case Nil => Some(inputs)
+
+      case _ =>
+        val params = inputs.map { case (txid, input) =>
+          List(
+            'txid -> txid.string: NamedParameter,
+            'index -> input.index: NamedParameter,
+            'from_txid -> input.fromTxid.string: NamedParameter,
+            'from_output_index -> input.fromOutputIndex: NamedParameter,
+            'value -> input.value: NamedParameter,
+            'address -> input.address.string: NamedParameter)
+        }
+
+        val batch = BatchSql(
+          """
+            |INSERT INTO transaction_inputs
+            |  (txid, index, from_txid, from_output_index, value, address)
+            |VALUES
+            |  ({txid}, {index}, {from_txid}, {from_output_index}, {value}, {address})
+          """.stripMargin,
+          params.head,
+          params.tail: _*
+        )
+
+        val success = batch.execute().forall(_ == 1)
+
+        if (success) {
+          Some(inputs)
+        } else {
+          None
+        }
+    }
+  }
+
+  def deleteInputs(txid: TransactionId)(implicit conn: Connection): List[Transaction.Input] = {
+    SQL(
+      """
+        |DELETE FROM transaction_inputs
+        |WHERE txid = {txid}
+        |RETURNING txid, index, from_txid, from_output_index, value, address
+      """.stripMargin
+    ).on(
+      'txid -> txid.string
+    ).as(parseTransactionInput.*).flatten
+  }
+
+  def getInputs(txid: TransactionId, address: Address)(implicit conn: Connection): List[Transaction.Input] = {
+    SQL(
+      """
+        |SELECT txid, index, from_txid, from_output_index, value, address
+        |FROM transaction_inputs
+        |WHERE txid = {txid} AND
+        |      address = {address}
+      """.stripMargin
+    ).on(
+      'txid -> txid.string,
+      'address -> address.string
+    ).as(parseTransactionInput.*).flatten
+  }
+}
diff --git a/server/app/com/xsn/explorer/data/anorm/dao/TransactionOutputPostgresDAO.scala b/server/app/com/xsn/explorer/data/anorm/dao/TransactionOutputPostgresDAO.scala
new file mode 100644
index 0000000..6a7adfa
--- /dev/null
+++ b/server/app/com/xsn/explorer/data/anorm/dao/TransactionOutputPostgresDAO.scala
@@ -0,0 +1,128 @@
+package com.xsn.explorer.data.anorm.dao
+
+import java.sql.Connection
+
+import anorm._
+import com.xsn.explorer.data.anorm.parsers.TransactionParsers._
+import com.xsn.explorer.models.{Address, Transaction, TransactionId}
+
+class TransactionOutputPostgresDAO {
+
+  def getUnspentOutputs(address: Address)(implicit conn: Connection): List[Transaction.Output] = {
+    SQL(
+      """
+        |SELECT txid, index, value, address, hex_script, tpos_owner_address, tpos_merchant_address
+        |FROM transaction_outputs
+        |WHERE address = {address} AND
+        |      spent_on IS NULL AND
+        |      value > 0
+      """.stripMargin
+    ).on(
+      'address -> address.string
+    ).as(parseTransactionOutput.*).flatten
+  }
+
+  def batchInsertOutputs(
+      outputs: List[Transaction.Output])(
+      implicit conn: Connection): Option[List[Transaction.Output]] = {
+
+    outputs match {
+      case Nil => Some(outputs)
+      case _ =>
+        val params = outputs.map { output =>
+          List(
+            'txid -> output.txid.string: NamedParameter,
+            'index -> output.index: NamedParameter,
+            'value -> output.value: NamedParameter,
+            'address -> output.address.string: NamedParameter,
+            'hex_script -> output.script.string: NamedParameter,
+            'tpos_owner_address -> output.tposOwnerAddress.map(_.string): NamedParameter,
+            'tpos_merchant_address -> output.tposMerchantAddress.map(_.string): NamedParameter)
+        }
+
+        val batch = BatchSql(
+          """
+            |INSERT INTO transaction_outputs
+            |  (txid, index, value, address, hex_script, tpos_owner_address, tpos_merchant_address)
+            |VALUES
+            |  ({txid}, {index}, {value}, {address}, {hex_script}, {tpos_owner_address}, {tpos_merchant_address})
+          """.stripMargin,
+          params.head,
+          params.tail: _*
+        )
+
+        val success = batch.execute().forall(_ == 1)
+
+        if (success) {
+          Some(outputs)
+        } else {
+          None
+        }
+    }
+  }
+
+  def deleteOutputs(txid: TransactionId)(implicit conn: Connection): List[Transaction.Output] = {
+    val result = SQL(
+      """
+        |DELETE FROM transaction_outputs
+        |WHERE txid = {txid}
+        |RETURNING txid, index, hex_script, value, address, tpos_owner_address, tpos_merchant_address
+      """.stripMargin
+    ).on(
+      'txid -> txid.string
+    ).as(parseTransactionOutput.*)
+
+    result.flatten
+  }
+
+  def getOutputs(txid: TransactionId, address: Address)(implicit conn: Connection): List[Transaction.Output] = {
+    SQL(
+      """
+        |SELECT txid, index, hex_script, value, address, tpos_owner_address, tpos_merchant_address
+        |FROM transaction_outputs
+        |WHERE txid = {txid} AND
+        |      address = {address}
+      """.stripMargin
+    ).on(
+      'txid -> txid.string,
+      'address -> address.string
+    ).as(parseTransactionOutput.*).flatten
+  }
+
+  def batchSpend(txid: TransactionId, inputs: List[Transaction.Input])(implicit conn: Connection): Option[Unit] = {
+    inputs match {
+      case Nil => Option(())
+      case _ =>
+        val txidArray = inputs
+          .map { input => s"'${input.fromTxid.string}'" }
+          .mkString("[", ",", "]")
+
+        val indexArray = inputs.map(_.fromOutputIndex).mkString("[", ",", "]")
+
+        // Note: the TransactionId must meet a safe format, this approach speeds up the inserts
+        val result = SQL(
+          s"""
+            |UPDATE transaction_outputs t
+            |SET spent_on = tmp.spent_on
+            |FROM (
+            |  WITH CTE AS (
+            |    SELECT '${txid.string}' AS spent_on
+            |  )
+            |  SELECT spent_on, txid, index
+            |  FROM CTE CROSS JOIN (SELECT
+            |    UNNEST(array$indexArray) AS index,
+            |    UNNEST(array$txidArray) AS txid) x
+            |) AS tmp
+            |WHERE t.txid = tmp.txid AND
+            |      t.index = tmp.index
+          """.stripMargin
+        ).executeUpdate()
+
+        if (result == inputs.size) {
+          Option(())
+        } else {
+          None
+        }
+    }
+  }
+}
diff --git a/server/app/com/xsn/explorer/data/anorm/dao/TransactionPostgresDAO.scala b/server/app/com/xsn/explorer/data/anorm/dao/TransactionPostgresDAO.scala
index 7c4b72a..8ab9d22 100644
--- a/server/app/com/xsn/explorer/data/anorm/dao/TransactionPostgresDAO.scala
+++ b/server/app/com/xsn/explorer/data/anorm/dao/TransactionPostgresDAO.scala
@@ -11,7 +11,11 @@ import com.xsn.explorer.models._
 import com.xsn.explorer.models.fields.TransactionField
 import javax.inject.Inject
 
-class TransactionPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLInterpreter) {
+class TransactionPostgresDAO @Inject() (
+    transactionInputDAO: TransactionInputPostgresDAO,
+    transactionOutputDAO: TransactionOutputPostgresDAO,
+    addressTransactionDetailsDAO: AddressTransactionDetailsPostgresDAO,
+    fieldOrderingSQLInterpreter: FieldOrderingSQLInterpreter) {
 
   /**
    * NOTE: Ensure the connection has an open transaction.
@@ -19,10 +23,10 @@ class TransactionPostgresDAO @Inject() (fieldOrderi
   def upsert(index: Int, transaction: Transaction)(implicit conn: Connection): Option[Transaction] = {
     for {
       partialTx <- upsertTransaction(index, transaction)
-      _ <- batchInsertOutputs(transaction.outputs)
-      _ <- batchInsertInputs(transaction.inputs.map(transaction.id -> _))
-      _ <- batchSpend(transaction.id, transaction.inputs)
-      _ <- batchInsertDetails(transaction)
+      _ <- transactionOutputDAO.batchInsertOutputs(transaction.outputs)
+      _ <- transactionInputDAO.batchInsertInputs(transaction.inputs.map(transaction.id -> _))
+      _ <- transactionOutputDAO.batchSpend(transaction.id, transaction.inputs)
+      _ <- addressTransactionDetailsDAO.batchInsertDetails(transaction)
     } yield partialTx.copy(inputs = transaction.inputs, outputs = transaction.outputs)
   }
 
@@ -31,15 +35,15 @@ class TransactionPostgresDAO @Inject() (fieldOrderi
       r <- batchInsert(transactions)
 
       outputs = transactions.flatMap(_.outputs)
-      _ <- batchInsertOutputs(outputs)
+      _ <- transactionOutputDAO.batchInsertOutputs(outputs)
 
       inputs = transactions.flatMap { tx => tx.inputs.map(tx.id -> _) }
-      _ <- batchInsertInputs(inputs)
+      _ <- transactionInputDAO.batchInsertInputs(inputs)
     } yield {
       val extra = for {
         tx <- transactions
-        _ <- batchInsertDetails(tx)
-        _ <- batchSpend(tx.id, tx.inputs)
+        _ <- addressTransactionDetailsDAO.batchInsertDetails(tx)
+        _ <- transactionOutputDAO.batchSpend(tx.id, tx.inputs)
       } yield tx
 
       assert(extra.size == transactions.size, "Not all transactions were inserted properly")
@@ -98,9 +102,9 @@ class TransactionPostgresDAO @Inject() (fieldOrderi
     ).as(parseTransaction.*).flatten
 
     val result = expectedTransactions.map { tx =>
-      val inputs = deleteInputs(tx.id)
-      val outputs = deleteOutputs(tx.id)
-      val _ = deleteDetails(tx.id)
+      val inputs = transactionInputDAO.deleteInputs(tx.id)
+      val outputs = transactionOutputDAO.deleteOutputs(tx.id)
+      val _ = addressTransactionDetailsDAO.deleteDetails(tx.id)
       tx.copy(inputs = inputs, outputs = outputs)
     }
 
@@ -143,8 +147,8 @@ class TransactionPostgresDAO @Inject() (fieldOrderi
     for {
       tx <- transactions
     } yield {
-      val inputs = getInputs(tx.id, address)
-      val outputs = getOutputs(tx.id, address)
+      val inputs = transactionInputDAO.getInputs(tx.id, address)
+      val outputs = transactionOutputDAO.getOutputs(tx.id, address)
       tx.copy(inputs = inputs, outputs = outputs)
     }
   }
@@ -192,8 +196,8 @@ class TransactionPostgresDAO @Inject() (fieldOrderi
     for {
       tx <- transactions
     } yield {
-      val inputs = getInputs(tx.id, address)
-      val outputs = getOutputs(tx.id, address)
+      val inputs = transactionInputDAO.getInputs(tx.id, address)
+      val outputs = transactionOutputDAO.getOutputs(tx.id, address)
       tx.copy(inputs = inputs, outputs = outputs)
     }
   }
@@ -318,20 +322,6 @@ class TransactionPostgresDAO @Inject() (fieldOrderi
     ).as(parseTransactionWithValues.*).flatten
   }
 
-  def getUnspentOutputs(address: Address)(implicit conn: Connection): List[Transaction.Output] = {
-    SQL(
-      """
-        |SELECT txid, index, value, address, hex_script, tpos_owner_address, tpos_merchant_address
-        |FROM transaction_outputs
-        |WHERE address = {address} AND
-        |      spent_on IS NULL AND
-        |      value > 0
-      """.stripMargin
-    ).on(
-      'address -> address.string
-    ).as(parseTransactionOutput.*).flatten
-  }
-
   private def upsertTransaction(index: Int, transaction: Transaction)(implicit conn: Connection): Option[Transaction] = {
     SQL(
       """
@@ -355,248 +345,6 @@ class TransactionPostgresDAO @Inject() (fieldOrderi
     ).as(parseTransaction.singleOpt).flatten
   }
 
-  private def batchInsertInputs(
-      inputs: List[(TransactionId, Transaction.Input)])(
-      implicit conn: Connection): Option[List[(TransactionId, Transaction.Input)]] = {
-
-    inputs match {
-      case Nil => Some(inputs)
-
-      case _ =>
-        val params = inputs.map { case (txid, input) =>
-          List(
-            'txid -> txid.string: NamedParameter,
-            'index -> input.index: NamedParameter,
-            'from_txid -> input.fromTxid.string: NamedParameter,
-            'from_output_index -> input.fromOutputIndex: NamedParameter,
-            'value -> input.value: NamedParameter,
-            'address -> input.address.string: NamedParameter)
-        }
-
-        val batch = BatchSql(
-          """
-            |INSERT INTO transaction_inputs
-            |  (txid, index, from_txid, from_output_index, value, address)
-            |VALUES
-            |  ({txid}, {index}, {from_txid}, {from_output_index}, {value}, {address})
-          """.stripMargin,
-          params.head,
-          params.tail: _*
-        )
-
-        val success = batch.execute().forall(_ == 1)
-
-        if (success) {
-          Some(inputs)
-        } else {
-          None
-        }
-    }
-  }
-
-  private def batchInsertOutputs(
-      outputs: List[Transaction.Output])(
-      implicit conn: Connection): Option[List[Transaction.Output]] = {
-
-    outputs match {
-      case Nil => Some(outputs)
-      case _ =>
-        val params = outputs.map { output =>
-          List(
-            'txid -> output.txid.string: NamedParameter,
-            'index -> output.index: NamedParameter,
-            'value -> output.value: NamedParameter,
-            'address -> output.address.string: NamedParameter,
-            'hex_script -> output.script.string: NamedParameter,
-            'tpos_owner_address -> output.tposOwnerAddress.map(_.string): NamedParameter,
-            'tpos_merchant_address -> output.tposMerchantAddress.map(_.string): NamedParameter)
-        }
-
-        val batch = BatchSql(
-          """
-            |INSERT INTO transaction_outputs
-            |  (txid, index, value, address, hex_script, tpos_owner_address, tpos_merchant_address)
-            |VALUES
-            |  ({txid}, {index}, {value}, {address}, {hex_script}, {tpos_owner_address}, {tpos_merchant_address})
-          """.stripMargin,
-          params.head,
-          params.tail: _*
-        )
-
-        val success = batch.execute().forall(_ == 1)
-
-        if (success) {
-          Some(outputs)
-        } else {
-          None
-        }
-    }
-  }
-
-  private def deleteInputs(txid: TransactionId)(implicit conn: Connection): List[Transaction.Input] = {
-    SQL(
-      """
-        |DELETE FROM transaction_inputs
-        |WHERE txid = {txid}
-        |RETURNING txid, index, from_txid, from_output_index, value, address
-      """.stripMargin
-    ).on(
-      'txid -> txid.string
-    ).as(parseTransactionInput.*).flatten
-  }
-
-  private def deleteOutputs(txid: TransactionId)(implicit conn: Connection): List[Transaction.Output] = {
-    val result = SQL(
-      """
-        |DELETE FROM transaction_outputs
-        |WHERE txid = {txid}
-        |RETURNING txid, index, hex_script, value, address, tpos_owner_address, tpos_merchant_address
-      """.stripMargin
-    ).on(
-      'txid -> txid.string
-    ).as(parseTransactionOutput.*)
-
-    result.flatten
-  }
-
-  private def batchInsertDetails(transaction: Transaction)(implicit conn: Connection): Option[Unit] = {
-    val received = transaction
-      .outputs
-      .groupBy(_.address)
-      .mapValues { outputs => outputs.map(_.value).sum }
-      .map { case (address, value) => AddressTransactionDetails(address, transaction.id, time = transaction.time, received = value) }
-
-    val sent = transaction
-      .inputs
-      .groupBy(_.address)
-      .mapValues { inputs => inputs.map(_.value).sum }
-      .map { case (address, value) => AddressTransactionDetails(address, transaction.id, time = transaction.time, sent = value) }
-
-    val details = (received ++ sent)
-      .groupBy(_.address)
-      .mapValues {
-        case head :: list => list.foldLeft(head) { (acc, current) =>
-          current.copy(received = current.received + acc.received, sent = current.sent + acc.sent)
-        }
-      }
-      .values
-
-    batchInsertDetails(details.toList)
-  }
-
-  private def batchInsertDetails(details: List[AddressTransactionDetails])(implicit conn: Connection): Option[Unit] = {
-    details match {
-      case Nil => Some(())
-      case _ =>
-        val params = details.map { d =>
-          List(
-            'address -> d.address.string: NamedParameter,
-            'txid -> d.txid.string: NamedParameter,
-            'received -> d.received: NamedParameter,
-            'sent -> d.sent: NamedParameter,
-            'time -> d.time: NamedParameter)
-        }
-
-        val batch = BatchSql(
-          """
-            |INSERT INTO address_transaction_details
-            |  (address, txid, received, sent, time)
-            |VALUES
-            |  ({address}, {txid}, {received}, {sent}, {time})
-          """.stripMargin,
-          params.head,
-          params.tail: _*
-        )
-
-        val success = batch.execute().forall(_ == 1)
-
-        if (success) {
-          Some(())
-        } else {
-          None
-        }
-    }
-  }
-
-  private def deleteDetails(txid: TransactionId)(implicit conn: Connection): List[AddressTransactionDetails] = {
-    val result = SQL(
-      """
-        |DELETE FROM address_transaction_details
-        |WHERE txid = {txid}
-        |RETURNING address, txid, received, sent, time
-      """.stripMargin
-    ).on(
-      'txid -> txid.string
-    ).as(parseAddressTransactionDetails.*)
-
-    result
-  }
-
-  private def getInputs(txid: TransactionId, address: Address)(implicit conn: Connection): List[Transaction.Input] = {
-    SQL(
-      """
-        |SELECT txid, index, from_txid, from_output_index, value, address
-        |FROM transaction_inputs
-        |WHERE txid = {txid} AND
-        |      address = {address}
-      """.stripMargin
-    ).on(
-      'txid -> txid.string,
-      'address -> address.string
-    ).as(parseTransactionInput.*).flatten
-  }
-
-  private def getOutputs(txid: TransactionId, address: Address)(implicit conn: Connection): List[Transaction.Output] = {
-    SQL(
-      """
-        |SELECT txid, index, hex_script, value, address, tpos_owner_address, tpos_merchant_address
-        |FROM transaction_outputs
-        |WHERE txid = {txid} AND
-        |      address = {address}
-      """.stripMargin
-    ).on(
-      'txid -> txid.string,
-      'address -> address.string
-    ).as(parseTransactionOutput.*).flatten
-  }
-
-  private def batchSpend(txid: TransactionId, inputs: List[Transaction.Input])(implicit conn: Connection): Option[Unit] = {
-    inputs match {
-      case Nil => Option(())
-      case _ =>
-        val txidArray = inputs
-          .map { input => s"'${input.fromTxid.string}'" }
-          .mkString("[", ",", "]")
-
-        val indexArray = inputs.map(_.fromOutputIndex).mkString("[", ",", "]")
-
-        // Note: the TransactionId must meet a safe format, this approach speeds up the inserts
-        val result = SQL(
-          s"""
-            |UPDATE transaction_outputs t
-            |SET spent_on = tmp.spent_on
-            |FROM (
-            |  WITH CTE AS (
-            |    SELECT '${txid.string}' AS spent_on
-            |  )
-            |  SELECT spent_on, txid, index
-            |  FROM CTE CROSS JOIN (SELECT
-            |    UNNEST(array$indexArray) AS index,
-            |    UNNEST(array$txidArray) AS txid) x
-            |) AS tmp
-            |WHERE t.txid = tmp.txid AND
-            |      t.index = tmp.index
-          """.stripMargin
-        ).executeUpdate()
-
-        if (result == inputs.size) {
-          Option(())
-        } else {
-          None
-        }
-    }
-  }
-
   private def toSQL(condition: OrderingCondition): String = condition match {
     case OrderingCondition.AscendingOrder => "ASC"
     case OrderingCondition.DescendingOrder => "DESC"
diff --git a/server/test/com/xsn/explorer/data/LedgerPostgresDataHandlerSpec.scala b/server/test/com/xsn/explorer/data/LedgerPostgresDataHandlerSpec.scala
index ee45c94..e52273e 100644
--- a/server/test/com/xsn/explorer/data/LedgerPostgresDataHandlerSpec.scala
+++ b/server/test/com/xsn/explorer/data/LedgerPostgresDataHandlerSpec.scala
@@ -1,10 +1,8 @@
 package com.xsn.explorer.data
 
-import com.alexitc.playsonify.sql.FieldOrderingSQLInterpreter
-import com.xsn.explorer.data.anorm.LedgerPostgresDataHandler
-import com.xsn.explorer.data.anorm.dao.{AggregatedAmountPostgresDAO, BalancePostgresDAO, BlockPostgresDAO, TransactionPostgresDAO}
 import com.xsn.explorer.data.common.PostgresDataHandlerSpec
 import com.xsn.explorer.errors.{PreviousBlockMissingError, RepeatedBlockHeightError}
+import com.xsn.explorer.helpers.DataHandlerObjects._
 import com.xsn.explorer.helpers.{BlockLoader, TransactionLoader}
 import com.xsn.explorer.models.Transaction
 import com.xsn.explorer.models.rpc.Block
@@ -13,12 +11,7 @@ import org.scalatest.BeforeAndAfter
 
 class LedgerPostgresDataHandlerSpec extends PostgresDataHandlerSpec with BeforeAndAfter {
 
-  lazy val dataHandler = new LedgerPostgresDataHandler(
-    database,
-    new BlockPostgresDAO(new FieldOrderingSQLInterpreter),
-    new TransactionPostgresDAO(new FieldOrderingSQLInterpreter),
-    new BalancePostgresDAO(new FieldOrderingSQLInterpreter),
-    new AggregatedAmountPostgresDAO)
+  lazy val dataHandler = createLedgerDataHandler(database)
 
   val blockList = List(
     BlockLoader.get("00000c822abdbb23e28f79a49d29b41429737c6c7e15df40d1b1f1b35907ae34"),
@@ -71,7 +64,7 @@ class LedgerPostgresDataHandlerSpec extends PostgresDataHandlerSpec with BeforeA
       dataHandler.pop()
       fail()
     } catch {
-      case _ => ()
+      case _: Throwable => ()
     }
   }
 
diff --git a/server/test/com/xsn/explorer/data/TransactionPostgresDataHandlerSpec.scala b/server/test/com/xsn/explorer/data/TransactionPostgresDataHandlerSpec.scala
index d976152..9393dff 100644
--- a/server/test/com/xsn/explorer/data/TransactionPostgresDataHandlerSpec.scala
+++ b/server/test/com/xsn/explorer/data/TransactionPostgresDataHandlerSpec.scala
@@ -2,7 +2,6 @@ package com.xsn.explorer.data
 
 import com.alexitc.playsonify.models.ordering.{FieldOrdering, OrderingCondition}
 import com.alexitc.playsonify.models.pagination._
-import com.xsn.explorer.data.anorm.TransactionPostgresDataHandler
 import com.xsn.explorer.data.common.PostgresDataHandlerSpec
 import com.xsn.explorer.errors.{BlockNotFoundError, TransactionNotFoundError}
 import com.xsn.explorer.helpers.DataHandlerObjects._
@@ -18,7 +17,7 @@ class TransactionPostgresDataHandlerSpec extends PostgresDataHandlerSpec with Be
 
   import DataGenerator._
 
-  lazy val dataHandler = new TransactionPostgresDataHandler(database, transactionPostgresDAO)
+  lazy val dataHandler = createTransactionDataHandler(database)
   lazy val ledgerDataHandler = createLedgerDataHandler(database)
   lazy val blockDataHandler = createBlockDataHandler(database)
 
diff --git a/server/test/com/xsn/explorer/helpers/DataHandlerObjects.scala b/server/test/com/xsn/explorer/helpers/DataHandlerObjects.scala
index 5d2a828..ac3c8bd 100644
--- a/server/test/com/xsn/explorer/helpers/DataHandlerObjects.scala
+++ b/server/test/com/xsn/explorer/helpers/DataHandlerObjects.scala
@@ -1,14 +1,21 @@
 package com.xsn.explorer.helpers
 
 import com.alexitc.playsonify.sql.FieldOrderingSQLInterpreter
-import com.xsn.explorer.data.anorm.dao.{AggregatedAmountPostgresDAO, BalancePostgresDAO, BlockPostgresDAO, TransactionPostgresDAO}
-import com.xsn.explorer.data.anorm.{BlockPostgresDataHandler, LedgerPostgresDataHandler}
+import com.xsn.explorer.data.anorm.dao._
+import com.xsn.explorer.data.anorm.{BlockPostgresDataHandler, LedgerPostgresDataHandler, TransactionPostgresDataHandler}
 import play.api.db.Database
 
 trait DataHandlerObjects {
 
   lazy val fieldOrderingSQLInterpreter = new FieldOrderingSQLInterpreter
-  lazy val transactionPostgresDAO = new TransactionPostgresDAO(fieldOrderingSQLInterpreter)
+  lazy val transactionInputDAO = new TransactionInputPostgresDAO
+  lazy val transactionOutputDAO = new TransactionOutputPostgresDAO
+  lazy val addressTransactionDetailsDAO = new AddressTransactionDetailsPostgresDAO
+  lazy val transactionPostgresDAO = new TransactionPostgresDAO(
+    transactionInputDAO,
+    transactionOutputDAO,
+    addressTransactionDetailsDAO,
+    fieldOrderingSQLInterpreter)
   lazy val blockPostgresDAO = new BlockPostgresDAO(fieldOrderingSQLInterpreter)
   lazy val balancePostgresDAO = new BalancePostgresDAO(fieldOrderingSQLInterpreter)
   lazy val aggregatedAmountPostgresDAO = new AggregatedAmountPostgresDAO
@@ -25,6 +32,10 @@
   def createBlockDataHandler(database: Database) = {
     new BlockPostgresDataHandler(database, blockPostgresDAO)
   }
+
+  def createTransactionDataHandler(database: Database) = {
+    new TransactionPostgresDataHandler(database, transactionOutputDAO, transactionPostgresDAO)
+  }
 }
 
 object DataHandlerObjects extends DataHandlerObjects
diff --git a/server/test/com/xsn/explorer/services/LedgerSynchronizerServiceSpec.scala b/server/test/com/xsn/explorer/services/LedgerSynchronizerServiceSpec.scala
index a175e61..3f93240 100644
--- a/server/test/com/xsn/explorer/services/LedgerSynchronizerServiceSpec.scala
+++ b/server/test/com/xsn/explorer/services/LedgerSynchronizerServiceSpec.scala
@@ -1,13 +1,11 @@
 package com.xsn.explorer.services
 
 import com.alexitc.playsonify.core.FutureApplicationResult
-import com.alexitc.playsonify.sql.FieldOrderingSQLInterpreter
 import com.alexitc.playsonify.validators.PaginatedQueryValidator
-import com.xsn.explorer.data.anorm.dao.{AggregatedAmountPostgresDAO, BalancePostgresDAO, BlockPostgresDAO, TransactionPostgresDAO}
-import com.xsn.explorer.data.anorm.{BlockPostgresDataHandler, LedgerPostgresDataHandler, TransactionPostgresDataHandler}
 import com.xsn.explorer.data.async.{BlockFutureDataHandler, LedgerFutureDataHandler, TransactionFutureDataHandler}
 import com.xsn.explorer.data.common.PostgresDataHandlerSpec
 import com.xsn.explorer.errors.BlockNotFoundError
+import com.xsn.explorer.helpers.DataHandlerObjects._
 import com.xsn.explorer.helpers._
 import com.xsn.explorer.models.rpc.Block
 import com.xsn.explorer.models.{Blockhash, Height}
@@ -20,18 +18,9 @@ import scala.concurrent.Future
 
 class LedgerSynchronizerServiceSpec extends PostgresDataHandlerSpec with BeforeAndAfter with ScalaFutures {
 
-  lazy val dataHandler = new LedgerPostgresDataHandler(
-    database,
-    new BlockPostgresDAO(new FieldOrderingSQLInterpreter),
-    new TransactionPostgresDAO(new FieldOrderingSQLInterpreter),
-    new BalancePostgresDAO(new FieldOrderingSQLInterpreter),
-    new AggregatedAmountPostgresDAO)
-
-  lazy val transactionDataHandler = new TransactionPostgresDataHandler(
-    database,
-    new TransactionPostgresDAO(new FieldOrderingSQLInterpreter))
-
-  lazy val blockDataHandler = new BlockPostgresDataHandler(database, new BlockPostgresDAO(new FieldOrderingSQLInterpreter))
+  lazy val dataHandler = createLedgerDataHandler(database)
+  lazy val transactionDataHandler = createTransactionDataHandler(database)
+  lazy val blockDataHandler = createBlockDataHandler(database)
 
   val blockList = List(
     BlockLoader.get("00000c822abdbb23e28f79a49d29b41429737c6c7e15df40d1b1f1b35907ae34"),