Browse Source

server: Refactor the sql parsers to expect correct data from the database

If the data turns out to be corrupted, an exception is going to be thrown;
since there are check constraints on the SQL schema, this shouldn't occur.
master
Alexis Hernandez 6 years ago
parent
commit
b8b44eab5b
  1. 12
      server/app/com/xsn/explorer/data/anorm/dao/BalancePostgresDAO.scala
  2. 5
      server/app/com/xsn/explorer/data/anorm/dao/TransactionInputPostgresDAO.scala
  3. 7
      server/app/com/xsn/explorer/data/anorm/dao/TransactionOutputPostgresDAO.scala
  4. 18
      server/app/com/xsn/explorer/data/anorm/dao/TransactionPostgresDAO.scala
  5. 2
      server/app/com/xsn/explorer/data/anorm/parsers/BalanceParsers.scala
  6. 7
      server/app/com/xsn/explorer/data/anorm/parsers/CommonParsers.scala
  7. 53
      server/app/com/xsn/explorer/data/anorm/parsers/TransactionParsers.scala

12
server/app/com/xsn/explorer/data/anorm/dao/BalancePostgresDAO.scala

@ -36,7 +36,7 @@ class BalancePostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQ
'address -> partial.address.string, 'address -> partial.address.string,
'received -> partial.received, 'received -> partial.received,
'spent -> partial.spent, 'spent -> partial.spent,
).as(parseBalance.singleOpt).flatten ).as(parseBalance.singleOpt)
} }
def get( def get(
@ -60,7 +60,7 @@ class BalancePostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQ
).on( ).on(
'offset -> query.offset.int, 'offset -> query.offset.int,
'limit -> query.limit.int 'limit -> query.limit.int
).as(parseBalance.*).flatten ).as(parseBalance.*)
} }
def getNonZeroBalances( def getNonZeroBalances(
@ -84,7 +84,7 @@ class BalancePostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQ
).on( ).on(
'offset -> query.offset.int, 'offset -> query.offset.int,
'limit -> query.limit.int 'limit -> query.limit.int
).as(parseBalance.*).flatten ).as(parseBalance.*)
} }
def count(implicit conn: Connection): Count = { def count(implicit conn: Connection): Count = {
@ -111,7 +111,7 @@ class BalancePostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQ
""".stripMargin """.stripMargin
).on( ).on(
'address -> address.string 'address -> address.string
).as(parseBalance.singleOpt).flatten ).as(parseBalance.singleOpt)
} }
def countNonZeroBalances(implicit conn: Connection): Count = { def countNonZeroBalances(implicit conn: Connection): Count = {
@ -143,7 +143,7 @@ class BalancePostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQ
""".stripMargin """.stripMargin
).on( ).on(
'limit -> limit.int 'limit -> limit.int
).as(parseBalance.*).flatten ).as(parseBalance.*)
} }
/** /**
@ -170,6 +170,6 @@ class BalancePostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQ
).on( ).on(
'limit -> limit.int, 'limit -> limit.int,
'lastSeenAddress -> lastSeenAddress.string 'lastSeenAddress -> lastSeenAddress.string
).as(parseBalance.*).flatten ).as(parseBalance.*)
} }
} }

5
server/app/com/xsn/explorer/data/anorm/dao/TransactionInputPostgresDAO.scala

@ -4,7 +4,6 @@ import java.sql.Connection
import anorm._ import anorm._
import com.xsn.explorer.data.anorm.parsers.TransactionParsers._ import com.xsn.explorer.data.anorm.parsers.TransactionParsers._
import com.xsn.explorer.models._
import com.xsn.explorer.models.persisted.Transaction import com.xsn.explorer.models.persisted.Transaction
import com.xsn.explorer.models.values.{Address, TransactionId} import com.xsn.explorer.models.values.{Address, TransactionId}
import org.slf4j.LoggerFactory import org.slf4j.LoggerFactory
@ -60,7 +59,7 @@ class TransactionInputPostgresDAO {
""".stripMargin """.stripMargin
).on( ).on(
'txid -> txid.string 'txid -> txid.string
).as(parseTransactionInput.*).flatten ).as(parseTransactionInput.*)
} }
def getInputs(txid: TransactionId, address: Address)(implicit conn: Connection): List[Transaction.Input] = { def getInputs(txid: TransactionId, address: Address)(implicit conn: Connection): List[Transaction.Input] = {
@ -74,6 +73,6 @@ class TransactionInputPostgresDAO {
).on( ).on(
'txid -> txid.string, 'txid -> txid.string,
'address -> address.string 'address -> address.string
).as(parseTransactionInput.*).flatten ).as(parseTransactionInput.*)
} }
} }

7
server/app/com/xsn/explorer/data/anorm/dao/TransactionOutputPostgresDAO.scala

@ -4,7 +4,6 @@ import java.sql.Connection
import anorm._ import anorm._
import com.xsn.explorer.data.anorm.parsers.TransactionParsers._ import com.xsn.explorer.data.anorm.parsers.TransactionParsers._
import com.xsn.explorer.models._
import com.xsn.explorer.models.persisted.Transaction import com.xsn.explorer.models.persisted.Transaction
import com.xsn.explorer.models.values.{Address, TransactionId} import com.xsn.explorer.models.values.{Address, TransactionId}
import org.slf4j.LoggerFactory import org.slf4j.LoggerFactory
@ -24,7 +23,7 @@ class TransactionOutputPostgresDAO {
""".stripMargin """.stripMargin
).on( ).on(
'address -> address.string 'address -> address.string
).as(parseTransactionOutput.*).flatten ).as(parseTransactionOutput.*)
} }
def batchInsertOutputs( def batchInsertOutputs(
@ -76,7 +75,7 @@ class TransactionOutputPostgresDAO {
'txid -> txid.string 'txid -> txid.string
).as(parseTransactionOutput.*) ).as(parseTransactionOutput.*)
result.flatten result
} }
def getOutputs(txid: TransactionId, address: Address)(implicit conn: Connection): List[Transaction.Output] = { def getOutputs(txid: TransactionId, address: Address)(implicit conn: Connection): List[Transaction.Output] = {
@ -90,7 +89,7 @@ class TransactionOutputPostgresDAO {
).on( ).on(
'txid -> txid.string, 'txid -> txid.string,
'address -> address.string 'address -> address.string
).as(parseTransactionOutput.*).flatten ).as(parseTransactionOutput.*)
} }
def batchSpend(txid: TransactionId, inputs: List[Transaction.Input])(implicit conn: Connection): Option[Unit] = { def batchSpend(txid: TransactionId, inputs: List[Transaction.Input])(implicit conn: Connection): Option[Unit] = {

18
server/app/com/xsn/explorer/data/anorm/dao/TransactionPostgresDAO.scala

@ -109,7 +109,7 @@ class TransactionPostgresDAO @Inject() (
""".stripMargin """.stripMargin
).on( ).on(
'blockhash -> blockhash.string 'blockhash -> blockhash.string
).as(parseTransaction.*).flatten ).as(parseTransaction.*)
val result = expectedTransactions.map { tx => val result = expectedTransactions.map { tx =>
val inputs = transactionInputDAO.deleteInputs(tx.id) val inputs = transactionInputDAO.deleteInputs(tx.id)
@ -127,7 +127,7 @@ class TransactionPostgresDAO @Inject() (
""".stripMargin """.stripMargin
).on( ).on(
'blockhash -> blockhash.string 'blockhash -> blockhash.string
).as(parseTransaction.*).flatten ).as(parseTransaction.*)
Option(deletedTransactions) Option(deletedTransactions)
.filter(_.size == expectedTransactions.size) .filter(_.size == expectedTransactions.size)
@ -152,7 +152,7 @@ class TransactionPostgresDAO @Inject() (
).on( ).on(
'address -> address.string, 'address -> address.string,
'limit -> limit.int 'limit -> limit.int
).as(parseTransaction.*).flatten ).as(parseTransaction.*)
for { for {
tx <- transactions tx <- transactions
@ -201,7 +201,7 @@ class TransactionPostgresDAO @Inject() (
'address -> address.string, 'address -> address.string,
'limit -> limit.int, 'limit -> limit.int,
'lastSeenTxid -> lastSeenTxid.string 'lastSeenTxid -> lastSeenTxid.string
).as(parseTransaction.*).flatten ).as(parseTransaction.*)
for { for {
tx <- transactions tx <- transactions
@ -234,7 +234,7 @@ class TransactionPostgresDAO @Inject() (
'address -> address.string, 'address -> address.string,
'offset -> paginatedQuery.offset.int, 'offset -> paginatedQuery.offset.int,
'limit -> paginatedQuery.limit.int 'limit -> paginatedQuery.limit.int
).as(parseTransactionWithValues.*).flatten ).as(parseTransactionWithValues.*)
} }
def countBy(address: Address)(implicit conn: Connection): Count = { def countBy(address: Address)(implicit conn: Connection): Count = {
@ -279,7 +279,7 @@ class TransactionPostgresDAO @Inject() (
'blockhash -> blockhash.string, 'blockhash -> blockhash.string,
'offset -> paginatedQuery.offset.int, 'offset -> paginatedQuery.offset.int,
'limit -> paginatedQuery.limit.int 'limit -> paginatedQuery.limit.int
).as(parseTransactionWithValues.*).flatten ).as(parseTransactionWithValues.*)
} }
def countByBlockhash(blockhash: Blockhash)(implicit conn: Connection): Count = { def countByBlockhash(blockhash: Blockhash)(implicit conn: Connection): Count = {
@ -310,7 +310,7 @@ class TransactionPostgresDAO @Inject() (
).on( ).on(
'limit -> limit.int, 'limit -> limit.int,
'blockhash -> blockhash.string 'blockhash -> blockhash.string
).as(parseTransactionWithValues.*).flatten ).as(parseTransactionWithValues.*)
} }
def getByBlockhash(blockhash: Blockhash, lastSeenTxid: TransactionId, limit: Limit)(implicit conn: Connection): List[TransactionWithValues] = { def getByBlockhash(blockhash: Blockhash, lastSeenTxid: TransactionId, limit: Limit)(implicit conn: Connection): List[TransactionWithValues] = {
@ -329,7 +329,7 @@ class TransactionPostgresDAO @Inject() (
'limit -> limit.int, 'limit -> limit.int,
'blockhash -> blockhash.string, 'blockhash -> blockhash.string,
'lastSeenTxid -> lastSeenTxid.string 'lastSeenTxid -> lastSeenTxid.string
).as(parseTransactionWithValues.*).flatten ).as(parseTransactionWithValues.*)
} }
private def upsertTransaction(index: Int, transaction: Transaction)(implicit conn: Connection): Option[Transaction] = { private def upsertTransaction(index: Int, transaction: Transaction)(implicit conn: Connection): Option[Transaction] = {
@ -352,7 +352,7 @@ class TransactionPostgresDAO @Inject() (
'time -> transaction.time, 'time -> transaction.time,
'size -> transaction.size.int, 'size -> transaction.size.int,
'index -> index 'index -> index
).as(parseTransaction.singleOpt).flatten ).as(parseTransaction.singleOpt)
} }
private def toSQL(condition: OrderingCondition): String = condition match { private def toSQL(condition: OrderingCondition): String = condition match {

2
server/app/com/xsn/explorer/data/anorm/parsers/BalanceParsers.scala

@ -12,6 +12,6 @@ object BalanceParsers {
val parseSpent = get[BigDecimal]("spent") val parseSpent = get[BigDecimal]("spent")
val parseBalance = (parseAddress ~ parseReceived ~ parseSpent).map { case address ~ received ~ spent => val parseBalance = (parseAddress ~ parseReceived ~ parseSpent).map { case address ~ received ~ spent =>
address.map { Balance(_, received, spent) } Balance(address, received, spent)
} }
} }

7
server/app/com/xsn/explorer/data/anorm/parsers/CommonParsers.scala

@ -1,7 +1,7 @@
package com.xsn.explorer.data.anorm.parsers package com.xsn.explorer.data.anorm.parsers
import anorm.SqlParser.{int, long, str} import anorm.SqlParser.{int, long, str}
import com.xsn.explorer.models.values.{Size, _} import com.xsn.explorer.models.values._
object CommonParsers { object CommonParsers {
@ -9,7 +9,10 @@ object CommonParsers {
.map(Blockhash.from) .map(Blockhash.from)
.map { _.getOrElse(throw new RuntimeException("corrupted blockhash")) } .map { _.getOrElse(throw new RuntimeException("corrupted blockhash")) }
val parseAddress = str("address").map(Address.from) val parseAddress = str("address")
.map(Address.from)
.map { _.getOrElse(throw new RuntimeException("corrupted address")) }
val parseTime = long("time") val parseTime = long("time")
val parseSize = int("size").map(Size.apply) val parseSize = int("size").map(Size.apply)
} }

53
server/app/com/xsn/explorer/data/anorm/parsers/TransactionParsers.scala

@ -10,8 +10,14 @@ object TransactionParsers {
import CommonParsers._ import CommonParsers._
val parseTransactionId = str("txid").map(TransactionId.from) val parseTransactionId = str("txid")
val parseFromTxid = str("from_txid").map(TransactionId.from) .map(TransactionId.from)
.map { _.getOrElse(throw new RuntimeException("corrupted txid")) }
val parseFromTxid = str("from_txid")
.map(TransactionId.from)
.map { _.getOrElse(throw new RuntimeException("corrupted from_txid")) }
val parseFromOutputIndex = get[Int]("from_output_index") val parseFromOutputIndex = get[Int]("from_output_index")
val parseReceived = get[BigDecimal]("received") val parseReceived = get[BigDecimal]("received")
val parseSpent = get[BigDecimal]("spent") val parseSpent = get[BigDecimal]("spent")
@ -19,16 +25,20 @@ object TransactionParsers {
val parseIndex = get[Int]("index") val parseIndex = get[Int]("index")
val parseValue = get[BigDecimal]("value") val parseValue = get[BigDecimal]("value")
val parseHexString = get[String]("hex_script").map(HexString.from) val parseHexString = get[String]("hex_script")
.map(HexString.from)
.map { _.getOrElse(throw new RuntimeException("corrupted hex_script")) }
val parseTposOwnerAddress = str("tpos_owner_address")
.map(Address.from)
.map { _.getOrElse(throw new RuntimeException("corrupted tpos_owner_address")) }
val parseTposOwnerAddress = str("tpos_owner_address").map(Address.from) val parseTposMerchantAddress = str("tpos_merchant_address")
val parseTposMerchantAddress = str("tpos_merchant_address").map(Address.from) .map(Address.from)
.map { _.getOrElse(throw new RuntimeException("corrupted tpos_merchant_address")) }
val parseTransaction = (parseTransactionId ~ parseBlockhash ~ parseTime ~ parseSize).map { val parseTransaction = (parseTransactionId ~ parseBlockhash ~ parseTime ~ parseSize).map {
case txidMaybe ~ blockhash ~ time ~ size => case txid ~ blockhash ~ time ~ size => Transaction(txid, blockhash, time, size)
for {
txid <- txidMaybe
} yield Transaction(txid, blockhash, time, size)
} }
val parseTransactionWithValues = ( val parseTransactionWithValues = (
@ -39,18 +49,13 @@ object TransactionParsers {
parseSent ~ parseSent ~
parseReceived).map { parseReceived).map {
case txidMaybe ~ blockhash ~ time ~ size ~ sent ~ received => case txid ~ blockhash ~ time ~ size ~ sent ~ received =>
for { TransactionWithValues(txid, blockhash, time, size, sent, received)
txid <- txidMaybe
} yield TransactionWithValues(txid, blockhash, time, size, sent, received)
} }
val parseTransactionInput = (parseFromTxid ~ parseFromOutputIndex ~ parseIndex ~ parseValue ~ parseAddress) val parseTransactionInput = (parseFromTxid ~ parseFromOutputIndex ~ parseIndex ~ parseValue ~ parseAddress)
.map { case fromTxidMaybe ~ fromOutputIndex ~ index ~ value ~ addressMaybe => .map { case fromTxid ~ fromOutputIndex ~ index ~ value ~ address =>
for { Transaction.Input(fromTxid, fromOutputIndex, index, value, address)
from <- fromTxidMaybe
address <- addressMaybe
} yield Transaction.Input(from, fromOutputIndex, index, value, address)
} }
val parseTransactionOutput = ( val parseTransactionOutput = (
@ -62,18 +67,14 @@ object TransactionParsers {
parseTposOwnerAddress.? ~ parseTposOwnerAddress.? ~
parseTposMerchantAddress.?).map { parseTposMerchantAddress.?).map {
case txidMaybe ~ index ~ value ~ addressMaybe ~ scriptMaybe ~ tposOwnerAddress ~ tposMerchantAddress => case txid ~ index ~ value ~ address ~ script ~ tposOwnerAddress ~ tposMerchantAddress =>
for { Transaction.Output(txid, index, value, address, script, tposOwnerAddress, tposMerchantAddress)
txid <- txidMaybe
address <- addressMaybe
script <- scriptMaybe
} yield Transaction.Output(txid, index, value, address, script, tposOwnerAddress.flatten, tposMerchantAddress.flatten)
} }
val parseAddressTransactionDetails = (parseAddress ~ parseTransactionId ~ parseSent ~ parseReceived ~ parseTime).map { val parseAddressTransactionDetails = (parseAddress ~ parseTransactionId ~ parseSent ~ parseReceived ~ parseTime).map {
case address ~ txid ~ sent ~ received ~ time => AddressTransactionDetails( case address ~ txid ~ sent ~ received ~ time => AddressTransactionDetails(
address.getOrElse(throw new RuntimeException("failed to retrieve address")), address,
txid.getOrElse(throw new RuntimeException("failed to retrieve txid")), txid,
time = time, time = time,
sent = sent, sent = sent,
received = received) received = received)

Loading…
Cancel
Save