Browse Source

server: Refactor the sql parsers to expect correct data from the database

If the data turns out to be corrupted, an exception will be thrown;
since there are check constraints on the SQL schema, this shouldn't occur in practice.
master
Alexis Hernandez 6 years ago
parent
commit
b8b44eab5b
  1. 12
      server/app/com/xsn/explorer/data/anorm/dao/BalancePostgresDAO.scala
  2. 5
      server/app/com/xsn/explorer/data/anorm/dao/TransactionInputPostgresDAO.scala
  3. 7
      server/app/com/xsn/explorer/data/anorm/dao/TransactionOutputPostgresDAO.scala
  4. 18
      server/app/com/xsn/explorer/data/anorm/dao/TransactionPostgresDAO.scala
  5. 2
      server/app/com/xsn/explorer/data/anorm/parsers/BalanceParsers.scala
  6. 7
      server/app/com/xsn/explorer/data/anorm/parsers/CommonParsers.scala
  7. 53
      server/app/com/xsn/explorer/data/anorm/parsers/TransactionParsers.scala

12
server/app/com/xsn/explorer/data/anorm/dao/BalancePostgresDAO.scala

@ -36,7 +36,7 @@ class BalancePostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQ
'address -> partial.address.string,
'received -> partial.received,
'spent -> partial.spent,
).as(parseBalance.singleOpt).flatten
).as(parseBalance.singleOpt)
}
def get(
@ -60,7 +60,7 @@ class BalancePostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQ
).on(
'offset -> query.offset.int,
'limit -> query.limit.int
).as(parseBalance.*).flatten
).as(parseBalance.*)
}
def getNonZeroBalances(
@ -84,7 +84,7 @@ class BalancePostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQ
).on(
'offset -> query.offset.int,
'limit -> query.limit.int
).as(parseBalance.*).flatten
).as(parseBalance.*)
}
def count(implicit conn: Connection): Count = {
@ -111,7 +111,7 @@ class BalancePostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQ
""".stripMargin
).on(
'address -> address.string
).as(parseBalance.singleOpt).flatten
).as(parseBalance.singleOpt)
}
def countNonZeroBalances(implicit conn: Connection): Count = {
@ -143,7 +143,7 @@ class BalancePostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQ
""".stripMargin
).on(
'limit -> limit.int
).as(parseBalance.*).flatten
).as(parseBalance.*)
}
/**
@ -170,6 +170,6 @@ class BalancePostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQ
).on(
'limit -> limit.int,
'lastSeenAddress -> lastSeenAddress.string
).as(parseBalance.*).flatten
).as(parseBalance.*)
}
}

5
server/app/com/xsn/explorer/data/anorm/dao/TransactionInputPostgresDAO.scala

@ -4,7 +4,6 @@ import java.sql.Connection
import anorm._
import com.xsn.explorer.data.anorm.parsers.TransactionParsers._
import com.xsn.explorer.models._
import com.xsn.explorer.models.persisted.Transaction
import com.xsn.explorer.models.values.{Address, TransactionId}
import org.slf4j.LoggerFactory
@ -60,7 +59,7 @@ class TransactionInputPostgresDAO {
""".stripMargin
).on(
'txid -> txid.string
).as(parseTransactionInput.*).flatten
).as(parseTransactionInput.*)
}
def getInputs(txid: TransactionId, address: Address)(implicit conn: Connection): List[Transaction.Input] = {
@ -74,6 +73,6 @@ class TransactionInputPostgresDAO {
).on(
'txid -> txid.string,
'address -> address.string
).as(parseTransactionInput.*).flatten
).as(parseTransactionInput.*)
}
}

7
server/app/com/xsn/explorer/data/anorm/dao/TransactionOutputPostgresDAO.scala

@ -4,7 +4,6 @@ import java.sql.Connection
import anorm._
import com.xsn.explorer.data.anorm.parsers.TransactionParsers._
import com.xsn.explorer.models._
import com.xsn.explorer.models.persisted.Transaction
import com.xsn.explorer.models.values.{Address, TransactionId}
import org.slf4j.LoggerFactory
@ -24,7 +23,7 @@ class TransactionOutputPostgresDAO {
""".stripMargin
).on(
'address -> address.string
).as(parseTransactionOutput.*).flatten
).as(parseTransactionOutput.*)
}
def batchInsertOutputs(
@ -76,7 +75,7 @@ class TransactionOutputPostgresDAO {
'txid -> txid.string
).as(parseTransactionOutput.*)
result.flatten
result
}
def getOutputs(txid: TransactionId, address: Address)(implicit conn: Connection): List[Transaction.Output] = {
@ -90,7 +89,7 @@ class TransactionOutputPostgresDAO {
).on(
'txid -> txid.string,
'address -> address.string
).as(parseTransactionOutput.*).flatten
).as(parseTransactionOutput.*)
}
def batchSpend(txid: TransactionId, inputs: List[Transaction.Input])(implicit conn: Connection): Option[Unit] = {

18
server/app/com/xsn/explorer/data/anorm/dao/TransactionPostgresDAO.scala

@ -109,7 +109,7 @@ class TransactionPostgresDAO @Inject() (
""".stripMargin
).on(
'blockhash -> blockhash.string
).as(parseTransaction.*).flatten
).as(parseTransaction.*)
val result = expectedTransactions.map { tx =>
val inputs = transactionInputDAO.deleteInputs(tx.id)
@ -127,7 +127,7 @@ class TransactionPostgresDAO @Inject() (
""".stripMargin
).on(
'blockhash -> blockhash.string
).as(parseTransaction.*).flatten
).as(parseTransaction.*)
Option(deletedTransactions)
.filter(_.size == expectedTransactions.size)
@ -152,7 +152,7 @@ class TransactionPostgresDAO @Inject() (
).on(
'address -> address.string,
'limit -> limit.int
).as(parseTransaction.*).flatten
).as(parseTransaction.*)
for {
tx <- transactions
@ -201,7 +201,7 @@ class TransactionPostgresDAO @Inject() (
'address -> address.string,
'limit -> limit.int,
'lastSeenTxid -> lastSeenTxid.string
).as(parseTransaction.*).flatten
).as(parseTransaction.*)
for {
tx <- transactions
@ -234,7 +234,7 @@ class TransactionPostgresDAO @Inject() (
'address -> address.string,
'offset -> paginatedQuery.offset.int,
'limit -> paginatedQuery.limit.int
).as(parseTransactionWithValues.*).flatten
).as(parseTransactionWithValues.*)
}
def countBy(address: Address)(implicit conn: Connection): Count = {
@ -279,7 +279,7 @@ class TransactionPostgresDAO @Inject() (
'blockhash -> blockhash.string,
'offset -> paginatedQuery.offset.int,
'limit -> paginatedQuery.limit.int
).as(parseTransactionWithValues.*).flatten
).as(parseTransactionWithValues.*)
}
def countByBlockhash(blockhash: Blockhash)(implicit conn: Connection): Count = {
@ -310,7 +310,7 @@ class TransactionPostgresDAO @Inject() (
).on(
'limit -> limit.int,
'blockhash -> blockhash.string
).as(parseTransactionWithValues.*).flatten
).as(parseTransactionWithValues.*)
}
def getByBlockhash(blockhash: Blockhash, lastSeenTxid: TransactionId, limit: Limit)(implicit conn: Connection): List[TransactionWithValues] = {
@ -329,7 +329,7 @@ class TransactionPostgresDAO @Inject() (
'limit -> limit.int,
'blockhash -> blockhash.string,
'lastSeenTxid -> lastSeenTxid.string
).as(parseTransactionWithValues.*).flatten
).as(parseTransactionWithValues.*)
}
private def upsertTransaction(index: Int, transaction: Transaction)(implicit conn: Connection): Option[Transaction] = {
@ -352,7 +352,7 @@ class TransactionPostgresDAO @Inject() (
'time -> transaction.time,
'size -> transaction.size.int,
'index -> index
).as(parseTransaction.singleOpt).flatten
).as(parseTransaction.singleOpt)
}
private def toSQL(condition: OrderingCondition): String = condition match {

2
server/app/com/xsn/explorer/data/anorm/parsers/BalanceParsers.scala

@ -12,6 +12,6 @@ object BalanceParsers {
val parseSpent = get[BigDecimal]("spent")
val parseBalance = (parseAddress ~ parseReceived ~ parseSpent).map { case address ~ received ~ spent =>
address.map { Balance(_, received, spent) }
Balance(address, received, spent)
}
}

7
server/app/com/xsn/explorer/data/anorm/parsers/CommonParsers.scala

@ -1,7 +1,7 @@
package com.xsn.explorer.data.anorm.parsers
import anorm.SqlParser.{int, long, str}
import com.xsn.explorer.models.values.{Size, _}
import com.xsn.explorer.models.values._
object CommonParsers {
@ -9,7 +9,10 @@ object CommonParsers {
.map(Blockhash.from)
.map { _.getOrElse(throw new RuntimeException("corrupted blockhash")) }
val parseAddress = str("address").map(Address.from)
val parseAddress = str("address")
.map(Address.from)
.map { _.getOrElse(throw new RuntimeException("corrupted address")) }
val parseTime = long("time")
val parseSize = int("size").map(Size.apply)
}

53
server/app/com/xsn/explorer/data/anorm/parsers/TransactionParsers.scala

@ -10,8 +10,14 @@ object TransactionParsers {
import CommonParsers._
val parseTransactionId = str("txid").map(TransactionId.from)
val parseFromTxid = str("from_txid").map(TransactionId.from)
val parseTransactionId = str("txid")
.map(TransactionId.from)
.map { _.getOrElse(throw new RuntimeException("corrupted txid")) }
val parseFromTxid = str("from_txid")
.map(TransactionId.from)
.map { _.getOrElse(throw new RuntimeException("corrupted from_txid")) }
val parseFromOutputIndex = get[Int]("from_output_index")
val parseReceived = get[BigDecimal]("received")
val parseSpent = get[BigDecimal]("spent")
@ -19,16 +25,20 @@ object TransactionParsers {
val parseIndex = get[Int]("index")
val parseValue = get[BigDecimal]("value")
val parseHexString = get[String]("hex_script").map(HexString.from)
val parseHexString = get[String]("hex_script")
.map(HexString.from)
.map { _.getOrElse(throw new RuntimeException("corrupted hex_script")) }
val parseTposOwnerAddress = str("tpos_owner_address")
.map(Address.from)
.map { _.getOrElse(throw new RuntimeException("corrupted tpos_owner_address")) }
val parseTposOwnerAddress = str("tpos_owner_address").map(Address.from)
val parseTposMerchantAddress = str("tpos_merchant_address").map(Address.from)
val parseTposMerchantAddress = str("tpos_merchant_address")
.map(Address.from)
.map { _.getOrElse(throw new RuntimeException("corrupted tpos_merchant_address")) }
val parseTransaction = (parseTransactionId ~ parseBlockhash ~ parseTime ~ parseSize).map {
case txidMaybe ~ blockhash ~ time ~ size =>
for {
txid <- txidMaybe
} yield Transaction(txid, blockhash, time, size)
case txid ~ blockhash ~ time ~ size => Transaction(txid, blockhash, time, size)
}
val parseTransactionWithValues = (
@ -39,18 +49,13 @@ object TransactionParsers {
parseSent ~
parseReceived).map {
case txidMaybe ~ blockhash ~ time ~ size ~ sent ~ received =>
for {
txid <- txidMaybe
} yield TransactionWithValues(txid, blockhash, time, size, sent, received)
case txid ~ blockhash ~ time ~ size ~ sent ~ received =>
TransactionWithValues(txid, blockhash, time, size, sent, received)
}
val parseTransactionInput = (parseFromTxid ~ parseFromOutputIndex ~ parseIndex ~ parseValue ~ parseAddress)
.map { case fromTxidMaybe ~ fromOutputIndex ~ index ~ value ~ addressMaybe =>
for {
from <- fromTxidMaybe
address <- addressMaybe
} yield Transaction.Input(from, fromOutputIndex, index, value, address)
.map { case fromTxid ~ fromOutputIndex ~ index ~ value ~ address =>
Transaction.Input(fromTxid, fromOutputIndex, index, value, address)
}
val parseTransactionOutput = (
@ -62,18 +67,14 @@ object TransactionParsers {
parseTposOwnerAddress.? ~
parseTposMerchantAddress.?).map {
case txidMaybe ~ index ~ value ~ addressMaybe ~ scriptMaybe ~ tposOwnerAddress ~ tposMerchantAddress =>
for {
txid <- txidMaybe
address <- addressMaybe
script <- scriptMaybe
} yield Transaction.Output(txid, index, value, address, script, tposOwnerAddress.flatten, tposMerchantAddress.flatten)
case txid ~ index ~ value ~ address ~ script ~ tposOwnerAddress ~ tposMerchantAddress =>
Transaction.Output(txid, index, value, address, script, tposOwnerAddress, tposMerchantAddress)
}
val parseAddressTransactionDetails = (parseAddress ~ parseTransactionId ~ parseSent ~ parseReceived ~ parseTime).map {
case address ~ txid ~ sent ~ received ~ time => AddressTransactionDetails(
address.getOrElse(throw new RuntimeException("failed to retrieve address")),
txid.getOrElse(throw new RuntimeException("failed to retrieve txid")),
address,
txid,
time = time,
sent = sent,
received = received)

Loading…
Cancel
Save