
server: Refactor the sql parsers to expect correct data from the database

If the data turns out to be corrupted, an exception is thrown. As there are
check constraints on the SQL schema, this shouldn't occur in practice.
master
Alexis Hernandez, 6 years ago
parent commit 988694b88a
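
In short: before this commit, parsers like `parseBlockhash` produced `Option` values because constructors such as `Blockhash.from` validate their input, so every caller had to unwrap (or `.flatten`) the result. Since the schema's check constraints already guarantee valid values, the parsers now unwrap eagerly and throw if a value fails to parse. A minimal sketch of the pattern, assuming `Blockhash.from: String => Option[Blockhash]` (which the diff's `.getOrElse` calls imply):

import anorm.RowParser
import anorm.SqlParser.str
import com.xsn.explorer.models.Blockhash

// Before: callers receive Option[Blockhash] and must handle None.
val parseBlockhashOld: RowParser[Option[Blockhash]] =
  str("blockhash").map(Blockhash.from)

// After: a None here can only mean corrupted data (the check
// constraint forbids invalid values), so the parser fails fast.
val parseBlockhashNew: RowParser[Blockhash] =
  str("blockhash")
      .map(Blockhash.from)
      .map { _.getOrElse(throw new RuntimeException("corrupted blockhash")) }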
  1. server/app/com/xsn/explorer/data/anorm/dao/BlockPostgresDAO.scala (12 changed lines)
  2. server/app/com/xsn/explorer/data/anorm/parsers/BlockParsers.scala (35 changed lines)
  3. server/app/com/xsn/explorer/data/anorm/parsers/CommonParsers.scala (5 changed lines)
  4. server/app/com/xsn/explorer/data/anorm/parsers/TransactionParsers.scala (6 changed lines)

server/app/com/xsn/explorer/data/anorm/dao/BlockPostgresDAO.scala (12 changed lines)

@@ -45,7 +45,7 @@ class BlockPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLI
       'bits -> block.bits,
       'chainwork -> block.chainwork,
       'difficulty -> block.difficulty
-    ).as(parseBlock.single)
+    ).as(parseBlock.singleOpt)
   }

   def setNextBlockhash(
@@ -64,7 +64,7 @@ class BlockPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLI
     ).on(
       'blockhash -> blockhash.string,
       'next_blockhash -> nextBlockhash.string
-    ).as(parseBlock.singleOpt).flatten
+    ).as(parseBlock.singleOpt)
   }

   def getBy(blockhash: Blockhash)(implicit conn: Connection): Option[Block] = {
@@ -77,7 +77,7 @@ class BlockPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLI
       """.stripMargin
     ).on(
       "blockhash" -> blockhash.string
-    ).as(parseBlock.singleOpt).flatten
+    ).as(parseBlock.singleOpt)
   }

   def getBy(height: Height)(implicit conn: Connection): Option[Block] = {
@@ -90,7 +90,7 @@ class BlockPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLI
       """.stripMargin
     ).on(
       "height" -> height.int
-    ).as(parseBlock.singleOpt).flatten
+    ).as(parseBlock.singleOpt)
   }

   def getBy(
@@ -111,7 +111,7 @@ class BlockPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLI
     ).on(
       'offset -> paginatedQuery.offset.int,
       'limit -> paginatedQuery.limit.int
-    ).as(parseBlock.*).flatten
+    ).as(parseBlock.*)
   }

   def count(implicit conn: Connection): Count = {
@@ -135,7 +135,7 @@ class BlockPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLI
       """.stripMargin
     ).on(
       "blockhash" -> blockhash.string
-    ).as(parseBlock.singleOpt).flatten
+    ).as(parseBlock.singleOpt)
   }

   def getLatestBlock(implicit conn: Connection): Option[Block] = {
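
Why the trailing `.flatten` calls go away: `parseBlock` was previously a `RowParser[Option[Block]]`, so `as(parseBlock.singleOpt)` yielded an `Option[Option[Block]]`. Now that the parser throws on corrupt rows, `parseBlock` is a plain `RowParser[Block]` and `singleOpt` alone produces the `Option[Block]` these methods return, where `None` means no row matched rather than a bad row. The first hunk is the same idea in reverse: the insert used `.single` because the old parser's result was already an `Option`; with the new parser, `.singleOpt` keeps the method's optional return type. A sketch of the resulting shape, with an illustrative method name and query (the real SQL lives in BlockPostgresDAO.scala):

import java.sql.Connection
import anorm._
import com.xsn.explorer.data.anorm.parsers.BlockParsers.parseBlock
import com.xsn.explorer.models.Block

// Illustrative only; mirrors the getBy(blockhash) method above.
def findByBlockhash(blockhash: String)(implicit conn: Connection): Option[Block] = {
  SQL("SELECT * FROM blocks WHERE blockhash = {blockhash}")
      .on("blockhash" -> blockhash)
      .as(parseBlock.singleOpt) // Option[Block]: None when no row matches;
                                // previously Option[Option[Block]], needing .flatten
}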

server/app/com/xsn/explorer/data/anorm/parsers/BlockParsers.scala (35 changed lines)

@@ -9,10 +9,22 @@ object BlockParsers {

   import CommonParsers._

-  val parseNextBlockhash = str("next_blockhash").map(Blockhash.from)
-  val parsePreviousBlockhash = str("previous_blockhash").map(Blockhash.from)
-  val parseTposContract = str("tpos_contract").map(TransactionId.from)
-  val parseMerkleRoot = str("merkle_root").map(Blockhash.from)
+  val parseNextBlockhash = str("next_blockhash")
+      .map(Blockhash.from)
+      .map { _.getOrElse(throw new RuntimeException("corrupted next_blockhash")) }
+
+  val parsePreviousBlockhash = str("previous_blockhash")
+      .map(Blockhash.from)
+      .map { _.getOrElse(throw new RuntimeException("corrupted previous_blockhash")) }
+
+  val parseTposContract = str("tpos_contract")
+      .map(TransactionId.from)
+      .map { _.getOrElse(throw new RuntimeException("corrupted tpos_contract")) }
+
+  val parseMerkleRoot = str("merkle_root")
+      .map(Blockhash.from)
+      .map { _.getOrElse(throw new RuntimeException("corrupted merkle_root")) }
+
   val parseSize = int("size").map(Size.apply)
   val parseHeight = int("height").map(Height.apply)
   val parseVersion = int("version")
@@ -38,11 +50,11 @@ object BlockParsers {
       parseChainwork ~
       parseDifficulty).map {
-    case hashMaybe ~
+    case hash ~
          nextBlockhash ~
          previousBlockhash ~
          tposContract ~
-         merkleRootMaybe ~
+         merkleRoot ~
          size ~
          height ~
          version ~
@@ -53,14 +65,11 @@ object BlockParsers {
          chainwork ~
          difficulty =>
-      for {
-        hash <- hashMaybe
-        merkleRoot <- merkleRootMaybe
-      } yield Block(
+      Block(
         hash = hash,
-        previousBlockhash = previousBlockhash.flatten,
-        nextBlockhash = nextBlockhash.flatten,
-        tposContract = tposContract.flatten,
+        previousBlockhash = previousBlockhash,
+        nextBlockhash = nextBlockhash,
+        tposContract = tposContract,
         merkleRoot = merkleRoot,
         size = size,
         height = height,
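
Two details worth noting. First, `previousBlockhash`, `nextBlockhash`, and `tposContract` remain `Option` fields on `Block` because those columns are nullable; before the change each of those slots carried an `Option[Option[...]]` (nullable column combined with a fallible parse), hence the `.flatten` calls the commit deletes. Second, the `.getOrElse(throw ...)` pattern now repeats four times and could be factored into a helper. A hypothetical sketch, not part of the commit (`parseOrFail` is an invented name):

import anorm.RowParser
import anorm.SqlParser.str

// Hypothetical helper: lift a validating constructor
// (String => Option[A]) into a fail-fast column parser.
def parseOrFail[A](column: String)(from: String => Option[A]): RowParser[A] =
  str(column)
      .map(from)
      .map { _.getOrElse(throw new RuntimeException(s"corrupted $column")) }

// e.g. val parseMerkleRoot = parseOrFail("merkle_root")(Blockhash.from)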

server/app/com/xsn/explorer/data/anorm/parsers/CommonParsers.scala (5 changed lines)

@@ -5,7 +5,10 @@ import com.xsn.explorer.models.{Address, Blockhash, Size}

 object CommonParsers {

-  val parseBlockhash = str("blockhash").map(Blockhash.from)
+  val parseBlockhash = str("blockhash")
+      .map(Blockhash.from)
+      .map { _.getOrElse(throw new RuntimeException("corrupted blockhash")) }
+
   val parseAddress = str("address").map(Address.from)
   val parseTime = long("time")
   val parseSize = int("size").map(Size.apply)
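
Caller-side, the new behavior surfaces when the result set is consumed: `as(...)` fails the whole query instead of quietly yielding a `None`. A sketch under assumed table and method names (only `parseBlockhash` comes from the commit):

import java.sql.Connection
import anorm._
import com.xsn.explorer.data.anorm.parsers.CommonParsers.parseBlockhash
import com.xsn.explorer.models.Blockhash

// Illustrative caller; blockhashAt and the query are invented.
def blockhashAt(height: Int)(implicit conn: Connection): Blockhash =
  SQL("SELECT blockhash FROM blocks WHERE height = {height}")
      .on("height" -> height)
      .as(parseBlockhash.single) // throws RuntimeException("corrupted blockhash")
                                 // if the stored value is invalid (the check
                                 // constraint should prevent that); .single also
                                 // fails if the query returns no row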

server/app/com/xsn/explorer/data/anorm/parsers/TransactionParsers.scala (6 changed lines)

@@ -23,10 +23,9 @@ object TransactionParsers {
   val parseTposMerchantAddress = str("tpos_merchant_address").map(Address.from)

   val parseTransaction = (parseTransactionId ~ parseBlockhash ~ parseTime ~ parseSize).map {
-    case txidMaybe ~ blockhashMaybe ~ time ~ size =>
+    case txidMaybe ~ blockhash ~ time ~ size =>
       for {
         txid <- txidMaybe
-        blockhash <- blockhashMaybe
       } yield Transaction(txid, blockhash, time, size, List.empty, List.empty)
   }
@@ -38,10 +37,9 @@ object TransactionParsers {
       parseSent ~
       parseReceived).map {
-    case txidMaybe ~ blockhashMaybe ~ time ~ size ~ sent ~ received =>
+    case txidMaybe ~ blockhash ~ time ~ size ~ sent ~ received =>
       for {
         txid <- txidMaybe
-        blockhash <- blockhashMaybe
       } yield TransactionWithValues(txid, blockhash, time, size, sent, received)
   }
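
The for-comprehensions survive because `parseTransactionId` still yields an `Option`; only the `blockhash` binding is dropped, now that `parseBlockhash` (from CommonParsers) returns the value directly. The resulting shape, written with `map` to make the remaining optionality explicit (equivalent to the for-comprehension above, assuming the imports already in TransactionParsers.scala):

// parseTransactionId: RowParser[Option[TransactionId]]
// parseBlockhash:     RowParser[Blockhash]
val parseTransaction: RowParser[Option[Transaction]] =
  (parseTransactionId ~ parseBlockhash ~ parseTime ~ parseSize).map {
    case txidMaybe ~ blockhash ~ time ~ size =>
      // txid parsing can still fail, so the combined parser stays optional
      txidMaybe.map { txid =>
        Transaction(txid, blockhash, time, size, List.empty, List.empty)
      }
  }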
