Browse Source

server: Attach the filter while retrieving block headers

master
Alexis Hernandez 6 years ago
parent
commit
5d1b3662c0
  1. 12
      server/app/com/xsn/explorer/data/anorm/dao/BlockFilterPostgresDAO.scala
  2. 22
      server/app/com/xsn/explorer/data/anorm/dao/BlockPostgresDAO.scala
  3. 2
      server/app/com/xsn/explorer/models/persisted/BlockHeader.scala
  4. 8
      server/test/com/xsn/explorer/data/BlockPostgresDataHandlerSpec.scala
  5. 2
      server/test/com/xsn/explorer/helpers/DataHandlerObjects.scala

12
server/app/com/xsn/explorer/data/anorm/dao/BlockFilterPostgresDAO.scala

@@ -40,4 +40,16 @@ class BlockFilterPostgresDAO {
'blockhash -> blockhash.string
).as(parseFilter.singleOpt)
}
/** Retrieves the Golomb-coded set filter stored for the given block, if any.
  *
  * @param blockhash the hash of the block whose filter is looked up
  * @param conn the active database connection the query runs on
  * @return the block's stored filter, or None when no row exists for the hash
  */
def getBy(blockhash: Blockhash)(implicit conn: Connection): Option[GolombCodedSet] = {
  val query = SQL(
    """
      |SELECT blockhash, m, n, p, hex
      |FROM block_address_gcs
      |WHERE blockhash = {blockhash}
      |""".stripMargin
  )

  query
    .on('blockhash -> blockhash.string)
    .as(parseFilter.singleOpt)
}
}

22
server/app/com/xsn/explorer/data/anorm/dao/BlockPostgresDAO.scala

@@ -12,7 +12,9 @@ import com.xsn.explorer.models.persisted.{Block, BlockHeader}
import com.xsn.explorer.models.values.{Blockhash, Height}
import javax.inject.Inject
class BlockPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLInterpreter) {
class BlockPostgresDAO @Inject() (
blockFilterPostgresDAO: BlockFilterPostgresDAO,
fieldOrderingSQLInterpreter: FieldOrderingSQLInterpreter) {
def insert(block: Block)(implicit conn: Connection): Option[Block] = {
SQL(
@@ -154,7 +156,7 @@ class BlockPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLI
def getHeaders(limit: Limit, orderingCondition: OrderingCondition)(implicit conn: Connection): List[BlockHeader] = {
val order = toSQL(orderingCondition)
SQL(
val headers = SQL(
s"""
|SELECT blockhash, previous_blockhash, merkle_root, height, time
|FROM blocks
@@ -164,6 +166,13 @@ class BlockPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLI
).on(
'limit -> limit.int
).as(parseHeader.*)
for {
header <- headers
filterMaybe = blockFilterPostgresDAO.getBy(header.hash)
} yield filterMaybe
.map(header.withFilter)
.getOrElse(header)
}
def getHeaders(lastSeenHash: Blockhash, limit: Limit, orderingCondition: OrderingCondition)(implicit conn: Connection): List[BlockHeader] = {
@@ -173,7 +182,7 @@ class BlockPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLI
case OrderingCondition.AscendingOrder => ">"
}
SQL(
val headers = SQL(
s"""
|WITH CTE AS (
| SELECT height as lastSeenHeight
@@ -190,6 +199,13 @@ class BlockPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLI
'lastSeenHash -> lastSeenHash.string,
'limit -> limit.int
).as(parseHeader.*)
for {
header <- headers
filterMaybe = blockFilterPostgresDAO.getBy(header.hash)
} yield filterMaybe
.map(header.withFilter)
.getOrElse(header)
}
private def toSQL(condition: OrderingCondition): String = condition match {

2
server/app/com/xsn/explorer/models/persisted/BlockHeader.scala

@@ -5,7 +5,7 @@ import com.xsn.explorer.models.values._
import io.scalaland.chimney.dsl._
import play.api.libs.json.{Json, Writes}
sealed trait BlockHeader {
sealed trait BlockHeader extends Product with Serializable {
def hash: Blockhash
def previousBlockhash: Option[Blockhash]

8
server/test/com/xsn/explorer/data/BlockPostgresDataHandlerSpec.scala

@@ -2,12 +2,10 @@ package com.xsn.explorer.data
import com.alexitc.playsonify.models.ordering.{FieldOrdering, OrderingCondition}
import com.alexitc.playsonify.models.pagination.{Count, Limit, Offset, PaginatedQuery}
import com.alexitc.playsonify.sql.FieldOrderingSQLInterpreter
import com.xsn.explorer.data.anorm.BlockPostgresDataHandler
import com.xsn.explorer.data.anorm.dao.BlockPostgresDAO
import com.xsn.explorer.data.common.PostgresDataHandlerSpec
import com.xsn.explorer.errors.BlockNotFoundError
import com.xsn.explorer.helpers.BlockLoader
import com.xsn.explorer.helpers.{BlockLoader, DataHandlerObjects}
import com.xsn.explorer.models.fields.BlockField
import com.xsn.explorer.models.persisted.Block
import com.xsn.explorer.models.values.Blockhash
@@ -16,12 +14,14 @@ import org.scalatest.BeforeAndAfter
class BlockPostgresDataHandlerSpec extends PostgresDataHandlerSpec with BeforeAndAfter {
import DataHandlerObjects._
before {
clearDatabase()
}
val dao = blockPostgresDAO
val defaultOrdering = FieldOrdering(BlockField.Height, OrderingCondition.AscendingOrder)
lazy val dao = new BlockPostgresDAO(new FieldOrderingSQLInterpreter)
lazy val dataHandler = new BlockPostgresDataHandler(database, dao)
def insert(block: Block) = {

2
server/test/com/xsn/explorer/helpers/DataHandlerObjects.scala

@@ -16,8 +16,8 @@ trait DataHandlerObjects {
transactionOutputDAO,
addressTransactionDetailsDAO,
fieldOrderingSQLInterpreter)
lazy val blockPostgresDAO = new BlockPostgresDAO(fieldOrderingSQLInterpreter)
lazy val blockFilterPostgresDAO = new BlockFilterPostgresDAO
lazy val blockPostgresDAO = new BlockPostgresDAO(blockFilterPostgresDAO, fieldOrderingSQLInterpreter)
lazy val balancePostgresDAO = new BalancePostgresDAO(fieldOrderingSQLInterpreter)
lazy val aggregatedAmountPostgresDAO = new AggregatedAmountPostgresDAO

Loading…
Cancel
Save