diff --git a/server/app/com/xsn/explorer/data/anorm/dao/BlockFilterPostgresDAO.scala b/server/app/com/xsn/explorer/data/anorm/dao/BlockFilterPostgresDAO.scala
index d513906..f4fdf49 100644
--- a/server/app/com/xsn/explorer/data/anorm/dao/BlockFilterPostgresDAO.scala
+++ b/server/app/com/xsn/explorer/data/anorm/dao/BlockFilterPostgresDAO.scala
@@ -40,4 +40,16 @@ class BlockFilterPostgresDAO {
       'blockhash -> blockhash.string
     ).as(parseFilter.singleOpt)
   }
+
+  def getBy(blockhash: Blockhash)(implicit conn: Connection): Option[GolombCodedSet] = {
+    SQL(
+      """
+        |SELECT blockhash, m, n, p, hex
+        |FROM block_address_gcs
+        |WHERE blockhash = {blockhash}
+      """.stripMargin
+    ).on(
+      'blockhash -> blockhash.string
+    ).as(parseFilter.singleOpt)
+  }
 }
diff --git a/server/app/com/xsn/explorer/data/anorm/dao/BlockPostgresDAO.scala b/server/app/com/xsn/explorer/data/anorm/dao/BlockPostgresDAO.scala
index 47fc553..3fad497 100644
--- a/server/app/com/xsn/explorer/data/anorm/dao/BlockPostgresDAO.scala
+++ b/server/app/com/xsn/explorer/data/anorm/dao/BlockPostgresDAO.scala
@@ -12,7 +12,9 @@ import com.xsn.explorer.models.persisted.{Block, BlockHeader}
 import com.xsn.explorer.models.values.{Blockhash, Height}
 import javax.inject.Inject
 
-class BlockPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLInterpreter) {
+class BlockPostgresDAO @Inject() (
+    blockFilterPostgresDAO: BlockFilterPostgresDAO,
+    fieldOrderingSQLInterpreter: FieldOrderingSQLInterpreter) {
 
   def insert(block: Block)(implicit conn: Connection): Option[Block] = {
     SQL(
@@ -154,7 +156,7 @@ class BlockPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLI
   def getHeaders(limit: Limit, orderingCondition: OrderingCondition)(implicit conn: Connection): List[BlockHeader] = {
     val order = toSQL(orderingCondition)
 
-    SQL(
+    val headers = SQL(
       s"""
         |SELECT blockhash, previous_blockhash, merkle_root, height, time
         |FROM blocks
@@ -164,6 +166,8 @@ class BlockPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLI
     ).on(
       'limit -> limit.int
     ).as(parseHeader.*)
+
+    attachFilters(headers)
   }
 
   def getHeaders(lastSeenHash: Blockhash, limit: Limit, orderingCondition: OrderingCondition)(implicit conn: Connection): List[BlockHeader] = {
@@ -173,7 +177,7 @@ class BlockPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLI
       case OrderingCondition.AscendingOrder => ">"
     }
 
-    SQL(
+    val headers = SQL(
       s"""
         |WITH CTE AS (
         |  SELECT height as lastSeenHeight
@@ -190,6 +194,20 @@ class BlockPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLI
       'lastSeenHash -> lastSeenHash.string,
       'limit -> limit.int
     ).as(parseHeader.*)
+
+    attachFilters(headers)
   }
 
+  /** Pairs each header with its stored block filter, when one exists.
+    * NOTE: issues one query per header (N+1); fine for small limits, switch to a JOIN if this gets hot.
+    */
+  private def attachFilters(headers: List[BlockHeader])(implicit conn: Connection): List[BlockHeader] = {
+    headers.map { header =>
+      blockFilterPostgresDAO
+        .getBy(header.hash)
+        .map(header.withFilter)
+        .getOrElse(header)
+    }
+  }
+
   private def toSQL(condition: OrderingCondition): String = condition match {
diff --git a/server/app/com/xsn/explorer/models/persisted/BlockHeader.scala b/server/app/com/xsn/explorer/models/persisted/BlockHeader.scala
index 574f4c4..5a1a42f 100644
--- a/server/app/com/xsn/explorer/models/persisted/BlockHeader.scala
+++ b/server/app/com/xsn/explorer/models/persisted/BlockHeader.scala
@@ -5,7 +5,7 @@ import com.xsn.explorer.models.values._
 import io.scalaland.chimney.dsl._
 import play.api.libs.json.{Json, Writes}
 
-sealed trait BlockHeader {
+sealed trait BlockHeader extends Product with Serializable {
 
   def hash: Blockhash
   def previousBlockhash: Option[Blockhash]
diff --git a/server/test/com/xsn/explorer/data/BlockPostgresDataHandlerSpec.scala b/server/test/com/xsn/explorer/data/BlockPostgresDataHandlerSpec.scala
index cf46a52..14a7295 100644
--- a/server/test/com/xsn/explorer/data/BlockPostgresDataHandlerSpec.scala
+++ b/server/test/com/xsn/explorer/data/BlockPostgresDataHandlerSpec.scala
@@ -2,12 +2,10 @@ package com.xsn.explorer.data
 
 import com.alexitc.playsonify.models.ordering.{FieldOrdering, OrderingCondition}
 import com.alexitc.playsonify.models.pagination.{Count, Limit, Offset, PaginatedQuery}
-import com.alexitc.playsonify.sql.FieldOrderingSQLInterpreter
 import com.xsn.explorer.data.anorm.BlockPostgresDataHandler
-import com.xsn.explorer.data.anorm.dao.BlockPostgresDAO
 import com.xsn.explorer.data.common.PostgresDataHandlerSpec
 import com.xsn.explorer.errors.BlockNotFoundError
-import com.xsn.explorer.helpers.BlockLoader
+import com.xsn.explorer.helpers.{BlockLoader, DataHandlerObjects}
 import com.xsn.explorer.models.fields.BlockField
 import com.xsn.explorer.models.persisted.Block
 import com.xsn.explorer.models.values.Blockhash
@@ -16,12 +14,14 @@ import org.scalatest.BeforeAndAfter
 
 class BlockPostgresDataHandlerSpec extends PostgresDataHandlerSpec with BeforeAndAfter {
 
+  import DataHandlerObjects._
+
   before {
     clearDatabase()
   }
 
+  val dao = blockPostgresDAO
   val defaultOrdering = FieldOrdering(BlockField.Height, OrderingCondition.AscendingOrder)
-  lazy val dao = new BlockPostgresDAO(new FieldOrderingSQLInterpreter)
   lazy val dataHandler = new BlockPostgresDataHandler(database, dao)
 
   def insert(block: Block) = {
diff --git a/server/test/com/xsn/explorer/helpers/DataHandlerObjects.scala b/server/test/com/xsn/explorer/helpers/DataHandlerObjects.scala
index af72693..c980675 100644
--- a/server/test/com/xsn/explorer/helpers/DataHandlerObjects.scala
+++ b/server/test/com/xsn/explorer/helpers/DataHandlerObjects.scala
@@ -16,8 +16,8 @@ trait DataHandlerObjects {
     transactionOutputDAO,
     addressTransactionDetailsDAO,
     fieldOrderingSQLInterpreter)
-  lazy val blockPostgresDAO = new BlockPostgresDAO(fieldOrderingSQLInterpreter)
   lazy val blockFilterPostgresDAO = new BlockFilterPostgresDAO
+  lazy val blockPostgresDAO = new BlockPostgresDAO(blockFilterPostgresDAO, fieldOrderingSQLInterpreter)
   lazy val balancePostgresDAO = new BalancePostgresDAO(fieldOrderingSQLInterpreter)
   lazy val aggregatedAmountPostgresDAO = new AggregatedAmountPostgresDAO