Browse Source

server: Allow to get paginated blocks from the BlockDataHandler

prometheus-integration
Alexis Hernandez 6 years ago
parent
commit
c72f458a9d
  1. 6
      server/app/com/xsn/explorer/data/BlockDataHandler.scala
  2. 15
      server/app/com/xsn/explorer/data/anorm/BlockPostgresDataHandler.scala
  3. 38
      server/app/com/xsn/explorer/data/anorm/dao/BlockPostgresDAO.scala
  4. 6
      server/app/com/xsn/explorer/data/async/BlockFutureDataHandler.scala
  5. 21
      server/app/com/xsn/explorer/models/fields/BlockField.scala
  6. 67
      server/test/com/xsn/explorer/data/BlockPostgresDataHandlerSpec.scala
  7. 2
      server/test/com/xsn/explorer/data/LedgerPostgresDataHandlerSpec.scala
  8. 6
      server/test/com/xsn/explorer/data/TransactionPostgresDataHandlerSpec.scala
  9. 4
      server/test/com/xsn/explorer/services/LedgerSynchronizerServiceSpec.scala

6
server/app/com/xsn/explorer/data/BlockDataHandler.scala

@ -1,6 +1,8 @@
package com.xsn.explorer.data
import com.alexitc.playsonify.core.ApplicationResult
import com.alexitc.playsonify.models.{FieldOrdering, PaginatedQuery, PaginatedResult}
import com.xsn.explorer.models.fields.BlockField
import com.xsn.explorer.models.rpc.Block
import com.xsn.explorer.models.{Blockhash, Height}
@ -12,6 +14,10 @@ trait BlockDataHandler[F[_]] {
def getBy(height: Height): F[Block]
def getBy(
paginatedQuery: PaginatedQuery,
ordering: FieldOrdering[BlockField]): F[PaginatedResult[Block]]
def delete(blockhash: Blockhash): F[Block]
def getLatestBlock(): F[Block]

15
server/app/com/xsn/explorer/data/anorm/BlockPostgresDataHandler.scala

@ -3,12 +3,14 @@ package com.xsn.explorer.data.anorm
import javax.inject.Inject
import com.alexitc.playsonify.core.ApplicationResult
import com.alexitc.playsonify.models.{FieldOrdering, PaginatedQuery, PaginatedResult}
import com.xsn.explorer.data.BlockBlockingDataHandler
import com.xsn.explorer.data.anorm.dao.BlockPostgresDAO
import com.xsn.explorer.errors._
import com.xsn.explorer.models.fields.BlockField
import com.xsn.explorer.models.rpc.Block
import com.xsn.explorer.models.{Blockhash, Height}
import org.scalactic.{One, Or}
import org.scalactic.{Good, One, Or}
import play.api.db.Database
class BlockPostgresDataHandler @Inject() (
@ -27,6 +29,17 @@ class BlockPostgresDataHandler @Inject() (
Or.from(maybe, One(BlockNotFoundError))
}
override def getBy(
    paginatedQuery: PaginatedQuery,
    ordering: FieldOrdering[BlockField]): ApplicationResult[PaginatedResult[Block]] = withConnection { implicit conn =>

  // Fetch the requested page and the overall row count on the same connection,
  // then assemble the paginated envelope; DAO calls are assumed not to fail here.
  val blocks = blockPostgresDAO.getBy(paginatedQuery, ordering)
  val totalCount = blockPostgresDAO.count
  Good(PaginatedResult(paginatedQuery.offset, paginatedQuery.limit, totalCount, blocks))
}
override def delete(blockhash: Blockhash): ApplicationResult[Block] = database.withConnection { implicit conn =>
val maybe = blockPostgresDAO.delete(blockhash)
Or.from(maybe, One(BlockNotFoundError))

38
server/app/com/xsn/explorer/data/anorm/dao/BlockPostgresDAO.scala

@ -1,13 +1,17 @@
package com.xsn.explorer.data.anorm.dao
import java.sql.Connection
import javax.inject.Inject
import anorm._
import com.alexitc.playsonify.models.{Count, FieldOrdering, PaginatedQuery}
import com.xsn.explorer.data.anorm.interpreters.FieldOrderingSQLInterpreter
import com.xsn.explorer.data.anorm.parsers.BlockParsers._
import com.xsn.explorer.models.fields.BlockField
import com.xsn.explorer.models.rpc.Block
import com.xsn.explorer.models.{Blockhash, Height}
class BlockPostgresDAO {
class BlockPostgresDAO @Inject() (fieldOrderingSQLInterpreter: FieldOrderingSQLInterpreter) {
def insert(block: Block)(implicit conn: Connection): Option[Block] = {
SQL(
@ -88,6 +92,38 @@ class BlockPostgresDAO {
).as(parseBlock.singleOpt).flatten
}
/**
 * Retrieves one page of blocks.
 *
 * @param paginatedQuery the offset/limit pair selecting the page
 * @param ordering the field and direction to sort by; translated to SQL by
 *                 [[FieldOrderingSQLInterpreter]], so only whitelisted enum
 *                 fields ever reach the query (no injection risk from $orderBy)
 * @param conn the database connection to run the query on
 * @return the blocks in the requested page, possibly empty
 */
def getBy(
paginatedQuery: PaginatedQuery,
ordering: FieldOrdering[BlockField])(
implicit conn: Connection): List[Block] = {
// Interpolated directly into the statement; safe because it is derived from the
// BlockField enum, never from raw user input.
val orderBy = fieldOrderingSQLInterpreter.toOrderByClause(ordering)
SQL(
s"""
|SELECT blockhash, previous_blockhash, next_blockhash, tpos_contract, merkle_root, size,
| height, version, time, median_time, nonce, bits, chainwork, difficulty
|FROM blocks
|$orderBy
|OFFSET {offset}
|LIMIT {limit}
""".stripMargin
).on(
'offset -> paginatedQuery.offset.int,
'limit -> paginatedQuery.limit.int
).as(parseBlock.*).flatten
}
/** Counts every block in the table; used to report totals alongside paginated pages. */
def count(implicit conn: Connection): Count = {
  val result = SQL(
    """
      |SELECT COUNT(*)
      |FROM blocks
    """.stripMargin
  ).as(SqlParser.scalar[Int].single)

  Count(result)
}
def delete(blockhash: Blockhash)(implicit conn: Connection): Option[Block] = {
SQL(
"""

6
server/app/com/xsn/explorer/data/async/BlockFutureDataHandler.scala

@ -3,8 +3,10 @@ package com.xsn.explorer.data.async
import javax.inject.Inject
import com.alexitc.playsonify.core.FutureApplicationResult
import com.alexitc.playsonify.models.{FieldOrdering, PaginatedQuery, PaginatedResult}
import com.xsn.explorer.data.{BlockBlockingDataHandler, BlockDataHandler}
import com.xsn.explorer.executors.DatabaseExecutionContext
import com.xsn.explorer.models.fields.BlockField
import com.xsn.explorer.models.rpc.Block
import com.xsn.explorer.models.{Blockhash, Height}
@ -23,6 +25,10 @@ class BlockFutureDataHandler @Inject() (
blockBlockingDataHandler.getBy(height)
}
override def getBy(
    paginatedQuery: PaginatedQuery,
    ordering: FieldOrdering[BlockField]): FutureApplicationResult[PaginatedResult[Block]] = Future {
  // Run the blocking handler on the dedicated database execution context.
  blockBlockingDataHandler.getBy(paginatedQuery, ordering)
}
// Wraps the blocking delete in a Future on the database execution context.
override def delete(blockhash: Blockhash): FutureApplicationResult[Block] = Future {
blockBlockingDataHandler.delete(blockhash)
}

21
server/app/com/xsn/explorer/models/fields/BlockField.scala

@ -0,0 +1,21 @@
package com.xsn.explorer.models.fields
import com.xsn.explorer.data.anorm.interpreters.ColumnNameResolver
import enumeratum.{Enum, EnumEntry}
/**
 * The block fields that clients are allowed to sort by; the entry name doubles
 * as the SQL column name (see [[columnNameResolver]]).
 */
sealed abstract class BlockField(override val entryName: String) extends EnumEntry
object BlockField extends Enum[BlockField] {
val values = findValues
case object Height extends BlockField("height")
case object Time extends BlockField("time")
/**
 * Resolves each field to its database column. Height serves as the unique
 * column, giving ORDER BY a deterministic tie-breaker.
 */
implicit val columnNameResolver: ColumnNameResolver[BlockField] = new ColumnNameResolver[BlockField] {
override def getUniqueColumnName: String = Height.entryName
override def getColumnName(field: BlockField): String = field.entryName
}
}

67
server/test/com/xsn/explorer/data/BlockPostgresDataHandlerSpec.scala

@ -1,11 +1,14 @@
package com.xsn.explorer.data
import com.alexitc.playsonify.models._
import com.xsn.explorer.data.anorm.BlockPostgresDataHandler
import com.xsn.explorer.data.anorm.dao.BlockPostgresDAO
import com.xsn.explorer.data.anorm.interpreters.FieldOrderingSQLInterpreter
import com.xsn.explorer.data.common.PostgresDataHandlerSpec
import com.xsn.explorer.errors.BlockNotFoundError
import com.xsn.explorer.helpers.BlockLoader
import com.xsn.explorer.models.Blockhash
import com.xsn.explorer.models.fields.BlockField
import com.xsn.explorer.models.rpc.Block
import org.scalactic.{Bad, One, Or}
import org.scalatest.BeforeAndAfter
@ -16,10 +19,11 @@ class BlockPostgresDataHandlerSpec extends PostgresDataHandlerSpec with BeforeAn
clearDatabase()
}
lazy val dataHandler = new BlockPostgresDataHandler(database, new BlockPostgresDAO)
val defaultOrdering = FieldOrdering(BlockField.Height, OrderingCondition.AscendingOrder)
lazy val dao = new BlockPostgresDAO(new FieldOrderingSQLInterpreter)
lazy val dataHandler = new BlockPostgresDataHandler(database, dao)
def insert(block: Block) = {
val dao = new BlockPostgresDAO
database.withConnection { implicit conn =>
val maybe = dao.insert(block)
Or.from(maybe, One(BlockNotFoundError))
@ -66,6 +70,65 @@ class BlockPostgresDataHandlerSpec extends PostgresDataHandlerSpec with BeforeAn
}
}
"getBy" should {
"paginate the results" in {
val block0 = BlockLoader.get("00000c822abdbb23e28f79a49d29b41429737c6c7e15df40d1b1f1b35907ae34")
.copy(previousBlockhash = None, nextBlockhash = None)
val block1 = BlockLoader.get("000003fb382f6892ae96594b81aa916a8923c70701de4e7054aac556c7271ef7")
.copy(nextBlockhash = None)
val block2 = BlockLoader.get("000004645e2717b556682e3c642a4c6e473bf25c653ff8e8c114a3006040ffb8")
.copy(nextBlockhash = None)
List(block0, block1, block2).map(insert).foreach(_.isGood mustEqual true)
val query = PaginatedQuery(Offset(1), Limit(3))
val expected = List(block1, block2)
val result = dataHandler.getBy(query, defaultOrdering).get
result.total mustEqual Count(3)
result.offset mustEqual query.offset
result.limit mustEqual query.limit
result.data.size mustEqual expected.size
val data = result.data
matches(data(0), expected(0))
matches(data(1), expected(1))
}
def testOrdering[B](field: BlockField)(sortBy: Block => B)(implicit order: Ordering[B]) = {
val block0 = BlockLoader.get("00000c822abdbb23e28f79a49d29b41429737c6c7e15df40d1b1f1b35907ae34")
.copy(previousBlockhash = None, nextBlockhash = None)
val block1 = BlockLoader.get("000003fb382f6892ae96594b81aa916a8923c70701de4e7054aac556c7271ef7")
.copy(nextBlockhash = None)
val block2 = BlockLoader.get("000004645e2717b556682e3c642a4c6e473bf25c653ff8e8c114a3006040ffb8")
.copy(nextBlockhash = None)
val blocks = List(block0, block1, block2)
blocks.map(insert).foreach(_.isGood mustEqual true)
val ordering = FieldOrdering(field, OrderingCondition.AscendingOrder)
val query = PaginatedQuery(Offset(0), Limit(10))
val expected = blocks.sortBy(sortBy)(order).map(_.hash)
val result = dataHandler.getBy(query, ordering).get.data
result.map(_.hash) mustEqual expected
val expectedReverse = expected.reverse
val resultReverse = dataHandler.getBy(query, ordering.copy(orderingCondition = OrderingCondition.DescendingOrder)).get.data
resultReverse.map(_.hash) mustEqual expectedReverse
}
"allow to sort by txid" in {
testOrdering(BlockField.Height)(_.height.int)
}
"allow to sort by time" in {
testOrdering(BlockField.Time)(_.time)
}
}
"delete" should {
"delete a block" in {
val block = BlockLoader.get("1ca318b7a26ed67ca7c8c9b5069d653ba224bf86989125d1dfbb0973b7d6a5e0")

2
server/test/com/xsn/explorer/data/LedgerPostgresDataHandlerSpec.scala

@ -15,7 +15,7 @@ class LedgerPostgresDataHandlerSpec extends PostgresDataHandlerSpec with BeforeA
lazy val dataHandler = new LedgerPostgresDataHandler(
database,
new BlockPostgresDAO,
new BlockPostgresDAO(new FieldOrderingSQLInterpreter),
new TransactionPostgresDAO(new FieldOrderingSQLInterpreter),
new BalancePostgresDAO(new FieldOrderingSQLInterpreter))

6
server/test/com/xsn/explorer/data/TransactionPostgresDataHandlerSpec.scala

@ -19,11 +19,11 @@ class TransactionPostgresDataHandlerSpec extends PostgresDataHandlerSpec with Be
lazy val dataHandler = new TransactionPostgresDataHandler(database, new TransactionPostgresDAO(new FieldOrderingSQLInterpreter))
lazy val ledgerDataHandler = new LedgerPostgresDataHandler(
database,
new BlockPostgresDAO,
new BlockPostgresDAO(new FieldOrderingSQLInterpreter),
new TransactionPostgresDAO(new FieldOrderingSQLInterpreter),
new BalancePostgresDAO(new FieldOrderingSQLInterpreter))
lazy val blockDataHandler = new BlockPostgresDataHandler(database, new BlockPostgresDAO)
lazy val blockDataHandler = new BlockPostgresDataHandler(database, new BlockPostgresDAO(new FieldOrderingSQLInterpreter))
val defaultOrdering = FieldOrdering(TransactionField.Time, OrderingCondition.DescendingOrder)
val block = Block(
@ -91,7 +91,7 @@ class TransactionPostgresDataHandlerSpec extends PostgresDataHandlerSpec with Be
)
private def prepareBlock(block: Block) = {
val dao = new BlockPostgresDAO
val dao = new BlockPostgresDAO(new FieldOrderingSQLInterpreter)
try {
database.withConnection { implicit conn =>
val maybe = dao.insert(block)

4
server/test/com/xsn/explorer/services/LedgerSynchronizerServiceSpec.scala

@ -22,7 +22,7 @@ class LedgerSynchronizerServiceSpec extends PostgresDataHandlerSpec with BeforeA
lazy val dataHandler = new LedgerPostgresDataHandler(
database,
new BlockPostgresDAO,
new BlockPostgresDAO(new FieldOrderingSQLInterpreter),
new TransactionPostgresDAO(new FieldOrderingSQLInterpreter),
new BalancePostgresDAO(new FieldOrderingSQLInterpreter))
@ -30,7 +30,7 @@ class LedgerSynchronizerServiceSpec extends PostgresDataHandlerSpec with BeforeA
database,
new TransactionPostgresDAO(new FieldOrderingSQLInterpreter))
lazy val blockDataHandler = new BlockPostgresDataHandler(database, new BlockPostgresDAO)
lazy val blockDataHandler = new BlockPostgresDataHandler(database, new BlockPostgresDAO(new FieldOrderingSQLInterpreter))
val blockList = List(
BlockLoader.get("00000c822abdbb23e28f79a49d29b41429737c6c7e15df40d1b1f1b35907ae34"),

Loading…
Cancel
Save