Browse Source

server: Add unique constraint to the blocks table

This also changes the upsert method to insert on the BlockDataHandler;
this is part of the fix for bug #6
scalafmt-draft
Alexis Hernandez 7 years ago
parent
commit
61c6649584
  1. 2
      server/app/com/xsn/explorer/data/BlockDataHandler.scala
  2. 16
      server/app/com/xsn/explorer/data/anorm/BlockPostgresDataHandler.scala
  3. 18
      server/app/com/xsn/explorer/data/anorm/DatabasePostgresSeeder.scala
  4. 36
      server/app/com/xsn/explorer/data/anorm/dao/BlockPostgresDAO.scala
  5. 4
      server/app/com/xsn/explorer/data/async/BlockFutureDataHandler.scala
  6. 10
      server/app/com/xsn/explorer/errors/blockErrors.scala
  7. 14
      server/conf/evolutions/default/4.sql
  8. 44
      server/test/com/xsn/explorer/data/BlockPostgresDataHandlerSpec.scala
  9. 28
      server/test/com/xsn/explorer/processors/BlockEventsProcessorSpec.scala

2
server/app/com/xsn/explorer/data/BlockDataHandler.scala

@ -8,7 +8,7 @@ import scala.language.higherKinds
trait BlockDataHandler[F[_]] {
def upsert(block: Block): F[Block]
def insert(block: Block): F[Block]
def getBy(blockhash: Blockhash): F[Block]

16
server/app/com/xsn/explorer/data/anorm/BlockPostgresDataHandler.scala

@ -5,7 +5,7 @@ import javax.inject.Inject
import com.alexitc.playsonify.core.ApplicationResult
import com.xsn.explorer.data.BlockBlockingDataHandler
import com.xsn.explorer.data.anorm.dao.BlockPostgresDAO
import com.xsn.explorer.errors.{BlockNotFoundError, BlockUnknownError}
import com.xsn.explorer.errors._
import com.xsn.explorer.models.Blockhash
import com.xsn.explorer.models.rpc.Block
import org.scalactic.{One, Or}
@ -17,11 +17,21 @@ class BlockPostgresDataHandler @Inject() (
extends BlockBlockingDataHandler
with AnormPostgresDataHandler {
override def upsert(block: Block): ApplicationResult[Block] = database.withConnection { implicit conn =>
val maybe = blockPostgresDAO.upsert(block)
override def insert(block: Block): ApplicationResult[Block] = {
val result = withConnection { implicit conn =>
val maybe = blockPostgresDAO.insert(block)
Or.from(maybe, One(BlockUnknownError))
}
result.badMap { errors =>
errors.map {
case PostgresForeignKeyViolationError("blockhash", _) => RepeatedBlockhashError
case PostgresForeignKeyViolationError("height", _) => RepeatedBlockHeightError
case e => e
}
}
}
override def getBy(blockhash: Blockhash): ApplicationResult[Block] = database.withConnection { implicit conn =>
val maybe = blockPostgresDAO.getBy(blockhash)
Or.from(maybe, One(BlockNotFoundError))

18
server/app/com/xsn/explorer/data/anorm/DatabasePostgresSeeder.scala

@ -11,6 +11,7 @@ import com.xsn.explorer.models.rpc.Block
import com.xsn.explorer.models.{Address, Balance, Transaction}
import com.xsn.explorer.util.Extensions.ListOptionExt
import org.scalactic.Good
import org.slf4j.LoggerFactory
import play.api.db.Database
class DatabasePostgresSeeder @Inject() (
@ -21,6 +22,8 @@ class DatabasePostgresSeeder @Inject() (
extends DatabaseBlockingSeeder
with AnormPostgresDataHandler {
private val logger = LoggerFactory.getLogger(this.getClass)
override def firstBlock(command: CreateBlockCommand): ApplicationResult[Unit] = database.withTransaction { implicit conn =>
val result = upsertBlockCascade(command)
@ -34,9 +37,13 @@ class DatabasePostgresSeeder @Inject() (
val result = for {
// link previous block
previousBlockhash <- command.block.previousBlockhash
previous <- blockPostgresDAO.getBy(previousBlockhash)
newPrevious = previous.copy(nextBlockhash = Some(command.block.hash))
_ <- blockPostgresDAO.upsert(newPrevious)
_ <- blockPostgresDAO
.setNextBlockhash(previousBlockhash, command.block.hash)
.orElse {
logger.warn(s"Failed to link previous block = ${previousBlockhash.string} to ${command.block.hash.string} because it wasn't found")
None
}
_ <- upsertBlockCascade(command)
} yield ()
@ -71,7 +78,10 @@ class DatabasePostgresSeeder @Inject() (
private def upsertBlockCascade(command: CreateBlockCommand)(implicit conn: Connection): Option[Unit] = {
for {
// block
_ <- blockPostgresDAO.upsert(command.block)
_ <- blockPostgresDAO
.delete(command.block.hash)
.orElse { Some(command.block) }
_ <- blockPostgresDAO.insert(command.block)
// transactions
_ <- command.transactions.map(tx => transactionPostgresDAO.upsert(tx)).everything

36
server/app/com/xsn/explorer/data/anorm/dao/BlockPostgresDAO.scala

@ -9,7 +9,7 @@ import com.xsn.explorer.models.rpc.Block
class BlockPostgresDAO {
def upsert(block: Block)(implicit conn: Connection): Option[Block] = {
def insert(block: Block)(implicit conn: Connection): Option[Block] = {
SQL(
"""
|INSERT INTO blocks
@ -22,21 +22,6 @@ class BlockPostgresDAO {
| {blockhash}, {previous_blockhash}, {next_blockhash}, {tpos_contract}, {merkle_root}, {size},
| {height}, {version}, {time}, {median_time}, {nonce}, {bits}, {chainwork}, {difficulty}
| )
|ON CONFLICT (blockhash)
|DO UPDATE
| SET previous_blockhash = EXCLUDED.previous_blockhash,
| next_blockhash = EXCLUDED.next_blockhash,
| tpos_contract = EXCLUDED.tpos_contract,
| merkle_root = EXCLUDED.merkle_root,
| size = EXCLUDED.size,
| height = EXCLUDED.height,
| version = EXCLUDED.version,
| time = EXCLUDED.time,
| median_time = EXCLUDED.median_time,
| nonce = EXCLUDED.nonce,
| bits = EXCLUDED.bits,
| chainwork = EXCLUDED.chainwork,
| difficulty = EXCLUDED.difficulty
|RETURNING blockhash, previous_blockhash, next_blockhash, tpos_contract, merkle_root, size,
| height, version, time, median_time, nonce, bits, chainwork, difficulty
""".stripMargin
@ -55,6 +40,25 @@ class BlockPostgresDAO {
'bits -> block.bits,
'chainwork -> block.chainwork,
'difficulty -> block.difficulty
).as(parseBlock.single)
}
def setNextBlockhash(
blockhash: Blockhash,
nextBlockhash: Blockhash)(
implicit conn: Connection): Option[Block] = {
SQL(
"""
|UPDATE blocks
|SET next_blockhash = {next_blockhash}
|WHERE blockhash = {blockhash}
|RETURNING blockhash, previous_blockhash, next_blockhash, tpos_contract, merkle_root, size,
| height, version, time, median_time, nonce, bits, chainwork, difficulty
""".stripMargin
).on(
'blockhash -> blockhash.string,
'next_blockhash -> nextBlockhash.string
).as(parseBlock.singleOpt).flatten
}

4
server/app/com/xsn/explorer/data/async/BlockFutureDataHandler.scala

@ -15,8 +15,8 @@ class BlockFutureDataHandler @Inject() (
implicit ec: DatabaseExecutionContext)
extends BlockDataHandler[FutureApplicationResult] {
def upsert(block: Block): FutureApplicationResult[Block] = Future {
blockBlockingDataHandler.upsert(block)
def insert(block: Block): FutureApplicationResult[Block] = Future {
blockBlockingDataHandler.insert(block)
}
def getBy(blockhash: Blockhash): FutureApplicationResult[Block] = Future {

10
server/app/com/xsn/explorer/errors/blockErrors.scala

@ -27,3 +27,13 @@ case object BlockUnknownError extends BlockError with ServerError {
override def cause: Option[Throwable] = None
override def toPublicErrorList(messagesApi: MessagesApi)(implicit lang: Lang): List[PublicError] = List.empty
}
case object RepeatedBlockhashError extends BlockError with ServerError {
override def cause: Option[Throwable] = None
override def toPublicErrorList(messagesApi: MessagesApi)(implicit lang: Lang): List[PublicError] = List.empty
}
case object RepeatedBlockHeightError extends BlockError with ServerError {
override def cause: Option[Throwable] = None
override def toPublicErrorList(messagesApi: MessagesApi)(implicit lang: Lang): List[PublicError] = List.empty
}

14
server/conf/evolutions/default/4.sql

@ -0,0 +1,14 @@
# --- !Ups
DROP INDEX blocks_height_index;
ALTER TABLE blocks
ADD CONSTRAINT blocks_height_unique UNIQUE (height);
# --- !Downs
ALTER TABLE blocks
DROP CONSTRAINT blocks_height_unique;
CREATE INDEX blocks_height_index ON blocks USING BTREE (height);

44
server/test/com/xsn/explorer/data/BlockPostgresDataHandlerSpec.scala

@ -3,35 +3,49 @@ package com.xsn.explorer.data
import com.xsn.explorer.data.anorm.BlockPostgresDataHandler
import com.xsn.explorer.data.anorm.dao.BlockPostgresDAO
import com.xsn.explorer.data.common.PostgresDataHandlerSpec
import com.xsn.explorer.errors.BlockNotFoundError
import com.xsn.explorer.errors.{BlockNotFoundError, RepeatedBlockHeightError, RepeatedBlockhashError}
import com.xsn.explorer.helpers.BlockLoader
import com.xsn.explorer.models.Blockhash
import com.xsn.explorer.models.rpc.Block
import org.scalactic.Bad
import org.scalatest.BeforeAndAfter
class BlockPostgresDataHandlerSpec extends PostgresDataHandlerSpec {
class BlockPostgresDataHandlerSpec extends PostgresDataHandlerSpec with BeforeAndAfter {
before {
clearDatabase()
}
lazy val dataHandler = new BlockPostgresDataHandler(database, new BlockPostgresDAO)
"upsert" should {
"insert" should {
"add a new block" in {
// PoS block
val block = BlockLoader.get("1ca318b7a26ed67ca7c8c9b5069d653ba224bf86989125d1dfbb0973b7d6a5e0")
val result = dataHandler.upsert(block)
val result = dataHandler.insert(block)
result.isGood mustEqual true
matches(block, result.get)
}
"override an existing block" in {
"fail on existing blockhash" in {
val block = BlockLoader.get("1ca318b7a26ed67ca7c8c9b5069d653ba224bf86989125d1dfbb0973b7d6a5e0")
dataHandler.upsert(block)
dataHandler.insert(block).isGood mustEqual true
val newBlock = BlockLoader.get("25762bf01143f7fe34912c926e0b95528b082c6323de35516de0fc321f5d8058").copy(hash = block.hash)
val expected = newBlock.copy(hash = block.hash)
val result = dataHandler.upsert(newBlock)
result.isGood mustEqual true
matches(expected, result.get)
val newBlock = BlockLoader.get("25762bf01143f7fe34912c926e0b95528b082c6323de35516de0fc321f5d8058")
.copy(hash = block.hash)
val result = dataHandler.insert(newBlock)
result mustEqual Bad(RepeatedBlockhashError).accumulating
}
"fail on existing height" in {
val block = BlockLoader.get("1ca318b7a26ed67ca7c8c9b5069d653ba224bf86989125d1dfbb0973b7d6a5e0")
dataHandler.insert(block).isGood mustEqual true
val newBlock = BlockLoader.get("25762bf01143f7fe34912c926e0b95528b082c6323de35516de0fc321f5d8058")
.copy(height = block.height)
val result = dataHandler.insert(newBlock)
result mustEqual Bad(RepeatedBlockHeightError).accumulating
}
}
@ -39,7 +53,7 @@ class BlockPostgresDataHandlerSpec extends PostgresDataHandlerSpec {
"return a block" in {
val block = BlockLoader.get("1ca318b7a26ed67ca7c8c9b5069d653ba224bf86989125d1dfbb0973b7d6a5e0")
dataHandler.upsert(block)
dataHandler.insert(block)
val result = dataHandler.getBy(block.hash)
result.isGood mustEqual true
@ -57,7 +71,7 @@ class BlockPostgresDataHandlerSpec extends PostgresDataHandlerSpec {
"delete" should {
"delete a block" in {
val block = BlockLoader.get("1ca318b7a26ed67ca7c8c9b5069d653ba224bf86989125d1dfbb0973b7d6a5e0")
dataHandler.upsert(block)
dataHandler.insert(block)
val result = dataHandler.delete(block.hash)
result.isGood mustEqual true
@ -80,7 +94,7 @@ class BlockPostgresDataHandlerSpec extends PostgresDataHandlerSpec {
val block1 = BlockLoader.get("000003fb382f6892ae96594b81aa916a8923c70701de4e7054aac556c7271ef7")
val block2 = BlockLoader.get("000004645e2717b556682e3c642a4c6e473bf25c653ff8e8c114a3006040ffb8")
List(block1, block2, block0).foreach(dataHandler.upsert)
List(block1, block2, block0).foreach(dataHandler.insert)
val result = dataHandler.getLatestBlock()
result.isGood mustEqual true
@ -103,7 +117,7 @@ class BlockPostgresDataHandlerSpec extends PostgresDataHandlerSpec {
val block1 = BlockLoader.get("000003fb382f6892ae96594b81aa916a8923c70701de4e7054aac556c7271ef7")
val block2 = BlockLoader.get("000004645e2717b556682e3c642a4c6e473bf25c653ff8e8c114a3006040ffb8")
List(block1, block2, block0).map(dataHandler.upsert).foreach(_.isGood mustEqual true)
List(block1, block2, block0).map(dataHandler.insert).foreach(_.isGood mustEqual true)
val result = dataHandler.getFirstBlock()
result.isGood mustEqual true

28
server/test/com/xsn/explorer/processors/BlockEventsProcessorSpec.scala

@ -1,18 +1,25 @@
package com.xsn.explorer.processors
import com.alexitc.playsonify.core.FutureApplicationResult
import com.xsn.explorer.data.anorm.dao.{BalancePostgresDAO, BlockPostgresDAO, StatisticsPostgresDAO, TransactionPostgresDAO}
import com.xsn.explorer.data.anorm.interpreters.FieldOrderingSQLInterpreter
import com.xsn.explorer.data.anorm.{BalancePostgresDataHandler, BlockPostgresDataHandler, DatabasePostgresSeeder, StatisticsPostgresDataHandler}
import com.xsn.explorer.data.async.{BlockFutureDataHandler, DatabaseFutureSeeder}
import com.xsn.explorer.data.common.PostgresDataHandlerSpec
import com.xsn.explorer.errors.{BlockNotFoundError, TransactionNotFoundError}
import com.xsn.explorer.helpers.{BlockLoader, Executors, FileBasedXSNService}
import com.xsn.explorer.models.base._
import com.xsn.explorer.models.fields.BalanceField
import com.xsn.explorer.models.rpc.Block
import com.xsn.explorer.models.rpc.{Block, Transaction}
import com.xsn.explorer.models.{Blockhash, TransactionId}
import com.xsn.explorer.processors.BlockEventsProcessor.{NewBlockAppended, RechainDone}
import com.xsn.explorer.services.TransactionService
import org.scalactic.{Bad, Good}
import org.scalatest.BeforeAndAfter
import org.scalatest.concurrent.ScalaFutures
import scala.concurrent.Future
class BlockEventsProcessorSpec extends PostgresDataHandlerSpec with ScalaFutures with BeforeAndAfter {
lazy val dataHandler = new BlockPostgresDataHandler(database, new BlockPostgresDAO)
@ -56,10 +63,10 @@ class BlockEventsProcessorSpec extends PostgresDataHandlerSpec with ScalaFutures
val block2 = BlockLoader.get("000004645e2717b556682e3c642a4c6e473bf25c653ff8e8c114a3006040ffb8")
val block3 = BlockLoader.get("00000766115b26ecbc09cd3a3db6870fdaf2f049d65a910eb2f2b48b566ca7bd")
List(block1, block2).map(dataHandler.upsert).foreach(_.isGood mustEqual true)
List(block1, block2).map(dataHandler.insert).foreach(_.isGood mustEqual true)
whenReady(processor.newLatestBlock(block3.hash)) { result =>
result.isGood mustEqual true
result mustEqual Good(NewBlockAppended(block3))
val blocks = List(block1, block2, block3)
verifyBlockchain(blocks)
}
@ -70,12 +77,17 @@ class BlockEventsProcessorSpec extends PostgresDataHandlerSpec with ScalaFutures
val block2 = BlockLoader.get("000004645e2717b556682e3c642a4c6e473bf25c653ff8e8c114a3006040ffb8")
val block3 = BlockLoader.get("00000766115b26ecbc09cd3a3db6870fdaf2f049d65a910eb2f2b48b566ca7bd")
List(block1, block2, block3).map(dataHandler.upsert).foreach(_.isGood mustEqual true)
List(block1, block2, block3).map(dataHandler.insert).foreach(_.isGood mustEqual true)
whenReady(processor.newLatestBlock(block2.hash)) {
case Good(RechainDone(orphanBlock, newBlock)) =>
orphanBlock.hash mustEqual block3.hash
newBlock.hash mustEqual block2.hash
whenReady(processor.newLatestBlock(block2.hash)) { result =>
result.isGood mustEqual true
val blocks = List(block1, block2)
verifyBlockchain(blocks)
case _ => fail()
}
}
@ -84,7 +96,7 @@ class BlockEventsProcessorSpec extends PostgresDataHandlerSpec with ScalaFutures
val block2 = BlockLoader.get("000004645e2717b556682e3c642a4c6e473bf25c653ff8e8c114a3006040ffb8")
val block3 = BlockLoader.get("00000766115b26ecbc09cd3a3db6870fdaf2f049d65a910eb2f2b48b566ca7bd")
List(block2, block3).map(dataHandler.upsert).foreach(_.isGood mustEqual true)
List(block2, block3).map(dataHandler.insert).foreach(_.isGood mustEqual true)
whenReady(processor.newLatestBlock(block1.hash)) { result =>
result.isGood mustEqual true
@ -98,7 +110,7 @@ class BlockEventsProcessorSpec extends PostgresDataHandlerSpec with ScalaFutures
val block2 = BlockLoader.get("000004645e2717b556682e3c642a4c6e473bf25c653ff8e8c114a3006040ffb8")
val block3 = BlockLoader.get("00000766115b26ecbc09cd3a3db6870fdaf2f049d65a910eb2f2b48b566ca7bd")
List(block1, block2, block3).map(dataHandler.upsert).foreach(_.isGood mustEqual true)
List(block1, block2, block3).map(dataHandler.insert).foreach(_.isGood mustEqual true)
whenReady(processor.newLatestBlock(block1.hash)) { result =>
result.isGood mustEqual true

Loading…
Cancel
Save