From f10e0ad452bc38a91c3e8b272ab766cf7dbdd055 Mon Sep 17 00:00:00 2001
From: Alexis Hernandez
Date: Sat, 2 Jun 2018 21:37:26 -0500
Subject: [PATCH] server: Fix for #12 avoid corrupting the previous_blockhash

---
 .../data/anorm/DatabasePostgresSeeder.scala   |  6 +++
 .../data/anorm/dao/BlockPostgresDAO.scala     | 19 ++++++++++
 .../processors/BlockEventsProcessorSpec.scala | 37 ++++++++++++++++++-
 3 files changed, 61 insertions(+), 1 deletion(-)

diff --git a/server/app/com/xsn/explorer/data/anorm/DatabasePostgresSeeder.scala b/server/app/com/xsn/explorer/data/anorm/DatabasePostgresSeeder.scala
index b9ce4f0..9342786 100644
--- a/server/app/com/xsn/explorer/data/anorm/DatabasePostgresSeeder.scala
+++ b/server/app/com/xsn/explorer/data/anorm/DatabasePostgresSeeder.scala
@@ -71,6 +71,12 @@ class DatabasePostgresSeeder @Inject() (
         .setNextBlockhash(previousBlockhash, command.block.hash)
     }
 
+    // link next block (if possible)
+    command.block.nextBlockhash.foreach { nextBlockhash =>
+      blockPostgresDAO
+        .setPreviousBlockhash(nextBlockhash, command.block.hash)
+    }
+
     result
   }
 
diff --git a/server/app/com/xsn/explorer/data/anorm/dao/BlockPostgresDAO.scala b/server/app/com/xsn/explorer/data/anorm/dao/BlockPostgresDAO.scala
index dc3ac78..f690d95 100644
--- a/server/app/com/xsn/explorer/data/anorm/dao/BlockPostgresDAO.scala
+++ b/server/app/com/xsn/explorer/data/anorm/dao/BlockPostgresDAO.scala
@@ -62,6 +62,25 @@ class BlockPostgresDAO {
     ).as(parseBlock.singleOpt).flatten
   }
 
+  def setPreviousBlockhash(
+      blockhash: Blockhash,
+      previousBlockhash: Blockhash)(
+      implicit conn: Connection): Option[Block] = {
+
+    SQL(
+      """
+        |UPDATE blocks
+        |SET previous_blockhash = {previous_blockhash}
+        |WHERE blockhash = {blockhash}
+        |RETURNING blockhash, previous_blockhash, next_blockhash, tpos_contract, merkle_root, size,
+        |          height, version, time, median_time, nonce, bits, chainwork, difficulty
+      """.stripMargin
+    ).on(
+      'blockhash -> blockhash.string,
+      'previous_blockhash -> previousBlockhash.string
+    ).as(parseBlock.singleOpt).flatten
+  }
+
   def getBy(blockhash: Blockhash)(implicit conn: Connection): Option[Block] = {
     SQL(
       """
diff --git a/server/test/com/xsn/explorer/processors/BlockEventsProcessorSpec.scala b/server/test/com/xsn/explorer/processors/BlockEventsProcessorSpec.scala
index 8810920..5e263b8 100644
--- a/server/test/com/xsn/explorer/processors/BlockEventsProcessorSpec.scala
+++ b/server/test/com/xsn/explorer/processors/BlockEventsProcessorSpec.scala
@@ -12,7 +12,7 @@ import com.xsn.explorer.helpers.{BlockLoader, Executors, FileBasedXSNService}
 import com.xsn.explorer.models.fields.BalanceField
 import com.xsn.explorer.models.rpc.{Block, Transaction}
 import com.xsn.explorer.models.{Blockhash, TransactionId}
-import com.xsn.explorer.processors.BlockEventsProcessor.{MissingBlockIgnored, NewBlockAppended, RechainDone, ReplacedByBlockHeight}
+import com.xsn.explorer.processors.BlockEventsProcessor._
 import com.xsn.explorer.services.TransactionService
 import org.scalactic.{Bad, Good}
 import org.scalatest.BeforeAndAfter
@@ -290,6 +290,41 @@ class BlockEventsProcessorSpec extends PostgresDataHandlerSpec with ScalaFutures
         dataHandler.getBy(block2.hash) mustEqual Bad(BlockNotFoundError).accumulating
       }
     }
+
+    "keep the correct previous_blockhash on rare events" in {
+      // see https://github.com/X9Developers/block-explorer/issues/12
+      val block1 = BlockLoader.get("000003fb382f6892ae96594b81aa916a8923c70701de4e7054aac556c7271ef7")
+      val block2 = BlockLoader.get("000004645e2717b556682e3c642a4c6e473bf25c653ff8e8c114a3006040ffb8")
+      val block3 = BlockLoader.get("00000766115b26ecbc09cd3a3db6870fdaf2f049d65a910eb2f2b48b566ca7bd")
+      val rareBlock3 = block3.copy(previousBlockhash = Some(block1.hash))
+
+      val xsnService = new FileBasedXSNService {
+        override def getBlock(blockhash: Blockhash): FutureApplicationResult[Block] = {
+          if (blockhash == block3.hash) {
+            Future.successful(Good(rareBlock3))
+          } else {
+            super.getBlock(blockhash)
+          }
+        }
+      }
+
+      val processor = new BlockEventsProcessor(
+        xsnService,
+        new TransactionService(xsnService)(Executors.globalEC),
+        new DatabaseFutureSeeder(dataSeeder)(Executors.databaseEC),
+        new BlockFutureDataHandler(dataHandler)(Executors.databaseEC))
+
+      List(block1, rareBlock3)
+        .map(_.hash)
+        .map(processor.processBlock)
+        .foreach { whenReady(_) { _.isGood mustEqual true } }
+
+      whenReady(processor.processBlock(block2.hash)) { result =>
+        result mustEqual Good(MissingBlockProcessed(block2))
+        val blocks = List(block1, block2, block3)
+        verifyBlockchain(blocks)
+      }
+    }
   }
 
   private def verifyBlockchain(blocks: List[Block]) = {