From 87ccd51257c383924fd71ad7fa4c75fed83e940c Mon Sep 17 00:00:00 2001 From: Alexis Hernandez Date: Sat, 7 Apr 2018 00:56:52 -0500 Subject: [PATCH] server: Add the BlockPostgresDataHandler --- .../xsn/explorer/data/BlockDataHandler.scala | 14 ++++ .../data/anorm/AnormPostgresDataHandler.scala | 60 ++++++++++++++ .../data/anorm/BlockPostgresDataHandler.scala | 23 ++++++ .../data/anorm/dao/BlockPostgresDAO.scala | 59 ++++++++++++++ .../data/anorm/parsers/BlockParsers.scala | 78 +++++++++++++++++++ .../com/xsn/explorer/errors/blockErrors.scala | 7 +- .../xsn/explorer/errors/postgresErrors.scala | 14 ++++ .../explorer/modules/DataHandlerModule.scala | 12 +++ server/conf/application.conf | 1 + server/conf/evolutions/default/1.sql | 28 +++++++ .../data/BlockPostgresDataHandlerSpec.scala | 52 +++++++++++++ 11 files changed, 347 insertions(+), 1 deletion(-) create mode 100644 server/app/com/xsn/explorer/data/BlockDataHandler.scala create mode 100644 server/app/com/xsn/explorer/data/anorm/AnormPostgresDataHandler.scala create mode 100644 server/app/com/xsn/explorer/data/anorm/BlockPostgresDataHandler.scala create mode 100644 server/app/com/xsn/explorer/data/anorm/dao/BlockPostgresDAO.scala create mode 100644 server/app/com/xsn/explorer/data/anorm/parsers/BlockParsers.scala create mode 100644 server/app/com/xsn/explorer/errors/postgresErrors.scala create mode 100644 server/app/com/xsn/explorer/modules/DataHandlerModule.scala create mode 100644 server/conf/evolutions/default/1.sql create mode 100644 server/test/com/xsn/explorer/data/BlockPostgresDataHandlerSpec.scala diff --git a/server/app/com/xsn/explorer/data/BlockDataHandler.scala b/server/app/com/xsn/explorer/data/BlockDataHandler.scala new file mode 100644 index 0000000..aea973d --- /dev/null +++ b/server/app/com/xsn/explorer/data/BlockDataHandler.scala @@ -0,0 +1,14 @@ +package com.xsn.explorer.data + +import com.alexitc.playsonify.core.ApplicationResult +import com.xsn.explorer.models.rpc.Block + +import 
scala.language.higherKinds + +trait BlockDataHandler[F[_]] { + + def create(block: Block): F[Block] +} + +trait BlockBlockingDataHandler extends BlockDataHandler[ApplicationResult] + diff --git a/server/app/com/xsn/explorer/data/anorm/AnormPostgresDataHandler.scala b/server/app/com/xsn/explorer/data/anorm/AnormPostgresDataHandler.scala new file mode 100644 index 0000000..b60d4fa --- /dev/null +++ b/server/app/com/xsn/explorer/data/anorm/AnormPostgresDataHandler.scala @@ -0,0 +1,60 @@ +package com.xsn.explorer.data.anorm + +import java.sql.Connection + +import com.alexitc.playsonify.core.ApplicationResult +import com.xsn.explorer.errors.{PostgresError, PostgresForeignKeyViolationError, UnknownPostgresError} +import org.postgresql.util.PSQLException +import org.scalactic.Bad +import play.api.db.Database + +/** + * Allows us to map a [[PSQLException]] to a subtype of [[PostgresError]]. + * + * This is helpful to differentiate between errors caused by input data + * and failures that cannot be prevented; such failures are thrown. 
+ * + * The errors are mapped based on postgres error codes: + * - see: https://www.postgresql.org/docs/9.6/static/errcodes-appendix.html + */ +trait AnormPostgresDataHandler { + + protected def database: Database + + def withConnection[A](block: Connection => ApplicationResult[A]): ApplicationResult[A] = { + try { + database.withConnection(block) + } catch { + case e: PSQLException if isIntegrityConstraintViolationError(e) => + val error = createForeignKeyViolationError(e).getOrElse(UnknownPostgresError(e)) + Bad(error).accumulating + } + } + + def withTransaction[A](block: Connection => ApplicationResult[A]): ApplicationResult[A] = { + try { + database.withTransaction(block) + } catch { + case e: PSQLException if isIntegrityConstraintViolationError(e) => + val error = createForeignKeyViolationError(e).getOrElse(UnknownPostgresError(e)) + Bad(error).accumulating + } + } + + private def isIntegrityConstraintViolationError(e: PSQLException) = e.getSQLState startsWith "23" + private def createForeignKeyViolationError(e: PSQLException): Option[PostgresError] = { + // assumes not null + val detail = e.getServerErrorMessage.getDetail + + // expected format = [Key (column)=(given_value) is not present in table "table".] 
+ val regex = raw"Key (.*)=.*".r + detail match { + case regex(dirtyColumn, _*) => + val column = dirtyColumn.substring(1, dirtyColumn.length - 1) + val error = PostgresForeignKeyViolationError(column, e) + Some(error) + + case _ => None + } + } +} diff --git a/server/app/com/xsn/explorer/data/anorm/BlockPostgresDataHandler.scala b/server/app/com/xsn/explorer/data/anorm/BlockPostgresDataHandler.scala new file mode 100644 index 0000000..a547dbe --- /dev/null +++ b/server/app/com/xsn/explorer/data/anorm/BlockPostgresDataHandler.scala @@ -0,0 +1,23 @@ +package com.xsn.explorer.data.anorm + +import javax.inject.Inject + +import com.alexitc.playsonify.core.ApplicationResult +import com.xsn.explorer.data.BlockBlockingDataHandler +import com.xsn.explorer.data.anorm.dao.BlockPostgresDAO +import com.xsn.explorer.errors.BlockUnknownError +import com.xsn.explorer.models.rpc.Block +import org.scalactic.{One, Or} +import play.api.db.Database + +class BlockPostgresDataHandler @Inject() ( + override val database: Database, + blockPostgresDAO: BlockPostgresDAO) + extends BlockBlockingDataHandler + with AnormPostgresDataHandler { + + override def create(block: Block): ApplicationResult[Block] = database.withConnection { implicit conn => + val maybe = blockPostgresDAO.create(block) + Or.from(maybe, One(BlockUnknownError)) + } +} diff --git a/server/app/com/xsn/explorer/data/anorm/dao/BlockPostgresDAO.scala b/server/app/com/xsn/explorer/data/anorm/dao/BlockPostgresDAO.scala new file mode 100644 index 0000000..8bb004f --- /dev/null +++ b/server/app/com/xsn/explorer/data/anorm/dao/BlockPostgresDAO.scala @@ -0,0 +1,59 @@ +package com.xsn.explorer.data.anorm.dao + +import java.sql.Connection + +import anorm._ +import com.xsn.explorer.data.anorm.parsers.BlockParsers._ +import com.xsn.explorer.models.rpc.Block + +class BlockPostgresDAO { + + def create(block: Block)(implicit conn: Connection): Option[Block] = { + SQL( + """ + |INSERT INTO blocks + | ( + | hash, previous_blockhash, 
next_blockhash, tpos_contract, merkle_root, size, + | height, version, time, median_time, nonce, bits, chainwork, difficulty + | ) + |VALUES + | ( + | {hash}, {previous_blockhash}, {next_blockhash}, {tpos_contract}, {merkle_root}, {size}, + | {height}, {version}, {time}, {median_time}, {nonce}, {bits}, {chainwork}, {difficulty} + | ) + |ON CONFLICT (hash) + |DO UPDATE + | SET previous_blockhash = EXCLUDED.previous_blockhash, + | next_blockhash = EXCLUDED.next_blockhash, + | tpos_contract = EXCLUDED.tpos_contract, + | merkle_root = EXCLUDED.merkle_root, + | size = EXCLUDED.size, + | height = EXCLUDED.height, + | version = EXCLUDED.version, + | time = EXCLUDED.time, + | median_time = EXCLUDED.median_time, + | nonce = EXCLUDED.nonce, + | bits = EXCLUDED.bits, + | chainwork = EXCLUDED.chainwork, + | difficulty = EXCLUDED.difficulty + |RETURNING hash, previous_blockhash, next_blockhash, tpos_contract, merkle_root, size, + | height, version, time, median_time, nonce, bits, chainwork, difficulty + """.stripMargin + ).on( + 'hash -> block.hash.string, + 'previous_blockhash -> block.previousBlockhash.map(_.string), + 'next_blockhash -> block.nextBlockhash.map(_.string), + 'tpos_contract -> block.tposContract.map(_.string), + 'merkle_root -> block.merkleRoot.string, + 'size -> block.size.int, + 'height -> block.height.int, + 'version -> block.version, + 'time -> block.time, + 'median_time -> block.medianTime, + 'nonce -> block.nonce, + 'bits -> block.bits, + 'chainwork -> block.chainwork, + 'difficulty -> block.difficulty + ).as(parseBlock.singleOpt).flatten + } +} diff --git a/server/app/com/xsn/explorer/data/anorm/parsers/BlockParsers.scala b/server/app/com/xsn/explorer/data/anorm/parsers/BlockParsers.scala new file mode 100644 index 0000000..721569a --- /dev/null +++ b/server/app/com/xsn/explorer/data/anorm/parsers/BlockParsers.scala @@ -0,0 +1,78 @@ +package com.xsn.explorer.data.anorm.parsers + +import anorm.SqlParser._ +import anorm._ +import com.xsn.explorer.models._ 
+import com.xsn.explorer.models.rpc.Block + +object BlockParsers { + + val parseHash = str("hash").map(Blockhash.from) + val parseNextBlockhash = str("next_blockhash").map(Blockhash.from) + val parsePreviousBlockhash = str("previous_blockhash").map(Blockhash.from) + val parseTposContract = str("tpos_contract").map(TransactionId.from) + val parseMerkleRoot = str("merkle_root").map(Blockhash.from) + val parseSize = int("size").map(Size.apply) + val parseHeight = int("height").map(Height.apply) + val parseVersion = int("version") + val parseTime = long("time") + val parseMedianTime = long("median_time") + val parseNonce = int("nonce") + val parseBits = str("bits") + val parseChainwork = str("chainwork") + val parseDifficulty = get[BigDecimal]("difficulty") + + val parseBlock = ( + parseHash ~ + parseNextBlockhash.? ~ + parsePreviousBlockhash.? ~ + parseTposContract.? ~ + parseMerkleRoot ~ + parseSize ~ + parseHeight ~ + parseVersion ~ + parseTime ~ + parseMedianTime ~ + parseNonce ~ + parseBits ~ + parseChainwork ~ + parseDifficulty).map { + + case hashMaybe ~ + nextBlockhash ~ + previousBlockhash ~ + tposContract ~ + merkleRootMaybe ~ + size ~ + height ~ + version ~ + time ~ + medianTime ~ + nonce ~ + bits ~ + chainwork ~ + difficulty => + + for { + hash <- hashMaybe + merkleRoot <- merkleRootMaybe + } yield Block( + hash = hash, + previousBlockhash = previousBlockhash.flatten, + nextBlockhash = nextBlockhash.flatten, + tposContract = tposContract.flatten, + merkleRoot = merkleRoot, + size = size, + height = height, + time = time, + medianTime = medianTime, + nonce = nonce, + bits = bits, + chainwork = chainwork, + difficulty = difficulty, + version = version, + transactions = List.empty, + confirmations = Confirmations(0) + ) + } +} diff --git a/server/app/com/xsn/explorer/errors/blockErrors.scala b/server/app/com/xsn/explorer/errors/blockErrors.scala index 38768af..a008e65 100644 --- a/server/app/com/xsn/explorer/errors/blockErrors.scala +++ 
b/server/app/com/xsn/explorer/errors/blockErrors.scala @@ -1,6 +1,6 @@ package com.xsn.explorer.errors -import com.alexitc.playsonify.models.{FieldValidationError, InputValidationError, PublicError} +import com.alexitc.playsonify.models.{FieldValidationError, InputValidationError, PublicError, ServerError} import play.api.i18n.{Lang, MessagesApi} sealed trait BlockError @@ -22,3 +22,8 @@ case object BlockNotFoundError extends BlockError with InputValidationError { List(error) } } + +case object BlockUnknownError extends BlockError with ServerError { + override def cause: Option[Throwable] = None + override def toPublicErrorList(messagesApi: MessagesApi)(implicit lang: Lang): List[PublicError] = List.empty +} diff --git a/server/app/com/xsn/explorer/errors/postgresErrors.scala b/server/app/com/xsn/explorer/errors/postgresErrors.scala new file mode 100644 index 0000000..ebf1d9f --- /dev/null +++ b/server/app/com/xsn/explorer/errors/postgresErrors.scala @@ -0,0 +1,14 @@ +package com.xsn.explorer.errors + +import com.alexitc.playsonify.models.ServerError +import org.postgresql.util.PSQLException + +sealed trait PostgresError extends ServerError { + + def psqlException: PSQLException + + override def cause: Option[Throwable] = Option(psqlException) +} + +case class UnknownPostgresError(psqlException: PSQLException) extends PostgresError +case class PostgresForeignKeyViolationError(column: String, psqlException: PSQLException) extends PostgresError diff --git a/server/app/com/xsn/explorer/modules/DataHandlerModule.scala b/server/app/com/xsn/explorer/modules/DataHandlerModule.scala new file mode 100644 index 0000000..024f937 --- /dev/null +++ b/server/app/com/xsn/explorer/modules/DataHandlerModule.scala @@ -0,0 +1,12 @@ +package com.xsn.explorer.modules + +import com.google.inject.AbstractModule +import com.xsn.explorer.data.BlockBlockingDataHandler +import com.xsn.explorer.data.anorm.BlockPostgresDataHandler + +class DataHandlerModule extends AbstractModule { + + 
override def configure(): Unit = { + bind(classOf[BlockBlockingDataHandler]).to(classOf[BlockPostgresDataHandler]) + } +} diff --git a/server/conf/application.conf b/server/conf/application.conf index 393275c..2a0f47e 100644 --- a/server/conf/application.conf +++ b/server/conf/application.conf @@ -31,6 +31,7 @@ rpc { password = ${?XSN_RPC_PASSWORD} } +play.modules.enabled += "com.xsn.explorer.modules.DataHandlerModule" play.modules.enabled += "com.xsn.explorer.modules.ConfigModule" play.modules.enabled += "com.xsn.explorer.modules.ExecutorsModule" play.modules.enabled += "com.xsn.explorer.modules.XSNServiceModule" diff --git a/server/conf/evolutions/default/1.sql b/server/conf/evolutions/default/1.sql new file mode 100644 index 0000000..2b5cf85 --- /dev/null +++ b/server/conf/evolutions/default/1.sql @@ -0,0 +1,28 @@ + +# --- !Ups + +CREATE TABLE blocks( + hash VARCHAR(64) NOT NULL, + previous_blockhash VARCHAR(64) NULL, + next_blockhash VARCHAR(64) NULL, + merkle_root VARCHAR(64) NULL, + tpos_contract VARCHAR(64) NULL, + size INT NOT NULL, + height INT NOT NULL, + version INT NOT NULL, + time BIGINT NOT NULL, + median_time BIGINT NOT NULL, + nonce INT NOT NULL, + bits VARCHAR(50) NOT NULL, + chainwork VARCHAR(80) NOT NULL, + difficulty DECIMAL(30, 20), + -- constraints + CONSTRAINT blocks_hash_pk PRIMARY KEY (hash) +); + +CREATE INDEX blocks_height_index ON blocks USING BTREE (height); +CREATE INDEX blocks_time_index ON blocks USING BTREE (time); + +# --- !Downs + +DROP TABLE blocks; diff --git a/server/test/com/xsn/explorer/data/BlockPostgresDataHandlerSpec.scala b/server/test/com/xsn/explorer/data/BlockPostgresDataHandlerSpec.scala new file mode 100644 index 0000000..005ca5f --- /dev/null +++ b/server/test/com/xsn/explorer/data/BlockPostgresDataHandlerSpec.scala @@ -0,0 +1,52 @@ +package com.xsn.explorer.data + +import com.xsn.explorer.data.anorm.BlockPostgresDataHandler +import com.xsn.explorer.data.anorm.dao.BlockPostgresDAO +import 
com.xsn.explorer.data.common.PostgresDataHandlerSpec +import com.xsn.explorer.helpers.BlockLoader +import com.xsn.explorer.models.rpc.Block + +class BlockPostgresDataHandlerSpec extends PostgresDataHandlerSpec { + + lazy val dataHandler = new BlockPostgresDataHandler(database, new BlockPostgresDAO) + + "create" should { + "add a new block" in { + // PoS block + val block = BlockLoader.get("1ca318b7a26ed67ca7c8c9b5069d653ba224bf86989125d1dfbb0973b7d6a5e0") + + val result = dataHandler.create(block) + result.isGood mustEqual true + matches(block, result.get) + } + + "override an existing block" in { + val block = BlockLoader.get("1ca318b7a26ed67ca7c8c9b5069d653ba224bf86989125d1dfbb0973b7d6a5e0") + dataHandler.create(block) + + val newBlock = BlockLoader.get("25762bf01143f7fe34912c926e0b95528b082c6323de35516de0fc321f5d8058").copy(hash = block.hash) + val expected = newBlock.copy(hash = block.hash) + val result = dataHandler.create(newBlock) + result.isGood mustEqual true + matches(expected, result.get) + } + } + + private def matches(expected: Block, result: Block) = { + // NOTE: transactions and confirmations are not matched intentionally + result.hash mustEqual expected.hash + result.tposContract mustEqual expected.tposContract + result.nextBlockhash mustEqual expected.nextBlockhash + result.previousBlockhash mustEqual expected.previousBlockhash + result.merkleRoot mustEqual expected.merkleRoot + result.size mustEqual expected.size + result.height mustEqual expected.height + result.version mustEqual expected.version + result.medianTime mustEqual expected.medianTime + result.time mustEqual expected.time + result.bits mustEqual expected.bits + result.chainwork mustEqual expected.chainwork + result.difficulty mustEqual expected.difficulty + result.nonce mustEqual expected.nonce + } +}