Alexis Hernandez
7 years ago
11 changed files with 347 additions and 1 deletion
@@ -0,0 +1,14 @@
package com.xsn.explorer.data

import com.alexitc.playsonify.core.ApplicationResult
import com.xsn.explorer.models.rpc.Block

import scala.language.higherKinds

trait BlockDataHandler[F[_]] {

  def create(block: Block): F[Block]
}

trait BlockBlockingDataHandler extends BlockDataHandler[ApplicationResult]
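The `F[_]` parameter leaves the effect type open, and `BlockBlockingDataHandler` fixes it to the synchronous `ApplicationResult`. As a hedged illustration only (not part of this commit), the same trait could back a non-blocking handler by plugging in a Future-based alias; the alias and trait in this sketch are hypothetical and defined locally.

package com.xsn.explorer.data.async

import scala.concurrent.Future

import com.alexitc.playsonify.core.ApplicationResult
import com.xsn.explorer.data.BlockDataHandler

object FutureTypes {
  // hypothetical alias: an ApplicationResult that is computed asynchronously
  type FutureApplicationResult[A] = Future[ApplicationResult[A]]
}

// hypothetical async counterpart of BlockBlockingDataHandler
trait BlockFutureDataHandler extends BlockDataHandler[FutureTypes.FutureApplicationResult]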
@@ -0,0 +1,60 @@
package com.xsn.explorer.data.anorm

import java.sql.Connection

import com.alexitc.playsonify.core.ApplicationResult
import com.xsn.explorer.errors.{PostgresError, PostgresForeignKeyViolationError, UnknownPostgresError}
import org.postgresql.util.PSQLException
import org.scalactic.Bad
import play.api.db.Database

/**
 * Allows us to map a [[PSQLException]] to a subtype of [[PostgresError]].
 *
 * This is helpful to differentiate between errors caused by input data
 * and failures that cannot be prevented; the latter are simply thrown.
 *
 * The errors are mapped based on the postgres error codes:
 * - see: https://www.postgresql.org/docs/9.6/static/errcodes-appendix.html
 */
trait AnormPostgresDataHandler {

  protected def database: Database

  def withConnection[A](block: Connection => ApplicationResult[A]): ApplicationResult[A] = {
    try {
      database.withConnection(block)
    } catch {
      case e: PSQLException if isIntegrityConstraintViolationError(e) =>
        val error = createForeignKeyViolationError(e).getOrElse(UnknownPostgresError(e))
        Bad(error).accumulating
    }
  }

  def withTransaction[A](block: Connection => ApplicationResult[A]): ApplicationResult[A] = {
    try {
      database.withTransaction(block)
    } catch {
      case e: PSQLException if isIntegrityConstraintViolationError(e) =>
        val error = createForeignKeyViolationError(e).getOrElse(UnknownPostgresError(e))
        Bad(error).accumulating
    }
  }

  private def isIntegrityConstraintViolationError(e: PSQLException) = e.getSQLState startsWith "23"

  private def createForeignKeyViolationError(e: PSQLException): Option[PostgresError] = {
    // assumes the server error message detail is not null
    val detail = e.getServerErrorMessage.getDetail

    // expected format = [Key (column)=(given_value) is not present in table "table".]
    val regex = raw"Key (.*)=.*".r
    detail match {
      case regex(dirtyColumn, _*) =>
        val column = dirtyColumn.substring(1, dirtyColumn.length - 1)
        val error = PostgresForeignKeyViolationError(column, e)
        Some(error)

      case _ => None
    }
  }
}
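To make the error-code mapping above concrete, here is a small standalone sketch (illustrative only, not part of the commit) of what createForeignKeyViolationError extracts from a typical foreign-key violation detail message; the message text is made up.

object ForeignKeyMappingSketch {
  // mirrors the regex and parenthesis-stripping done in createForeignKeyViolationError
  def columnOf(detail: String): Option[String] = {
    val regex = raw"Key (.*)=.*".r
    detail match {
      case regex(dirtyColumn, _*) => Some(dirtyColumn.substring(1, dirtyColumn.length - 1))
      case _ => None
    }
  }

  def main(args: Array[String]): Unit = {
    // prints Some(blockhash): the column name is recovered from the server detail
    println(columnOf("""Key (blockhash)=(abc123) is not present in table "blocks"."""))
  }
}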
@@ -0,0 +1,23 @@
package com.xsn.explorer.data.anorm

import javax.inject.Inject

import com.alexitc.playsonify.core.ApplicationResult
import com.xsn.explorer.data.BlockBlockingDataHandler
import com.xsn.explorer.data.anorm.dao.BlockPostgresDAO
import com.xsn.explorer.errors.BlockUnknownError
import com.xsn.explorer.models.rpc.Block
import org.scalactic.{One, Or}
import play.api.db.Database

class BlockPostgresDataHandler @Inject() (
    override val database: Database,
    blockPostgresDAO: BlockPostgresDAO)
    extends BlockBlockingDataHandler
    with AnormPostgresDataHandler {

  override def create(block: Block): ApplicationResult[Block] = database.withConnection { implicit conn =>
    val maybe = blockPostgresDAO.create(block)
    Or.from(maybe, One(BlockUnknownError))
  }
}
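A minimal usage sketch, assuming an instance of the handler is available (for example through the Guice module added later in this commit). Callers pattern match on the Or result, so a failure such as BlockUnknownError is handled as data rather than as an exception; the store method here is hypothetical.

import com.xsn.explorer.data.BlockBlockingDataHandler
import com.xsn.explorer.models.rpc.Block
import org.scalactic.{Bad, Good}

object CreateBlockSketch {
  // in real code the handler would be injected; it is passed explicitly for the sketch
  def store(handler: BlockBlockingDataHandler, block: Block): Unit = {
    handler.create(block) match {
      case Good(stored) => println(s"stored block ${stored.hash}")
      case Bad(errors)  => println(s"could not store the block: $errors")
    }
  }
}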
@@ -0,0 +1,59 @@
package com.xsn.explorer.data.anorm.dao

import java.sql.Connection

import anorm._
import com.xsn.explorer.data.anorm.parsers.BlockParsers._
import com.xsn.explorer.models.rpc.Block

class BlockPostgresDAO {

  def create(block: Block)(implicit conn: Connection): Option[Block] = {
    SQL(
      """
        |INSERT INTO blocks
        |  (
        |    hash, previous_blockhash, next_blockhash, tpos_contract, merkle_root, size,
        |    height, version, time, median_time, nonce, bits, chainwork, difficulty
        |  )
        |VALUES
        |  (
        |    {hash}, {previous_blockhash}, {next_blockhash}, {tpos_contract}, {merkle_root}, {size},
        |    {height}, {version}, {time}, {median_time}, {nonce}, {bits}, {chainwork}, {difficulty}
        |  )
        |ON CONFLICT (hash)
        |DO UPDATE
        |  SET previous_blockhash = EXCLUDED.previous_blockhash,
        |      next_blockhash = EXCLUDED.next_blockhash,
        |      tpos_contract = EXCLUDED.tpos_contract,
        |      merkle_root = EXCLUDED.merkle_root,
        |      size = EXCLUDED.size,
        |      height = EXCLUDED.height,
        |      version = EXCLUDED.version,
        |      time = EXCLUDED.time,
        |      median_time = EXCLUDED.median_time,
        |      nonce = EXCLUDED.nonce,
        |      bits = EXCLUDED.bits,
        |      chainwork = EXCLUDED.chainwork,
        |      difficulty = EXCLUDED.difficulty
        |RETURNING hash, previous_blockhash, next_blockhash, tpos_contract, merkle_root, size,
        |          height, version, time, median_time, nonce, bits, chainwork, difficulty
      """.stripMargin
    ).on(
      'hash -> block.hash.string,
      'previous_blockhash -> block.previousBlockhash.map(_.string),
      'next_blockhash -> block.nextBlockhash.map(_.string),
      'tpos_contract -> block.tposContract.map(_.string),
      'merkle_root -> block.merkleRoot.string,
      'size -> block.size.int,
      'height -> block.height.int,
      'version -> block.version,
      'time -> block.time,
      'median_time -> block.medianTime,
      'nonce -> block.nonce,
      'bits -> block.bits,
      'chainwork -> block.chainwork,
      'difficulty -> block.difficulty
    ).as(parseBlock.singleOpt).flatten
  }
}
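For reference, a hedged sketch of how this DAO is driven: create needs an implicit java.sql.Connection, so calls go through a connection block, as the handler above does via database.withConnection. Because of the ON CONFLICT (hash) DO UPDATE clause, repeating the call with the same hash overwrites the stored row. The database and block values are assumed to come from the caller; the object is not part of this commit.

import java.sql.Connection

import com.xsn.explorer.data.anorm.dao.BlockPostgresDAO
import com.xsn.explorer.models.rpc.Block
import play.api.db.Database

object DaoUsageSketch {
  def upsert(database: Database, block: Block): Option[Block] = {
    val dao = new BlockPostgresDAO
    database.withConnection { implicit conn: Connection =>
      // the ON CONFLICT (hash) clause makes this call an idempotent upsert
      dao.create(block)
    }
  }
}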
@@ -0,0 +1,78 @@
package com.xsn.explorer.data.anorm.parsers

import anorm.SqlParser._
import anorm._
import com.xsn.explorer.models._
import com.xsn.explorer.models.rpc.Block

object BlockParsers {

  val parseHash = str("hash").map(Blockhash.from)
  val parseNextBlockhash = str("next_blockhash").map(Blockhash.from)
  val parsePreviousBlockhash = str("previous_blockhash").map(Blockhash.from)
  val parseTposContract = str("tpos_contract").map(TransactionId.from)
  val parseMerkleRoot = str("merkle_root").map(Blockhash.from)
  val parseSize = int("size").map(Size.apply)
  val parseHeight = int("height").map(Height.apply)
  val parseVersion = int("version")
  val parseTime = long("time")
  val parseMedianTime = long("median_time")
  val parseNonce = int("nonce")
  val parseBits = str("bits")
  val parseChainwork = str("chainwork")
  val parseDifficulty = get[BigDecimal]("difficulty")

  val parseBlock = (
      parseHash ~
      parseNextBlockhash.? ~
      parsePreviousBlockhash.? ~
      parseTposContract.? ~
      parseMerkleRoot ~
      parseSize ~
      parseHeight ~
      parseVersion ~
      parseTime ~
      parseMedianTime ~
      parseNonce ~
      parseBits ~
      parseChainwork ~
      parseDifficulty).map {

    case hashMaybe ~
        nextBlockhash ~
        previousBlockhash ~
        tposContract ~
        merkleRootMaybe ~
        size ~
        height ~
        version ~
        time ~
        medianTime ~
        nonce ~
        bits ~
        chainwork ~
        difficulty =>

      for {
        hash <- hashMaybe
        merkleRoot <- merkleRootMaybe
      } yield Block(
        hash = hash,
        previousBlockhash = previousBlockhash.flatten,
        nextBlockhash = nextBlockhash.flatten,
        tposContract = tposContract.flatten,
        merkleRoot = merkleRoot,
        size = size,
        height = height,
        time = time,
        medianTime = medianTime,
        nonce = nonce,
        bits = bits,
        chainwork = chainwork,
        difficulty = difficulty,
        version = version,
        transactions = List.empty,
        confirmations = Confirmations(0)
      )
  }
}
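A hedged sketch of consuming the combined parser outside the DAO: parseHash and parseMerkleRoot validate their input (Blockhash.from returns an Option), so parseBlock yields Option[Block] and callers flatten the singleOpt result, exactly as BlockPostgresDAO.create does. The query and object below are illustrative only.

import java.sql.Connection

import anorm._
import com.xsn.explorer.data.anorm.parsers.BlockParsers._
import com.xsn.explorer.models.rpc.Block

object ParserUsageSketch {
  def findByHash(hash: String)(implicit conn: Connection): Option[Block] = {
    SQL("SELECT * FROM blocks WHERE hash = {hash}")
        .on('hash -> hash)
        .as(parseBlock.singleOpt)
        .flatten // singleOpt returns Option[Option[Block]], hence the flatten
  }
}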
@@ -0,0 +1,14 @@
package com.xsn.explorer.errors

import com.alexitc.playsonify.models.ServerError
import org.postgresql.util.PSQLException

sealed trait PostgresError extends ServerError {

  def psqlException: PSQLException

  override def cause: Option[Throwable] = Option(psqlException)
}

case class UnknownPostgresError(psqlException: PSQLException) extends PostgresError
case class PostgresForeignKeyViolationError(column: String, psqlException: PSQLException) extends PostgresError
@@ -0,0 +1,12 @@
package com.xsn.explorer.modules

import com.google.inject.AbstractModule
import com.xsn.explorer.data.BlockBlockingDataHandler
import com.xsn.explorer.data.anorm.BlockPostgresDataHandler

class DataHandlerModule extends AbstractModule {

  override def configure(): Unit = {
    bind(classOf[BlockBlockingDataHandler]).to(classOf[BlockPostgresDataHandler])
  }
}
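A brief sketch of how the binding is consumed: any class can depend on the abstract BlockBlockingDataHandler and Guice resolves it to BlockPostgresDataHandler through this module, assuming the module is enabled in the Play application. ExampleBlockService is hypothetical and not part of this commit.

import javax.inject.Inject

import com.xsn.explorer.data.BlockBlockingDataHandler
import com.xsn.explorer.models.rpc.Block

// hypothetical consumer of the binding declared in DataHandlerModule
class ExampleBlockService @Inject() (blockDataHandler: BlockBlockingDataHandler) {

  // at runtime blockDataHandler is a BlockPostgresDataHandler instance
  def store(block: Block) = blockDataHandler.create(block)
}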
@@ -0,0 +1,28 @@

# --- !Ups

CREATE TABLE blocks(
  hash VARCHAR(64) NOT NULL,
  previous_blockhash VARCHAR(64) NULL,
  next_blockhash VARCHAR(64) NULL,
  merkle_root VARCHAR(64) NULL,
  tpos_contract VARCHAR(64) NULL,
  size INT NOT NULL,
  height INT NOT NULL,
  version INT NOT NULL,
  time BIGINT NOT NULL,
  median_time BIGINT NOT NULL,
  nonce INT NOT NULL,
  bits VARCHAR(50) NOT NULL,
  chainwork VARCHAR(80) NOT NULL,
  difficulty DECIMAL(30, 20),
  -- constraints
  CONSTRAINT blocks_hash_pk PRIMARY KEY (hash)
);

CREATE INDEX blocks_height_index ON blocks USING BTREE (height);
CREATE INDEX blocks_time_index ON blocks USING BTREE (time);

# --- !Downs

DROP TABLE blocks;
@@ -0,0 +1,52 @@
package com.xsn.explorer.data

import com.xsn.explorer.data.anorm.BlockPostgresDataHandler
import com.xsn.explorer.data.anorm.dao.BlockPostgresDAO
import com.xsn.explorer.data.common.PostgresDataHandlerSpec
import com.xsn.explorer.helpers.BlockLoader
import com.xsn.explorer.models.rpc.Block

class BlockPostgresDataHandlerSpec extends PostgresDataHandlerSpec {

  lazy val dataHandler = new BlockPostgresDataHandler(database, new BlockPostgresDAO)

  "create" should {
    "add a new block" in {
      // PoS block
      val block = BlockLoader.get("1ca318b7a26ed67ca7c8c9b5069d653ba224bf86989125d1dfbb0973b7d6a5e0")

      val result = dataHandler.create(block)
      result.isGood mustEqual true
      matches(block, result.get)
    }

    "override an existing block" in {
      val block = BlockLoader.get("1ca318b7a26ed67ca7c8c9b5069d653ba224bf86989125d1dfbb0973b7d6a5e0")
      dataHandler.create(block)

      val newBlock = BlockLoader.get("25762bf01143f7fe34912c926e0b95528b082c6323de35516de0fc321f5d8058").copy(hash = block.hash)
      val expected = newBlock.copy(hash = block.hash)
      val result = dataHandler.create(newBlock)
      result.isGood mustEqual true
      matches(expected, result.get)
    }
  }

  private def matches(expected: Block, result: Block) = {
    // NOTE: transactions and confirmations are not matched intentionally
    result.hash mustEqual expected.hash
    result.tposContract mustEqual expected.tposContract
    result.nextBlockhash mustEqual expected.nextBlockhash
    result.previousBlockhash mustEqual expected.previousBlockhash
    result.merkleRoot mustEqual expected.merkleRoot
    result.size mustEqual expected.size
    result.height mustEqual expected.height
    result.version mustEqual expected.version
    result.medianTime mustEqual expected.medianTime
    result.time mustEqual expected.time
    result.bits mustEqual expected.bits
    result.chainwork mustEqual expected.chainwork
    result.difficulty mustEqual expected.difficulty
    result.nonce mustEqual expected.nonce
  }
}