diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index f969ff656..4312318e9 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -3,8 +3,6 @@ name: Build and push packages on: push: branches: [ "master" ] - pull_request: - branches: [ "master" ] env: REGISTRY: ghcr.io diff --git a/packages/api/index.js b/packages/api/index.js index 9e7f54069..17bc3fb38 100644 --- a/packages/api/index.js +++ b/packages/api/index.js @@ -7,8 +7,9 @@ const Routes = require('./src/routes') const ServiceNotAvailableError = require("./src/errors/ServiceNotAvailableError"); const MainController = require("./src/controllers/MainController"); -const TransactionController = require("./src/controllers/TransactionController"); -const BlockController = require("./src/controllers/BlockController"); +const TransactionsController = require("./src/controllers/TransactionsController"); +const BlocksController = require("./src/controllers/BlocksController"); +const DocumentsController = require("./src/controllers/DocumentsController"); const packageVersion = require('./package.json').version const Worker = require('./src/worker/index') const {BLOCK_TIME} = require("./src/constants"); @@ -69,11 +70,12 @@ const init = async () => { await knex.raw('select 1+1'); const mainController = new MainController(knex) - const blockController = new BlockController(knex) - const transactionController = new TransactionController(client, knex) + const blocksController = new BlocksController(knex) + const transactionsController = new TransactionsController(client, knex) const dataContractsController = new DataContractsController(knex) + const documentsController = new DocumentsController(knex) - Routes({fastify, mainController, blockController, transactionController, dataContractsController}) + Routes({fastify, mainController, blocksController, transactionsController, dataContractsController, documentsController}) fastify.setErrorHandler(errorHandler) fastify.listen({ 
host: "0.0.0.0", port: 3005, listenTextResolver: (address) => console.log(`Platform indexer API has started on the ${address}`)}); diff --git a/packages/api/src/controllers/BlockController.js b/packages/api/src/controllers/BlockController.js deleted file mode 100644 index d6f30ed9f..000000000 --- a/packages/api/src/controllers/BlockController.js +++ /dev/null @@ -1,82 +0,0 @@ -const Block = require('../models/Block'); - -class BlockController { - constructor(knex) { - this.knex = knex - } - - getBlockByHash = async (request, response) => { - const results = await this.knex - .select('blocks.hash as hash', 'state_transitions.hash as st_hash', - 'blocks.height as height', 'blocks.timestamp as timestamp', - 'blocks.block_version as block_version', 'blocks.app_version as app_version', - 'blocks.l1_locked_height as l1_locked_height') - .from('blocks') - .leftJoin('state_transitions', 'state_transitions.block_hash', 'blocks.hash') - .where('blocks.hash', request.params.hash); - - const [block] = results - - if (!block) { - return response.status(400).send() - } - - const txs = results.reduce((acc, value) => { - if (value.st_hash) { - return [...acc, value.st_hash] - } - - return acc - }, []) - - response.send(Block.fromJSON({header: block, txs})); - } - - getBlocks = async (request, response) => { - const {from, to, order = 'desc'} = request.query - - const subquery = this.knex('blocks') - .select('blocks.hash as hash', - 'blocks.height as height', 'blocks.timestamp as timestamp', - 'blocks.block_version as block_version', 'blocks.app_version as app_version', - 'blocks.l1_locked_height as l1_locked_height').as('blocks') - .where(function () { - if (from && to) { - this.where('height', '>=', from) - this.where('height', '<=', to) - } - }) - .limit(30) - .orderBy('blocks.height', order); - - const rows = await this.knex(subquery) - .select('blocks.hash as hash', - 'height', 'timestamp', - 'block_version', 'app_version', - 'l1_locked_height', 'state_transitions.hash as 
st_hash') - .leftJoin('state_transitions', 'state_transitions.block_hash', 'blocks.hash') - .groupBy('blocks.hash', 'height', 'blocks.timestamp', 'block_version', 'app_version', - 'l1_locked_height', 'state_transitions.hash') - .orderBy('height', 'desc') - - // map-reduce Blocks with Transactions - const blocksMap = rows.reduce((blocks, row) => { - const block = blocks[row.hash] - const {st_hash} = row - const txs = block?.txs || [] - - if (st_hash) { - txs.push(st_hash) - } - - return {...blocks, [row.hash]: {...row, txs}} - }, {}) - - const blocks = Object - .keys(blocksMap).map(blockHash => Block.fromJSON({header: blocksMap[blockHash], txs: blocksMap[blockHash].txs})) - - response.send(blocks); - } -} - -module.exports = BlockController diff --git a/packages/api/src/controllers/BlocksController.js b/packages/api/src/controllers/BlocksController.js new file mode 100644 index 000000000..d5bc8e932 --- /dev/null +++ b/packages/api/src/controllers/BlocksController.js @@ -0,0 +1,30 @@ +const Block = require('../models/Block'); +const BlocksDAO = require('../dao/BlocksDAO'); + +class BlocksController { + constructor(knex) { + this.blocksDAO = new BlocksDAO(knex) + } + + getBlockByHash = async (request, response) => { + const {hash} = request.params + + const block = await this.blocksDAO.getBlockByHash(hash) + + if (!block) { + return response.status(404).send({message: 'not found'}) + } + + response.send(block); + } + + getBlocks = async (request, response) => { + const {from, to, order = 'desc'} = request.query + + const blocks = await this.blocksDAO.getBlocksPaginated(from, to, order) + + response.send(blocks); + } +} + +module.exports = BlocksController diff --git a/packages/api/src/controllers/DataContractsController.js b/packages/api/src/controllers/DataContractsController.js index 6367ae65d..d2879a3ac 100644 --- a/packages/api/src/controllers/DataContractsController.js +++ b/packages/api/src/controllers/DataContractsController.js @@ -1,38 +1,27 @@ const 
DataContract = require("../models/DataContract"); +const DataContractsDAO = require("../dao/DataContractsDAO"); class DataContractsController { constructor(knex) { - this.knex = knex + this.dataContractsDAO = new DataContractsDAO(knex) } getDataContracts = async (request, response) => { - const subquery = this.knex('data_contracts') - .select(this.knex.raw('data_contracts.id as id, data_contracts.identifier as identifier, data_contracts.version as version, rank() over (partition by identifier order by version desc) rank')) - .as('data_contracts') + const dataContracts = await this.dataContractsDAO.getDataContracts(); - const rows = await this.knex(subquery) - .select('id', 'identifier', 'version', 'rank') - .where('rank', '=', 1) - - response.send(rows.map(dataContract => DataContract.fromJSON(dataContract))); + response.send(dataContracts) } getDataContractByIdentifier = async (request, response) => { const {identifier} = request.params - const rows = await this.knex('data_contracts') - .select('data_contracts.identifier as identifier', 'data_contracts.schema as schema', 'data_contracts.version as version') - .where('data_contracts.identifier', identifier) - .orderBy('id', 'desc') - .limit(1); - - const [row] = rows + const dataContract = await this.dataContractsDAO.getDataContractByIdentifier(identifier) - if (!row) { + if (!dataContract) { response.status(404).send({message: 'not found'}) } - response.send({identifier: row.identifier, schema: row.schema, version: row.version}); + response.send(dataContract); } } diff --git a/packages/api/src/controllers/DocumentsController.js b/packages/api/src/controllers/DocumentsController.js new file mode 100644 index 000000000..114dd043c --- /dev/null +++ b/packages/api/src/controllers/DocumentsController.js @@ -0,0 +1,30 @@ +const Document = require('../models/Document'); +const DocumentsDAO = require('../dao/DocumentsDAO'); + +class DocumentsController { + constructor(knex) { + this.documentsDAO = new DocumentsDAO(knex) + 
} + + getDocumentByIdentifier = async (request, response) => { + const {identifier} = request.params + + const document = await this.documentsDAO.getDocumentByIdentifier(identifier) + + if (!document) { + return response.status(404).send({message: 'not found'}) + } + + response.send(document); + } + + getDocumentsByDataContract = async (request, response) => { + const {identifier} = request.params + + const documents = await this.documentsDAO.getDocumentsByDataContract(identifier) + + response.send(documents); + } +} + +module.exports = DocumentsController diff --git a/packages/api/src/controllers/MainController.js b/packages/api/src/controllers/MainController.js index 9860023e8..2a978c88c 100644 --- a/packages/api/src/controllers/MainController.js +++ b/packages/api/src/controllers/MainController.js @@ -1,21 +1,25 @@ +const BlocksDAO = require('../dao/BlocksDAO') +const DataContractsDAO = require('../dao/DataContractsDAO') +const TransactionsDAO = require('../dao/TransactionsDAO') + class MainController { constructor(knex) { - this.knex = knex; + this.blocksDAO = new BlocksDAO(knex) + this.dataContractsDAO = new DataContractsDAO(knex) + this.transactionsDAO = new TransactionsDAO(knex) } - getStatus = async (request, response) => { - const [result] = await this.knex('blocks').max('height') - - const {max} = result - - response.send({ - network: "dash-testnet-25", - appVersion: "1", - p2pVersion: "8", - blockVersion: "13", - blocksCount: max, - tenderdashVersion: "0.13.1" - }); + getStatus = async (request, response) => { + const max = await this.blocksDAO.getMaxHeight() + + response.send({ + network: "dash-testnet-25", + appVersion: "1", + p2pVersion: "8", + blockVersion: "13", + blocksCount: max, + tenderdashVersion: "0.13.1" + }); } search = async (request, response) => { @@ -27,42 +31,38 @@ class MainController { } if (/^[0-9]$/.test(query)) { - // search blocks by height - const [row] = await this.knex('blocks').select('hash',
'block_height').where('block_height', query) - - if (row) { - const {hash, block_height} = row + // search block by height + const block = await this.blocksDAO.getBlockByHeight(query) - return response.send({hash: hash, height: block_height}) + if (block) { + return response.send({block}) } } - // check if base64 and 44 length for Identity ids - if (/^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/.test(query) && query.length === 44) { - // search blocks by height - const [row] = await this.knex('data_contracts').select('identifier').where('identifier', query) + if (/^[0-9A-F]{64,64}$/.test(query)) { + // search block by hash + const block = await this.blocksDAO.getBlockByHash(query) - if (row) { - return response.send({identifier: row.identifier}) + if (block) { + return response.send({block}) } - } - // search blocks - const [row] = await this.knex('blocks').select('hash', 'block_height').where('hash', query) + // search transactions + const transaction = await this.transactionsDAO.getTransactionByHash(query) - if (row) { - const {hash, block_height} = row - - return response.send({hash: hash, height: block_height}) + if (transaction) { + return response.send({transaction}) + } } - // search transactions - const [stRow] = await this.knex('state_transitions').select('hash').where('hash', query) - - if (stRow) { - const {hash} = stRow + // check if base64 and 44 length for Identity ids + if (/^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/.test(query) && query.length === 44) { + // search block by height + const dataContract = await this.dataContractsDAO.getDataContractByIdentifier(query) - return response.send({hash}) + if (dataContract) { + return response.send({dataContract}) + } } response.status(404).send({message: 'not found'}) diff --git a/packages/api/src/controllers/TransactionsController.js b/packages/api/src/controllers/TransactionsController.js new file mode 100644 index 000000000..c237272a2 --- /dev/null +++ 
b/packages/api/src/controllers/TransactionsController.js @@ -0,0 +1,48 @@ +const cache = require("../cache"); +const Transaction = require("../models/Transaction"); +const TransactionsDAO = require("../dao/TransactionsDAO"); + +class TransactionsController { + constructor(client, knex) { + this.client = client + this.transactionsDAO = new TransactionsDAO(knex) + } + + getTransactions = async (request, response) => { + const {from, to} = request.query + + const transactions = await this.transactionsDAO.getTransactions(from, to) + + response.send(transactions); + } + + getTransactionByHash = async (request, reply) => { + const {txHash} = request.params; + + const transaction = await this.transactionsDAO.getTransactionByHash(txHash) + + if (!transaction) { + return reply.status(404).send({message: 'not found'}) + } + + reply.send(transaction) + } + + decode = async (request, reply) => { + const {base64} = request.body; + + const cached = cache.get('decoded_' + base64) + + if (cached) { + return reply.send(cached) + } + + const stateTransition = await this.client.platform.dpp.stateTransition.createFromBuffer(Buffer.from(base64, 'base64')); + + cache.set('decoded_' + base64, stateTransition) + + reply.send(stateTransition) + } +} + +module.exports = TransactionsController diff --git a/packages/api/src/dao/BlocksDAO.js b/packages/api/src/dao/BlocksDAO.js new file mode 100644 index 000000000..e08224615 --- /dev/null +++ b/packages/api/src/dao/BlocksDAO.js @@ -0,0 +1,104 @@ +const Block = require("../models/Block"); + +module.exports = class BlockDAO { + constructor(knex) { + this.knex = knex; + } + + getMaxHeight = async () => { + const [result] = await this.knex('blocks').max('height') + + const {max} = result + + return max + } + + getBlockByHash = async (blockHash) => { + const results = await this.knex + .select('blocks.hash as hash', 'state_transitions.hash as st_hash', 'blocks.height as height', 'blocks.timestamp as timestamp', 'blocks.block_version as 
block_version', 'blocks.app_version as app_version', 'blocks.l1_locked_height as l1_locked_height') + .from('blocks') + .leftJoin('state_transitions', 'state_transitions.block_hash', 'blocks.hash') + .where('blocks.hash', blockHash); + + const [block] = results + + if (!block) { + return null + } + + const txs = results.reduce((acc, value) => { + if (value.st_hash) { + return [...acc, value.st_hash] + } + + return acc + }, []) + + + return Block.fromRow({header: block, txs}); + } + + getBlockByHeight = async (height) => { + const results = await this.knex + .select('blocks.hash as hash', 'state_transitions.hash as st_hash', 'blocks.height as height', 'blocks.timestamp as timestamp', 'blocks.block_version as block_version', 'blocks.app_version as app_version', 'blocks.l1_locked_height as l1_locked_height') + .from('blocks') + .leftJoin('state_transitions', 'state_transitions.block_hash', 'blocks.hash') + .where('blocks.height', height); + + const [block] = results + + if (!block) { + return null + } + + const txs = results.reduce((acc, value) => { + if (value.st_hash) { + return [...acc, value.st_hash] + } + + return acc + }, []) + + if (!results.length) { + return null + } + + return Block.fromRow({header: block, txs}); + } + + getBlocksPaginated = async (from, to, order = 'desc') => { + const subquery = this.knex('blocks') + .select('blocks.hash as hash', 'blocks.height as height', 'blocks.timestamp as timestamp', 'blocks.block_version as block_version', 'blocks.app_version as app_version', 'blocks.l1_locked_height as l1_locked_height').as('blocks') + .where(function () { + if (from && to) { + this.where('height', '>=', from) + this.where('height', '<=', to) + } + }) + .limit(30) + .orderBy('blocks.height', order); + + const rows = await this.knex(subquery) + .select('blocks.hash as hash', 'height', 'timestamp', 'block_version', 'app_version', 'l1_locked_height', 'state_transitions.hash as st_hash') + .leftJoin('state_transitions', 'state_transitions.block_hash', 
'blocks.hash') + .groupBy('blocks.hash', 'height', 'blocks.timestamp', 'block_version', 'app_version', 'l1_locked_height', 'state_transitions.hash') + .orderBy('height', 'desc') + + // map-reduce Blocks with Transactions + const blocksMap = rows.reduce((blocks, row) => { + const block = blocks[row.hash] + const {st_hash} = row + const txs = block?.txs || [] + + if (st_hash) { + txs.push(st_hash) + } + + return {...blocks, [row.hash]: {...row, txs}} + }, {}) + + return Object.keys(blocksMap).map(blockHash => Block.fromRow({ + header: blocksMap[blockHash], txs: blocksMap[blockHash].txs + })) + } +} diff --git a/packages/api/src/dao/DataContractsDAO.js b/packages/api/src/dao/DataContractsDAO.js new file mode 100644 index 000000000..e658fa730 --- /dev/null +++ b/packages/api/src/dao/DataContractsDAO.js @@ -0,0 +1,35 @@ +const DataContract = require("../models/DataContract"); + +module.exports = class DataContractsDAO { + constructor(knex) { + this.knex = knex; + } + + getDataContracts = async () => { + const subquery = this.knex('data_contracts') + .select(this.knex.raw('data_contracts.id as id, data_contracts.identifier as identifier, data_contracts.version as version, rank() over (partition by identifier order by version desc) rank')) + .as('data_contracts') + + const rows = await this.knex(subquery) + .select('id', 'identifier', 'version', 'rank') + .where('rank', '=', 1) + + return rows.map(dataContract => DataContract.fromRow(dataContract)); + } + + getDataContractByIdentifier = async (identifier) => { + const rows = await this.knex('data_contracts') + .select('data_contracts.identifier as identifier', 'data_contracts.schema as schema', 'data_contracts.version as version') + .where('data_contracts.identifier', identifier) + .orderBy('id', 'desc') + .limit(1); + + const [row] = rows + + if (!row) { + return null + } + + return DataContract.fromRow(row) + } +} diff --git a/packages/api/src/dao/DocumentsDAO.js b/packages/api/src/dao/DocumentsDAO.js new file mode 
100644 index 000000000..a0a86a7f9 --- /dev/null +++ b/packages/api/src/dao/DocumentsDAO.js @@ -0,0 +1,41 @@ +const Document = require("../models/Document"); + +module.exports = class DocumentsDAO { + constructor(knex) { + this.knex = knex; + } + + getDocumentByIdentifier = async (identifier) => { + const subquery = this.knex('documents') + .select(this.knex.raw('documents.id as id, documents.identifier as identifier, data_contracts.identifier as data_contract_identifier, documents.data as data, documents.revision as revision, documents.state_transition_hash as state_transition_hash, documents.deleted as deleted, rank() over (partition by documents.identifier order by documents.id desc) rank')) + .leftJoin('data_contracts', 'data_contracts.id', 'documents.data_contract_id') + .where('documents.identifier', '=', identifier) + .as('documents') + + const rows = await this.knex(subquery) + .select('id', 'identifier', 'data_contract_identifier', 'data', 'revision', 'deleted', 'rank', 'state_transition_hash') + .limit(1); + + const [row] = rows + + if (!row) { + return null + } + + return Document.fromRow(row); + } + + getDocumentsByDataContract = async (identifier) => { + const subquery = this.knex('documents') + .select(this.knex.raw('documents.id as id, documents.identifier as identifier, data_contracts.identifier as data_contract_identifier, documents.data as data, documents.revision as revision, documents.state_transition_hash as state_transition_hash, documents.deleted as deleted, rank() over (partition by documents.identifier order by documents.id desc) rank')) + .leftJoin('data_contracts', 'data_contracts.id', 'documents.data_contract_id') + .where('data_contracts.identifier', identifier) + .as('documents') + + const rows = await this.knex(subquery) + .select('id', 'identifier', 'data_contract_identifier', 'data', 'revision', 'deleted', 'rank', 'state_transition_hash') + .orderBy('documents.id', 'asc') + + return rows.map((row) => Document.fromRow(row)); + } +} 
diff --git a/packages/api/src/controllers/TransactionController.js b/packages/api/src/dao/TransactionsDAO.js similarity index 50% rename from packages/api/src/controllers/TransactionController.js rename to packages/api/src/dao/TransactionsDAO.js index bd2df4a89..a078ca1e5 100644 --- a/packages/api/src/controllers/TransactionController.js +++ b/packages/api/src/dao/TransactionsDAO.js @@ -1,15 +1,25 @@ -const cache = require("../cache"); const Transaction = require("../models/Transaction"); -class TransactionController { - constructor(client, knex) { - this.client = client - this.knex = knex +module.exports = class TransactionsDAO { + constructor(knex) { + this.knex = knex; } - getTransactions = async (request, response) => { - const {from, to} = request.query + getTransactionByHash = async (hash) => { + const [row] = await this.knex('state_transitions') + .select('state_transitions.hash as hash', 'state_transitions.data as data', 'state_transitions.type as type', + 'state_transitions.index as index', 'blocks.height as block_height', 'blocks.timestamp as timestamp') + .where('state_transitions.hash', hash) + .leftJoin('blocks', 'blocks.hash', 'state_transitions.block_hash') + + if (!row) { + return null + } + + return Transaction.fromRow(row) + } + getTransactions = async (from, to) => { const rows = await this.knex .select('state_transitions.hash as hash', 'state_transitions.data as data', 'state_transitions.type as type', 'state_transitions.index as index', 'blocks.height as block_height', 'blocks.timestamp as timestamp') @@ -24,42 +34,6 @@ class TransactionController { .limit(30) .orderBy('blocks.height', 'desc') - const transactions = rows.map((row) => Transaction.fromJSON(row)) - - response.send(transactions); - } - - getTransactionByHash = async (request, reply) => { - const {txHash} = request.params; - - const [row] = await this.knex('state_transitions') - .select('state_transitions.hash as hash', 'state_transitions.data as data', 'state_transitions.type as 
type', - 'state_transitions.index as index', 'blocks.height as block_height', 'blocks.timestamp as timestamp') - .where('state_transitions.hash', txHash) - .leftJoin('blocks', 'blocks.hash', 'state_transitions.block_hash') - - if (row) { - return reply.send(Transaction.fromJSON(row)) - } - - reply.status(404).send({message: 'not found'}) - } - - decode = async (request, reply) => { - const {base64} = request.body; - - const cached = cache.get('decoded_' + base64) - - if (cached) { - return reply.send(cached) - } - - const stateTransition = await this.client.platform.dpp.stateTransition.createFromBuffer(Buffer.from(base64, 'base64')); - - cache.set('decoded_' + base64, stateTransition) - - reply.send(stateTransition) + return rows.map((row) => Transaction.fromRow(row)) } } - -module.exports = TransactionController diff --git a/packages/api/src/models/Block.js b/packages/api/src/models/Block.js index fca565be4..267b62004 100644 --- a/packages/api/src/models/Block.js +++ b/packages/api/src/models/Block.js @@ -9,7 +9,7 @@ module.exports = class Block { this.txs = txs; } - static fromJSON({header, txs}) { - return new Block(BlockHeader.fromJSON(header), txs) + static fromRow({header, txs}) { + return new Block(BlockHeader.fromRow(header), txs) } } diff --git a/packages/api/src/models/BlockHeader.js b/packages/api/src/models/BlockHeader.js index b36b92e2f..31b91d497 100644 --- a/packages/api/src/models/BlockHeader.js +++ b/packages/api/src/models/BlockHeader.js @@ -15,7 +15,7 @@ module.exports = class BlockHeader { this.l1LockedHeight = l1LockedHeight; } - static fromJSON({hash, height, timestamp, block_version, app_version, l1_locked_height}) { + static fromRow({hash, height, timestamp, block_version, app_version, l1_locked_height}) { return new BlockHeader(hash, height, new Date(timestamp), block_version, app_version, l1_locked_height) } } diff --git a/packages/api/src/models/DataContract.js b/packages/api/src/models/DataContract.js index 9ce65633e..34aa3b267 100644 
--- a/packages/api/src/models/DataContract.js +++ b/packages/api/src/models/DataContract.js @@ -1,13 +1,15 @@ module.exports = class DataContract { identifier + schema version - constructor(identifier, version) { + constructor(identifier, schema, version) { this.identifier = identifier; + this.schema = schema; this.version = version; } - static fromJSON({identifier, version}) { - return new DataContract(identifier, version) + static fromRow({identifier, schema, version}) { + return new DataContract(identifier, schema, version) } } diff --git a/packages/api/src/models/Document.js b/packages/api/src/models/Document.js new file mode 100644 index 000000000..fe2068677 --- /dev/null +++ b/packages/api/src/models/Document.js @@ -0,0 +1,21 @@ +module.exports = class Document { + identifier + dataContractIdentifier + revision + stateTransitionHash + deleted + data + + constructor(identifier, dataContractIdentifier, revision, stateTransitionHash, deleted, data) { + this.identifier = identifier; + this.dataContractIdentifier = dataContractIdentifier; + this.revision = revision; + this.stateTransitionHash = stateTransitionHash; + this.deleted = deleted; + this.data = data; + } + + static fromRow({identifier, data_contract_identifier, revision, state_transition_hash, deleted, data}) { + return new Document(identifier, data_contract_identifier, revision, state_transition_hash, deleted, data) + } +} diff --git a/packages/api/src/models/Transaction.js b/packages/api/src/models/Transaction.js index 7aa268670..559299405 100644 --- a/packages/api/src/models/Transaction.js +++ b/packages/api/src/models/Transaction.js @@ -15,7 +15,7 @@ module.exports = class Transaction { this.timestamp = timestamp; } - static fromJSON({hash, index, block_height, type, data, timestamp}) { + static fromRow({hash, index, block_height, type, data, timestamp}) { return new Transaction(hash, index, block_height, type, data, timestamp) } } diff --git a/packages/api/src/routes.js b/packages/api/src/routes.js 
index e67408ea6..8a10afe25 100644 --- a/packages/api/src/routes.js +++ b/packages/api/src/routes.js @@ -2,10 +2,10 @@ * * @param fastify {Fastify} * @param mainController {MainController} - * @param blockController {BlockController} - * @param transactionController {TransactionController} + * @param blockController {BlocksController} + * @param transactionsController {TransactionsController} */ -module.exports = ({fastify, mainController, blockController, transactionController, dataContractsController}) => { +module.exports = ({fastify, mainController, blocksController, transactionsController, dataContractsController, documentsController}) => { const routes = [ { path: '/status', @@ -15,22 +15,22 @@ module.exports = ({fastify, mainController, blockController, transactionControll { path: '/block/:hash', method: 'GET', - handler: blockController.getBlockByHash + handler: blocksController.getBlockByHash }, { path: '/blocks', method: 'GET', - handler: blockController.getBlocks + handler: blocksController.getBlocks }, { path: '/transactions', method: 'GET', - handler: transactionController.getTransactions + handler: transactionsController.getTransactions }, { path: '/transaction/:txHash', method: 'GET', - handler: transactionController.getTransactionByHash + handler: transactionsController.getTransactionByHash }, { path: '/dataContracts', @@ -42,6 +42,16 @@ module.exports = ({fastify, mainController, blockController, transactionControll method: 'GET', handler: dataContractsController.getDataContractByIdentifier }, + { + path: '/dataContract/:identifier/documents', + method: 'GET', + handler: documentsController.getDocumentsByDataContract + }, + { + path: '/document/:identifier', + method: 'GET', + handler: documentsController.getDocumentByIdentifier + }, { path: '/search', method: 'GET', @@ -50,7 +60,7 @@ module.exports = ({fastify, mainController, blockController, transactionControll { path: '/transaction/decode', method: 'POST', - handler: transactionController.decode 
+ handler: transactionsController.decode }, ] diff --git a/packages/frontend/src/util/Api.js b/packages/frontend/src/util/Api.js index 8a920721b..f2097ab9a 100644 --- a/packages/frontend/src/util/Api.js +++ b/packages/frontend/src/util/Api.js @@ -70,6 +70,14 @@ const getDataContracts = () => { return call(`dataContracts`, 'GET') } +const getDocumentByIdentifier = (identifier) => { + return call(`document/${identifier}`, 'GET') +} + +const getDocumentsByDataContract = (dataContractIdentifier) => { + return call(`dataContract/${dataContractIdentifier}/documents`, 'GET') +} + const getStatus = () => { return call(`status`, 'GET') } @@ -82,4 +90,4 @@ const decodeTx = (base64) => { return call(`transaction/decode`, 'POST', {base64}) } -export {getStatus, getBlocks, getBlockByHash, getTransactions, getTransaction, search, decodeTx, getDataContractByIdentifier, getDataContracts} +export {getStatus, getBlocks, getBlockByHash, getTransactions, getTransaction, search, decodeTx, getDocumentsByDataContract, getDocumentByIdentifier, getDataContractByIdentifier, getDataContracts} diff --git a/packages/indexer/migrations/V13__add_documents.sql b/packages/indexer/migrations/V13__add_documents.sql new file mode 100644 index 000000000..1d75fee94 --- /dev/null +++ b/packages/indexer/migrations/V13__add_documents.sql @@ -0,0 +1,9 @@ +CREATE TABLE documents ( + id SERIAL PRIMARY KEY, + identifier char(44) NOT NULL, + revision int NOT NULL, + "data" jsonb DEFAULT NULL, + deleted boolean NOT NULL, + state_transition_hash char(64) NOT NULL references state_transitions(hash), + data_contract_id int NOT NULL references data_contracts(id) +); diff --git a/packages/indexer/src/entities/data_contract.rs b/packages/indexer/src/entities/data_contract.rs index 4304fe55c..df77615d4 100644 --- a/packages/indexer/src/entities/data_contract.rs +++ b/packages/indexer/src/entities/data_contract.rs @@ -3,19 +3,21 @@ use std::time::SystemTime; use chrono::{DateTime, Utc}; use
dpp::data_contract::DocumentName; use dpp::data_contract::serialized_version::DataContractInSerializationFormat; +use dpp::data_contracts::SystemDataContract; use dpp::identifier::Identifier; use dpp::platform_value::string_encoding::Encoding; -use dpp::platform_value::Value; use dpp::state_transition::data_contract_create_transition::DataContractCreateTransition; use dpp::state_transition::data_contract_update_transition::accessors::DataContractUpdateTransitionAccessorsV0; use dpp::state_transition::data_contract_update_transition::DataContractUpdateTransition; use dpp::state_transition::StateTransition; +use serde_json::Value; use tokio_postgres::Row; #[derive(Clone)] pub struct DataContract { + pub id: Option, pub identifier: Identifier, - pub schema: BTreeMap, + pub schema: Option, pub version: u32, } @@ -31,8 +33,9 @@ impl From for DataContract { let id = data_contract.id; let version = data_contract.version; let schema = data_contract.document_schemas; + let schema_decoded = serde_json::to_value(schema).unwrap(); - return DataContract{ identifier: id, schema, version }; + return DataContract{ id: None, identifier: id, schema: Some(schema_decoded), version }; } } } @@ -52,8 +55,9 @@ impl From for DataContract { let id = data_contract.id; let version = data_contract.version; let schema = data_contract.document_schemas; + let schema_decoded = serde_json::to_value(schema).unwrap(); - return DataContract{ identifier: id, schema, version }; + return DataContract{ id: None, identifier: id, schema: Some(schema_decoded), version }; } } } @@ -61,3 +65,39 @@ impl From for DataContract { } } +impl From for DataContract { + fn from(data_contract: SystemDataContract) -> Self { + let identifier = data_contract.id(); + let source = data_contract.source().unwrap(); + let schema = source.document_schemas; + let schema_decoded = serde_json::to_value(schema).unwrap(); + + return DataContract { + id: None, + identifier, + schema: Some(schema_decoded), + version: 0 + } + } +} + + 
+impl From for DataContract { + fn from(row: Row) -> Self { + let id: i32 = row.get(0); + + let identifier_str: String = row.get(1); + let identifier = Identifier::from_string(&identifier_str, Encoding::Base58).unwrap(); + + let version:i32 = row.get(2); + + return DataContract{ + id: Some(id as u32), + identifier, + schema: None, + version: version as u32 + } + } +} + + diff --git a/packages/indexer/src/entities/document.rs b/packages/indexer/src/entities/document.rs new file mode 100644 index 000000000..7e46509e6 --- /dev/null +++ b/packages/indexer/src/entities/document.rs @@ -0,0 +1,80 @@ +use std::collections::BTreeMap; +use std::iter::Rev; +use std::time::SystemTime; +use chrono::{DateTime, Utc}; +use dpp::identifier::Identifier; +use dpp::platform_value::string_encoding::Encoding; +use dpp::prelude::Revision; +use dpp::state_transition::documents_batch_transition::document_base_transition::v0::v0_methods::DocumentBaseTransitionV0Methods; +use dpp::state_transition::documents_batch_transition::document_create_transition::v0::v0_methods::DocumentCreateTransitionV0Methods; +use dpp::state_transition::documents_batch_transition::document_delete_transition::v0::v0_methods::DocumentDeleteTransitionV0Methods; +use dpp::state_transition::documents_batch_transition::document_replace_transition::v0::v0_methods::DocumentReplaceTransitionV0Methods; +use dpp::state_transition::documents_batch_transition::document_transition::DocumentTransition; +use serde_json::Value; +use tokio_postgres::Row; + +#[derive(Clone)] +pub struct Document { + pub id: Option, + pub identifier: Identifier, + pub data_contract_identifier: Identifier, + pub data: Option, + pub deleted: bool, + pub revision: Revision, +} + +impl From for Document { + fn from(row: Row) -> Self { + let id: i32 = row.get(0); + + let identifier_str: String = row.get(1); + let identifier = Identifier::from_string(&identifier_str, Encoding::Base58).unwrap(); + + let data_contract_identifier_str: String = row.get(2); + 
let data_contract_identifier = Identifier::from_string(&data_contract_identifier_str, Encoding::Base58).unwrap(); + + let revision: i32 = row.get(3); + + let deleted: bool = row.get(4); + + return Document { id: Some(id as u32), deleted, identifier, data: None, data_contract_identifier, revision: Revision::from(revision as u64) }; + } +} + +impl From for Document { + fn from(value: DocumentTransition) -> Self { + match value { + DocumentTransition::Create(transition) => { + let base = transition.base().clone(); + let data = transition.data().clone(); + let data_decoded = serde_json::to_value(data).unwrap(); + let identifier = base.id(); + let data_contract_identifier = base.data_contract_id(); + let revision: Revision = Revision::from(0 as u64); + + return Document { id: None, identifier, data: Some(data_decoded), data_contract_identifier, revision, deleted: false }; + } + DocumentTransition::Replace(transition) => { + let base = transition.base().clone(); + let data = transition.data().clone(); + let data_decoded = serde_json::to_value(data).unwrap(); + let identifier = base.id(); + let data_contract_identifier = base.data_contract_id(); + let revision = transition.revision(); + + return Document { id: None, identifier, data: Some(data_decoded), data_contract_identifier, revision, deleted: false }; + + } + DocumentTransition::Delete(transition) => { + let base = transition.base().clone(); + let identifier = base.id(); + let data_contract_identifier = base.data_contract_id(); + let revision: Revision = Revision::from(0 as u64); + + return Document { id: None, identifier, data: None, data_contract_identifier, revision, deleted: true }; + } + } + + } +} + diff --git a/packages/indexer/src/entities/mod.rs b/packages/indexer/src/entities/mod.rs index 72b250d26..a4f264cca 100644 --- a/packages/indexer/src/entities/mod.rs +++ b/packages/indexer/src/entities/mod.rs @@ -1,3 +1,4 @@ pub mod block_header; pub mod block; pub mod data_contract; +pub mod document; diff --git 
a/packages/indexer/src/models/mod.rs b/packages/indexer/src/models/mod.rs index 958cc78f8..99529f9dc 100644 --- a/packages/indexer/src/models/mod.rs +++ b/packages/indexer/src/models/mod.rs @@ -1,4 +1,5 @@ use chrono::{DateTime, Utc}; +use dpp::state_transition::documents_batch_transition::{DocumentCreateTransition, DocumentDeleteTransition, DocumentReplaceTransition}; use serde::{Deserialize}; #[derive(Deserialize)] @@ -55,6 +56,12 @@ pub struct BlockWrapper { pub block: TDBlock, } +enum DocumentTransition { + Create(DocumentCreateTransition), + Replace(DocumentReplaceTransition), + Delete(DocumentDeleteTransition), +} + mod from_iso8601 { use chrono::{Utc, DateTime}; use serde::{self, Deserialize, Serializer, Deserializer}; diff --git a/packages/indexer/src/processor/psql/dao/mod.rs b/packages/indexer/src/processor/psql/dao/mod.rs index 0fad682c0..882063a75 100644 --- a/packages/indexer/src/processor/psql/dao/mod.rs +++ b/packages/indexer/src/processor/psql/dao/mod.rs @@ -6,7 +6,13 @@ use dpp::platform_value::string_encoding::Encoding; use dpp::state_transition::data_contract_create_transition::accessors::DataContractCreateTransitionAccessorsV0; use dpp::state_transition::data_contract_create_transition::DataContractCreateTransition; use sha256::{digest}; +use crate::entities::document::Document; use base64::{Engine as _, engine::{general_purpose}}; +use dpp::identifier::Identifier; +use dpp::platform_value::string_encoding::Encoding::{Base58, Base64}; +use dpp::serialization::PlatformSerializable; +use dpp::state_transition::documents_batch_transition::DocumentCreateTransition; +use dpp::state_transition::StateTransition; use crate::entities::block_header::BlockHeader; use crate::entities::data_contract::DataContract; @@ -44,6 +50,8 @@ impl PostgresDAO { let stmt = client.prepare_cached(query).await.unwrap(); client.query(&stmt, &[&hash, &data, &st_type, &index, &block_hash]).await.unwrap(); + + println!("Created ST with hash {} from block with hash {}", &hash, 
&block_hash); } pub async fn create_data_contract(&self, data_contract: DataContract) { @@ -60,6 +68,31 @@ impl PostgresDAO { let client = self.connection_pool.get().await.unwrap(); let stmt = client.prepare_cached(query).await.unwrap(); client.query(&stmt, &[&id_str, &schema_decoded, &version]).await.unwrap(); + + println!("Created DataContract {} [{} version]", id_str, version); + } + + pub async fn create_document(&self, document: Document, st_hash: String) -> Result<(), PoolError> { + let id = document.identifier; + let revision = document.revision; + let revision_i32 = revision as i32; + + let data = document.data; + + let client = self.connection_pool.get().await.unwrap(); + + let data_contract = self.get_data_contract_by_identifier(document.data_contract_identifier).await.unwrap().unwrap(); + let data_contract_id = data_contract.id.unwrap() as i32; + + let query = "INSERT INTO documents(identifier,revision,data,deleted,state_transition_hash,data_contract_id) VALUES ($1, $2, $3, $4, $5, $6);"; + + let stmt = client.prepare_cached(query).await.unwrap(); + + client.query(&stmt, &[&id.to_string(Encoding::Base58), &revision_i32, &data, &document.deleted, &st_hash, &data_contract_id]).await.unwrap(); + + println!("Created document {} [{} revision] [is_deleted {}]", document.identifier.to_string(Base58), revision_i32, document.deleted); + + Ok(()) } pub async fn get_block_header_by_height(&self, block_height: i32) -> Result, PoolError> { @@ -81,6 +114,40 @@ impl PostgresDAO { return Ok(block.cloned()); } + pub async fn get_data_contract_by_identifier(&self, identifier: Identifier) -> Result, PoolError> { + let client = self.connection_pool.get().await?; + + let stmt = client.prepare_cached("SELECT id,identifier,version FROM data_contracts where identifier = $1 ORDER by version DESC LIMIT 1;").await.unwrap(); + + let rows: Vec = client.query(&stmt, &[&identifier.to_string(Encoding::Base58)]) + .await.unwrap(); + + let blocks: Vec = rows + .into_iter() + .map(|row| { 
+ row.into() + }).collect::>(); + + Ok(blocks.first().cloned()) + } + + pub async fn get_document_by_identifier(&self, identifier: Identifier) -> Result, PoolError> { + let client = self.connection_pool.get().await?; + + let stmt = client.prepare_cached("SELECT id,identifier,revision,deleted FROM documents where identifier = $1 ORDER BY id DESC LIMIT 1;").await.unwrap(); + + let rows: Vec = client.query(&stmt, &[&identifier.to_string(Encoding::Base58)]) + .await.unwrap(); + + let blocks: Vec = rows + .into_iter() + .map(|row| { + row.into() + }).collect::>(); + + Ok(blocks.first().cloned()) + } + pub async fn create_block(&self, block_header: BlockHeader) -> String { let client = self.connection_pool.get().await.unwrap(); diff --git a/packages/indexer/src/processor/psql/mod.rs b/packages/indexer/src/processor/psql/mod.rs index 3d3392869..bca38a859 100644 --- a/packages/indexer/src/processor/psql/mod.rs +++ b/packages/indexer/src/processor/psql/mod.rs @@ -2,19 +2,29 @@ mod dao; use std::num::ParseIntError; use dpp::state_transition::{StateTransition, StateTransitionLike}; -use deadpool_postgres::{ PoolError }; +use deadpool_postgres::{PoolError}; use dpp::state_transition::data_contract_create_transition::DataContractCreateTransition; use crate::processor::psql::dao::PostgresDAO; use base64::{Engine as _, engine::{general_purpose}}; +use dpp::data_contracts::SystemDataContract; +use dpp::platform_value::string_encoding::Encoding; +use dpp::platform_value::string_encoding::Encoding::Base58; use dpp::serialization::PlatformSerializable; +use dpp::state_transition::data_contract_create_transition::accessors::DataContractCreateTransitionAccessorsV0; +use dpp::state_transition::documents_batch_transition::accessors::DocumentsBatchTransitionAccessorsV0; +use dpp::state_transition::documents_batch_transition::{DocumentCreateTransition, DocumentReplaceTransition, DocumentsBatchTransition}; +use 
dpp::state_transition::documents_batch_transition::document_transition::DocumentTransition; +use sha256::digest; use dpp::state_transition::data_contract_update_transition::DataContractUpdateTransition; use crate::decoder::decoder::StateTransitionDecoder; use crate::entities::block::Block; use crate::entities::data_contract::DataContract; +use crate::entities::document::Document; +use crate::processor::psql::ProcessorError::UnexpectedError; pub enum ProcessorError { DatabaseError, - UnexpectedError + UnexpectedError, } impl From for ProcessorError { @@ -63,7 +73,17 @@ impl PSQLProcessor { self.dao.create_data_contract(data_contract).await; } - pub async fn handle_st(&self, block_hash: String, index: i32,state_transition: StateTransition) -> () { + pub async fn handle_documents_batch(&self, state_transition: DocumentsBatchTransition, st_hash: String) -> () { + let transitions = state_transition.transitions().clone(); + + for (i, document_transition) in transitions.iter().enumerate() { + let document = Document::from(document_transition.clone()); + + self.dao.create_document(document, st_hash.clone()).await; + } + } + + pub async fn handle_st(&self, block_hash: String, index: i32, state_transition: StateTransition) -> () { let mut st_type: i32 = 999; let mut bytes: Vec = Vec::new(); @@ -73,6 +93,7 @@ impl PSQLProcessor { bytes = PlatformSerializable::serialize_to_bytes(&StateTransition::DataContractCreate( st.clone() )).unwrap(); + self.dao.create_state_transition(block_hash.clone(), st_type, index, bytes).await; self.handle_data_contract_create(st).await; @@ -83,6 +104,7 @@ impl PSQLProcessor { bytes = PlatformSerializable::serialize_to_bytes(&StateTransition::DataContractUpdate( st.clone() )).unwrap(); + self.dao.create_state_transition(block_hash.clone(), st_type, index, bytes).await; self.handle_data_contract_update(st).await; @@ -93,6 +115,14 @@ impl PSQLProcessor { bytes = PlatformSerializable::serialize_to_bytes(&StateTransition::DocumentsBatch( st.clone() 
)).unwrap(); + + let st_hash = digest(bytes.clone()).to_uppercase(); + + self.dao.create_state_transition(block_hash.clone(), st_type, index, bytes.clone()).await; + + self.handle_documents_batch(st, st_hash).await; + + println!("Processed DocumentsBatch at block hash {}", block_hash); } StateTransition::IdentityCreate(st) => { st_type = st.state_transition_type() as i32; @@ -125,8 +155,6 @@ impl PSQLProcessor { )).unwrap(); } } - - self.dao.create_state_transition(block_hash, st_type, index, bytes).await; } pub async fn handle_block(&self, block: Block) -> Result<(), ProcessorError> { @@ -135,15 +163,19 @@ impl PSQLProcessor { match processed { None => { // TODO IMPLEMENT PSQL TRANSACTION - let block_height = block.header.height.clone(); + if (block.header.height == 1) { + self.handle_init_chain().await; + } + let block_hash = self.dao.create_block(block.header).await; if block.txs.len() as i32 == 0 { println!("No platform transactions at block height {}", block_height.clone()); } + println!("Processing block at height {}", block_height.clone()); for (i, tx_base_64) in block.txs.iter().enumerate() { let bytes = general_purpose::STANDARD.decode(tx_base_64).unwrap(); let st_result = self.decoder.decode(bytes).await; @@ -155,10 +187,43 @@ impl PSQLProcessor { Ok(()) } - Some(st) => { + Some(_) => { println!("Block at the height {} has been already processed", &block.header.height); Ok(()) } } } + + + pub async fn handle_init_chain(&self) -> () { + println!("Processing initChain"); + + let mut system_contract; + let mut data_contract; + + system_contract = SystemDataContract::Withdrawals; + data_contract = DataContract::from(system_contract); + println!("Processing SystemDataContract::Withdrawals {}", data_contract.identifier.to_string(Base58)); + self.dao.create_data_contract(data_contract).await; + + system_contract = SystemDataContract::MasternodeRewards; + data_contract = DataContract::from(system_contract); + println!("Processing 
SystemDataContract::MasternodeRewards {}", data_contract.identifier.to_string(Base58)); + self.dao.create_data_contract(data_contract).await; + + system_contract = SystemDataContract::FeatureFlags; + data_contract = DataContract::from(system_contract); + println!("Processing SystemDataContract::FeatureFlags {}", data_contract.identifier.to_string(Base58)); + self.dao.create_data_contract(data_contract).await; + + system_contract = SystemDataContract::DPNS; + data_contract = DataContract::from(system_contract); + println!("Processing SystemDataContract::DPNS {}", data_contract.identifier.to_string(Base58)); + self.dao.create_data_contract(data_contract).await; + + system_contract = SystemDataContract::Dashpay; + data_contract = DataContract::from(system_contract); + println!("Processing SystemDataContract::Dashpay {}", data_contract.identifier.to_string(Base58)); + self.dao.create_data_contract(data_contract).await; + } }