added provenance pulse web app endpoints and db cache
jdfigure committed Feb 14, 2025
1 parent 03df665 commit ed04c2a
Showing 21 changed files with 1,218 additions and 30 deletions.
12 changes: 12 additions & 0 deletions database/src/main/resources/db/migration/V1_98__Pulse_cache.sql
@@ -0,0 +1,12 @@
CREATE TABLE IF NOT EXISTS pulse_cache
(
id SERIAL PRIMARY KEY,
cache_date DATE NOT NULL,
updated_timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
type TEXT NOT NULL,
data JSONB NOT NULL
);

CREATE INDEX IF NOT EXISTS idx_pulse_cache_date ON pulse_cache (cache_date);
CREATE INDEX IF NOT EXISTS idx_pulse_cache_type ON pulse_cache (type);
CREATE UNIQUE INDEX IF NOT EXISTS idx_pulse_cache_date_type ON pulse_cache (cache_date, type);
@@ -0,0 +1,6 @@
ALTER TABLE pulse_cache
ADD COLUMN subtype TEXT;

CREATE INDEX IF NOT EXISTS idx_pulse_cache_subtype ON pulse_cache (subtype);
DROP INDEX IF EXISTS idx_pulse_cache_date_type;
CREATE UNIQUE INDEX IF NOT EXISTS idx_pulse_cache_date_type_subtype ON pulse_cache (cache_date, type, subtype);
@@ -7,13 +7,14 @@ import java.util.concurrent.TimeUnit

@Configuration
class CacheConfig {

@Bean
fun cacheManager() =
CaffeineCacheManager("responses").apply {
setCaffeine(caffieneConfig())
setCaffeine(caffeineConfig())
}

fun caffieneConfig() =
fun caffeineConfig() =
com.github.benmanes.caffeine.cache.Caffeine.newBuilder()
.expireAfterWrite(30, TimeUnit.SECONDS)
.maximumSize(100)
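
As a usage sketch (not part of this commit): a Spring service method can opt into the "responses" cache configured above, assuming @EnableCaching is active in the application; the service and method names here are illustrative.

import org.springframework.cache.annotation.Cacheable
import org.springframework.stereotype.Service

@Service
class PulseSummaryService {
    // Results land in the "responses" Caffeine cache declared in CacheConfig, so repeat
    // calls with the same key within the 30-second expireAfterWrite window (and under the
    // 100-entry maximumSize) skip this method body entirely.
    @Cacheable("responses")
    fun expensiveSummary(key: String): String {
        // Placeholder for an expensive computation or downstream call.
        return "summary-for-$key"
    }
}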
@@ -28,6 +28,7 @@ import org.jetbrains.exposed.dao.id.EntityID
import org.jetbrains.exposed.dao.id.IdTable
import org.jetbrains.exposed.dao.id.IntIdTable
import org.jetbrains.exposed.sql.Op
import org.jetbrains.exposed.sql.and
import org.jetbrains.exposed.sql.insert
import org.jetbrains.exposed.sql.insertAndGetId
import org.jetbrains.exposed.sql.insertIgnore
@@ -69,6 +70,13 @@ class AccountRecord(id: EntityID<Int>) : IntEntity(id) {
AccountRecord.find { AccountTable.type inList types }.toList()
}

fun countActiveAccounts() = transaction {
AccountRecord.find {
(AccountTable.isContract eq Op.FALSE) and
(AccountTable.baseAccount.isNotNull()) and
(AccountTable.type eq "BaseAccount")
}.count()
}
fun findContractAccounts() = transaction {
AccountRecord.find { AccountTable.isContract eq Op.TRUE }.toList()
}
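
A small, purely illustrative read of the new counter alongside the existing contract-account finder; neither call site is part of this commit.

// Active accounts = non-contract rows of type "BaseAccount" with a populated base_account column.
val activeAccountCount: Long = AccountRecord.countActiveAccounts()

// Existing helper shown above, for comparison against the contract population.
val contractAccountCount: Int = AccountRecord.findContractAccounts().size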
@@ -2,6 +2,8 @@ package io.provenance.explorer.domain.entities

import io.provenance.explorer.OBJECT_MAPPER
import io.provenance.explorer.domain.core.sql.jsonb
import io.provenance.explorer.domain.models.explorer.pulse.PulseCacheType
import io.provenance.explorer.domain.models.explorer.pulse.PulseMetric
import io.provenance.explorer.model.ChainAum
import io.provenance.explorer.model.ChainMarketRate
import io.provenance.explorer.model.CmcHistoricalQuote
@@ -14,6 +16,7 @@ import org.jetbrains.exposed.dao.IntEntityClass
import org.jetbrains.exposed.dao.id.EntityID
import org.jetbrains.exposed.dao.id.IdTable
import org.jetbrains.exposed.dao.id.IntIdTable
import org.jetbrains.exposed.sql.Column
import org.jetbrains.exposed.sql.SortOrder
import org.jetbrains.exposed.sql.SqlExpressionBuilder.eq
import org.jetbrains.exposed.sql.and
@@ -278,7 +281,7 @@ class ProcessQueueRecord(id: EntityID<Int>) : IntEntity(id) {
fun findByType(processType: ProcessQueueType) = transaction {
ProcessQueueRecord.find {
(ProcessQueueTable.processType eq processType.name) and
(ProcessQueueTable.processing eq false)
(ProcessQueueTable.processing eq false)
}.toList()
}

@@ -291,19 +294,63 @@ class ProcessQueueRecord(id: EntityID<Int>) : IntEntity(id) {
fun delete(processType: ProcessQueueType, value: String) = transaction {
ProcessQueueTable.deleteWhere {
(ProcessQueueTable.processType eq processType.name) and
(processValue eq value)
(processValue eq value)
}
}

fun insertIgnore(processType: ProcessQueueType, processValue: String) = transaction {
ProcessQueueTable.insertIgnore {
it[this.processType] = processType.name
it[this.processValue] = processValue
fun insertIgnore(processType: ProcessQueueType, processValue: String) =
transaction {
ProcessQueueTable.insertIgnore {
it[this.processType] = processType.name
it[this.processValue] = processValue
}
}
}
}

var processType by ProcessQueueTable.processType
var processValue by ProcessQueueTable.processValue
var processing by ProcessQueueTable.processing
}

object PulseCacheTable : IntIdTable(name = "pulse_cache") {
val cacheDate = date("cache_date")
val updatedTimestamp = datetime("updated_timestamp")
val data = jsonb<PulseCacheTable, PulseMetric>("data", OBJECT_MAPPER)
val type: Column<PulseCacheType> = enumerationByName("type", 128, PulseCacheType::class)
val subtype = text("subtype").nullable()
}

class PulseCacheRecord(id: EntityID<Int>) : IntEntity(id) {
companion object : IntEntityClass<PulseCacheRecord>(
PulseCacheTable
) {

fun upsert(date: LocalDate, type: PulseCacheType, data: PulseMetric, subtype: String? = null) = transaction {
findByDateAndType(date, type, subtype)?.apply {
this.data = data
this.updatedTimestamp = LocalDateTime.now()
} ?:
PulseCacheTable.insertIgnore {
it[this.cacheDate] = date
it[this.updatedTimestamp] = LocalDateTime.now()
it[this.type] = type
it[this.subtype] = subtype
it[this.data] = data
}
}

fun findByDateAndType(date: LocalDate, type: PulseCacheType, subtype: String? = null) =
transaction {
PulseCacheRecord.find {
(PulseCacheTable.cacheDate eq date) and
(PulseCacheTable.type eq type) and
(if (subtype != null) PulseCacheTable.subtype eq subtype else PulseCacheTable.subtype.isNull())
}.firstOrNull()
}
}

var type by PulseCacheTable.type
var data by PulseCacheTable.data
var updatedTimestamp by PulseCacheTable.updatedTimestamp
var subtype by PulseCacheTable.subtype
}
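
A hedged usage sketch of the new cache record: write a metric for today, then read it back keyed by date, type, and optional subtype. The wrapper function below is illustrative; only upsert and findByDateAndType come from this commit.

import java.time.LocalDate

// Illustrative helper, not part of the commit: cache a metric for today and return
// what is now stored for that (date, type, subtype) key.
fun cacheDailyMetric(type: PulseCacheType, metric: PulseMetric, subtype: String? = null): PulseMetric {
    val today = LocalDate.now()
    // First write of the day inserts a row; later writes update data and updated_timestamp.
    PulseCacheRecord.upsert(today, type, metric, subtype)
    // A null subtype is matched with an IS NULL check rather than an equality comparison.
    return PulseCacheRecord.findByDateAndType(today, type, subtype)!!.data
}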
@@ -67,7 +67,8 @@ class NavEventsRecord(id: EntityID<Int>) : IntEntity(id) {
scopeId: String? = null,
fromDate: LocalDateTime? = null,
toDate: LocalDateTime? = null,
priceDenoms: List<String>? = null
priceDenoms: List<String>? = null,
source: String? = null
) = transaction {
var query = """
SELECT block_height, block_time, tx_hash, event_order,
@@ -106,6 +107,11 @@ class NavEventsRecord(id: EntityID<Int>) : IntEntity(id) {
}
}

source?.let {
query += " AND source = ?"
args.add(Pair(VarCharColumnType(), it))
}

query += " ORDER BY block_height DESC, event_order DESC"

query.execAndMap(args) {
@@ -9,10 +9,6 @@ import io.provenance.explorer.config.ExplorerProperties
import io.provenance.explorer.domain.core.logger
import io.provenance.explorer.domain.core.sql.jsonb
import io.provenance.explorer.domain.core.sql.toProcedureObject
import io.provenance.explorer.domain.entities.FeeType.BASE_FEE_OVERAGE
import io.provenance.explorer.domain.entities.FeeType.BASE_FEE_USED
import io.provenance.explorer.domain.entities.FeeType.CUSTOM_FEE
import io.provenance.explorer.domain.entities.FeeType.MSG_BASED_FEE
import io.provenance.explorer.domain.extensions.CUSTOM_FEE_MSG_TYPE
import io.provenance.explorer.domain.extensions.exec
import io.provenance.explorer.domain.extensions.execAndMap
@@ -43,6 +39,7 @@ import io.provenance.explorer.model.TxAssociatedValues
import io.provenance.explorer.model.TxFeepayer
import io.provenance.explorer.model.TxGasVolume
import io.provenance.explorer.model.TxStatus
import io.provenance.explorer.model.base.PagedResults
import io.provenance.explorer.model.base.stringfy
import io.provenance.explorer.service.AssetService
import org.jetbrains.exposed.dao.IntEntity
@@ -69,6 +66,7 @@ import org.jetbrains.exposed.sql.selectAll
import org.jetbrains.exposed.sql.transactions.TransactionManager
import org.jetbrains.exposed.sql.transactions.transaction
import java.math.BigDecimal
import java.time.LocalDate
import java.time.LocalDateTime
import java.time.format.DateTimeFormatter

@@ -151,19 +149,19 @@ class TxCacheRecord(id: EntityID<Int>) : IntEntity(id) {
var join: ColumnSet = TxCacheTable

if (tqp.msgTypes.isNotEmpty()) {
join = join.innerJoin(TxMsgTypeQueryTable, { TxCacheTable.id }, { TxMsgTypeQueryTable.txHashId })
join = join.innerJoin(TxMsgTypeQueryTable, { TxCacheTable.id }, { txHashId })
}
if ((tqp.addressId != null && tqp.addressType != null) || tqp.address != null) {
join = join.innerJoin(TxAddressJoinTable, { TxCacheTable.id }, { TxAddressJoinTable.txHashId })
join = join.innerJoin(TxAddressJoinTable, { TxCacheTable.id }, { txHashId })
}
if (tqp.markerId != null || tqp.denom != null) {
join = join.innerJoin(TxMarkerJoinTable, { TxCacheTable.id }, { TxMarkerJoinTable.txHashId })
join = join.innerJoin(TxMarkerJoinTable, { TxCacheTable.id }, { txHashId })
}
if (tqp.nftId != null) {
join = join.innerJoin(TxNftJoinTable, { TxCacheTable.id }, { TxNftJoinTable.txHashId })
join = join.innerJoin(TxNftJoinTable, { TxCacheTable.id }, { txHashId })
}
if (tqp.ibcChannelIds.isNotEmpty()) {
join = join.innerJoin(TxIbcTable, { TxCacheTable.id }, { TxIbcTable.txHashId })
join = join.innerJoin(TxIbcTable, { TxCacheTable.id }, { txHashId })
}

val query = if (distinctQuery != null) join.slice(distinctQuery).selectAll() else join.selectAll()
@@ -206,7 +204,79 @@ class TxCacheRecord(id: EntityID<Int>) : IntEntity(id) {

query
}
}

fun countForDates(daysPrior: Int): List<Pair<LocalDate, Long>> = transaction {
val query = """
select sum(daily_tx_cnt.cnt) as count, ds
from (select count(*) cnt, tx_timestamp ts, date_trunc('day', tx_timestamp) ds
from tx_cache
where tx_timestamp > current_timestamp - interval '$daysPrior days'
group by ts, ds) as daily_tx_cnt
group by ds
order by ds;
""".trimIndent()
query.execAndMap {
Pair(
it.getTimestamp("ds").toLocalDateTime().toLocalDate(),
it.getLong("count")
)
}
}
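
A sketch of how the daily counts might be consumed, for example to derive a day-over-day trend with the calculatePulseMetricTrend extension added elsewhere in this commit; the calling code itself is illustrative.

// Daily transaction totals for the last 30 days, oldest first, as (date, count) pairs.
val dailyCounts = TxCacheRecord.countForDates(30)

// Day-over-day change between the two most recent days (0 when fewer than two days exist).
val delta = dailyCounts.takeLast(2)
    .let { if (it.size == 2) it[1].second - it[0].second else 0L }

// UP / DOWN / FLAT, using the BigDecimal extension added in this commit.
val trend = delta.toBigDecimal().calculatePulseMetricTrend()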

fun pulseTransactionsWithValue(denom: String, afterDateTime: LocalDateTime, page: Int, count: Int): PagedResults<Map<String, kotlin.Any?>> = transaction {
val query = """
select tx.id as tx_id,
tx.hash,
tx.height,
tx.tx_timestamp,
mtype.category,
mtype.type,
mtype.proto_type,
mtype.module,
tme.event_type,
attr.attr_key,
attr.attr_value
from tx_cache tx
join tx_msg_event as tme on tx.id = tme.tx_hash_id
join tx_msg_event_attr as attr on tme.id = attr.tx_msg_event_id
join tx_message_type as mtype on mtype.id = tme.tx_msg_type_id
join tx_marker_join as denom on denom.tx_hash_id = tx.id
where tme.tx_msg_type_id IN
(select id from tx_message_type where module in ('exchange', 'bank'))
and tx.tx_timestamp > ?
and tx.error_code is null
and tx.codespace is null
and denom.denom = ?
and event_type = 'coin_spent'
and attr_key = 'amount'
and attr_value like ?
order by height desc, tx_id
""".trimIndent()
val arguments = mutableListOf<Pair<ColumnType, *>>(
Pair(JavaLocalDateTimeColumnType(), afterDateTime),
Pair(TextColumnType(), denom),
Pair(TextColumnType(), "%$denom%"),
)

val countQuery = "select count(*) from ($query) as count"
val rowCount = countQuery.execAndMap(arguments) {
it.getLong(1)
}.first()

arguments.add(Pair(IntegerColumnType(), count))
arguments.add(Pair(IntegerColumnType(), page * count))

"$query limit ? offset ?".execAndMap(arguments) {
val map = mutableMapOf<String, kotlin.Any?>()
(1..it.metaData.columnCount).forEach { index ->
map[it.metaData.getColumnName(index)] = it.getObject(index)
}
map // one column-name -> value map per row; execAndMap collects these into a list
}.let {
PagedResults(rowCount.div(count).toInt(), it, rowCount, emptyMap())
}
}
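
A paging sketch for the new value-bearing transaction query; the denom literal and page size are placeholders, and the field name used to read rows off PagedResults is an assumption about the model class, not something shown in this diff.

import java.time.LocalDateTime

// First page (page index 0) of exchange/bank transactions that moved the given denom
// in the last 24 hours; each row comes back as a column-name -> value map.
val page = TxCacheRecord.pulseTransactionsWithValue(
    denom = "nhash",                                  // placeholder denom
    afterDateTime = LocalDateTime.now().minusDays(1),
    page = 0,
    count = 50
)
// Assumes PagedResults exposes its row list as `results`.
val txHashes = page.results.mapNotNull { it["hash"] }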
}

var hash by TxCacheTable.hash
var height by TxCacheTable.height
@@ -364,7 +434,7 @@ class TxMessageRecord(id: EntityID<Int>) : IntEntity(id) {

fun findByHashIdPaginated(hashId: Int, msgTypes: List<Int>, limit: Int, offset: Int) = transaction {
val query = TxMessageTable
.innerJoin(TxMsgTypeSubtypeTable, { TxMessageTable.id }, { TxMsgTypeSubtypeTable.txMsgId })
.innerJoin(TxMsgTypeSubtypeTable, { TxMessageTable.id }, { txMsgId })
.slice(tableColSet)
.select { TxMessageTable.txHashId eq hashId }
if (msgTypes.isNotEmpty()) {
@@ -416,17 +486,17 @@ class TxMessageRecord(id: EntityID<Int>) : IntEntity(id) {

if (tqp.msgTypes.isNotEmpty())
join = if (tqp.primaryTypesOnly)
join.innerJoin(TxMsgTypeSubtypeTable, { TxMessageTable.txHashId }, { TxMsgTypeSubtypeTable.txHashId })
join.innerJoin(TxMsgTypeSubtypeTable, { TxMessageTable.txHashId }, { txHashId })
else
join.innerJoin(TxMsgTypeQueryTable, { TxMessageTable.txHashId }, { TxMsgTypeQueryTable.txHashId })
join.innerJoin(TxMsgTypeQueryTable, { TxMessageTable.txHashId }, { txHashId })
if (tqp.txStatus != null)
join = join.innerJoin(TxCacheTable, { TxMessageTable.txHashId }, { TxCacheTable.id })
if ((tqp.addressId != null && tqp.addressType != null) || tqp.address != null)
join = join.innerJoin(TxAddressJoinTable, { TxMessageTable.txHashId }, { TxAddressJoinTable.txHashId })
join = join.innerJoin(TxAddressJoinTable, { TxMessageTable.txHashId }, { txHashId })
if (tqp.smCodeId != null)
join = join.innerJoin(TxSmCodeTable, { TxMessageTable.txHashId }, { TxSmCodeTable.txHashId })
join = join.innerJoin(TxSmCodeTable, { TxMessageTable.txHashId }, { txHashId })
if (tqp.smContractAddrId != null)
join = join.innerJoin(TxSmContractTable, { TxMessageTable.txHashId }, { TxSmContractTable.txHashId })
join = join.innerJoin(TxSmContractTable, { TxMessageTable.txHashId }, { txHashId })

val query = if (distinctQuery != null) join.slice(distinctQuery).selectAll() else join.selectAll()

@@ -772,13 +842,13 @@ class TxFeeRecord(id: EntityID<Int>) : IntEntity(id) {
}.let { (baseFeeOverage, baseFeeUsed) ->
val nhash = assetService.getAssetRaw(ExplorerProperties.UTILITY_TOKEN).second
// insert used fee
feeList.add(buildInsert(txInfo, BASE_FEE_USED.name, nhash.id.value, nhash.denom, baseFeeUsed))
feeList.add(buildInsert(txInfo, FeeType.BASE_FEE_USED.name, nhash.id.value, nhash.denom, baseFeeUsed))
// insert paid too much fee if > 0
if (baseFeeOverage > BigDecimal.ZERO) {
feeList.add(
buildInsert(
txInfo,
BASE_FEE_OVERAGE.name,
FeeType.BASE_FEE_OVERAGE.name,
nhash.id.value,
nhash.denom,
baseFeeOverage
@@ -789,7 +859,7 @@ class TxFeeRecord(id: EntityID<Int>) : IntEntity(id) {
if (tx.success()) {
msgBasedFeeList.forEach { fee ->
val feeType =
if (fee.msgType == CUSTOM_FEE_MSG_TYPE) CUSTOM_FEE.name else MSG_BASED_FEE.name
if (fee.msgType == CUSTOM_FEE_MSG_TYPE) FeeType.CUSTOM_FEE.name else FeeType.MSG_BASED_FEE.name
feeList.add(
buildInsert(
txInfo,
@@ -14,6 +14,7 @@ import io.provenance.explorer.config.ExplorerProperties.Companion.PROV_VAL_OPER_
import io.provenance.explorer.domain.entities.MissedBlocksRecord
import io.provenance.explorer.domain.exceptions.InvalidArgumentException
import io.provenance.explorer.domain.models.explorer.Addresses
import io.provenance.explorer.domain.models.explorer.pulse.MetricTrendType
import io.provenance.explorer.model.base.Bech32
import io.provenance.explorer.model.base.toBech32Data
import io.provenance.explorer.model.base.toMAddress
@@ -181,3 +182,10 @@ fun List<BigDecimal>.average() = this.fold(BigDecimal.ZERO, BigDecimal::add)
fun String.nullOrString() = this.ifBlank { null }

fun String.toNormalCase() = this.splitToWords().joinToString(" ")

fun BigDecimal.calculatePulseMetricTrend() =
when {
this > BigDecimal.ZERO -> MetricTrendType.UP
this < BigDecimal.ZERO -> MetricTrendType.DOWN
else -> MetricTrendType.FLAT
}
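
Two quick examples of the new extension, with made-up values:

import java.math.BigDecimal

val previous = BigDecimal("100")
val current = BigDecimal("92.5")

// A negative delta maps to DOWN, a positive delta to UP, and zero to FLAT.
val trend = (current - previous).calculatePulseMetricTrend() // MetricTrendType.DOWN
val flat = BigDecimal.ZERO.calculatePulseMetricTrend()       // MetricTrendType.FLAT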