Skip to content

Commit

Permalink
Do one data query and re-use it for each time period
Browse files · Browse the repository at this point in the history
  • Loading branch information
paullinator committed Feb 10, 2024
1 parent 069a573 commit 93674fc
Show file tree
Hide file tree
Showing 2 changed files with 46 additions and 53 deletions.
48 changes: 43 additions & 5 deletions src/cacheEngine.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,9 @@ import startOfMonth from 'date-fns/startOfMonth'
import sub from 'date-fns/sub'
import nano from 'nano'

import { getAnalytics } from './apiAnalytics'
import { config } from './config'
import { getAnalytic } from './dbutils'
import { asDbReq } from './dbutils'
import { initDbs } from './initDbs'
import { asApps } from './types'
import { datelog, snooze } from './util'
Expand Down Expand Up @@ -64,15 +65,52 @@ export async function cacheEngine(): Promise<void> {
) {
continue
}

const query = {
selector: {
status: { $eq: 'complete' },
usdValue: { $gte: 0 },
timestamp: { $gte: start, $lt: end }
},
fields: [
'orderId',
'depositCurrency',
'payoutCurrency',
'timestamp',
'usdValue'
],
use_index: 'timestamp-p',
sort: ['timestamp'],
limit: 1000000
}
const appAndPartnerId = `${app.appId}_${partnerId}`
let data
try {
data = await reportsTransactions.partitionedFind(
appAndPartnerId,
query
)
} catch (e) {
datelog('Error fetching transactions', e)
console.error(e)
continue
}

const dbReq = asDbReq(data)
const dbTxs = dbReq.docs

for (const timePeriod of TIME_PERIODS) {
const result = await getAnalytic(
const analytic = getAnalytics(
dbTxs,
start,
end,
app.appId,
partnerId,
timePeriod,
reportsTransactions
appAndPartnerId,
timePeriod
)
const { result } = analytic
if (result.numAllTxs === 0) continue

// Create cache docs
if (result != null) {
const cacheResult = result[timePeriod].map(bucket => {
Expand Down
51 changes: 3 additions & 48 deletions src/dbutils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ import { datelog, promiseTimeout } from './util'
const BATCH_ADVANCE = 100
const SIX_DAYS_IN_SECONDS = 6 * 24 * 60 * 60

const asDbReq = asObject({
export const asDbReq = asObject({
docs: asArray(
asObject({
orderId: asString,
Expand All @@ -21,6 +21,8 @@ const asDbReq = asObject({
)
})

// Cleaned shape of a CouchDB `find`/`partitionedFind` response body
// (`{ docs: [...] }`) as validated by the `asDbReq` cleaner above.
// Cleaners idiom: a cleaner's return type is the cleaned data type.
export type DbReq = ReturnType<typeof asDbReq>

export const pagination = async <T>(
txArray: any[],
partition: nano.DocumentScope<T>
Expand Down Expand Up @@ -50,53 +52,6 @@ export const pagination = async <T>(
datelog(`total errors: ${numErrors}`)
}

/**
 * NOTE(review): this function is shown as REMOVED by this commit; its query
 * and analytics logic were inlined into the cacheEngine loop so that one
 * database query can be shared across all time periods.
 *
 * Fetches completed transactions for one app/partner pair in the window
 * [start, end) and runs `getAnalytics` over them for a single time period.
 *
 * @param start - inclusive lower bound on `timestamp` (epoch units —
 *   presumably seconds; confirm against the caller)
 * @param end - exclusive upper bound on `timestamp`
 * @param appId - app identifier; combined with `partnerId` to form the
 *   CouchDB partition key `${appId}_${partnerId}`
 * @param partnerId - partner identifier
 * @param timePeriod - bucket granularity passed through to `getAnalytics`
 * @param transactionDatabase - nano document scope supporting
 *   `partitionedFind` (typed `any`; a real nano type would be safer)
 * @returns the analytics result, or `undefined` when no transactions matched
 * @throws a generic "Internal server error" wrapping any query/cleaning
 *   failure (the original error is only logged to the console)
 */
export const getAnalytic = async (
start: number,
end: number,
appId: string,
partnerId: string,
timePeriod: string,
transactionDatabase: any
): Promise<AnalyticsResult | undefined> => {
// CouchDB Mango query: only settled, non-negative-value transactions
// inside the time window, projected down to the fields getAnalytics needs.
const query = {
selector: {
status: { $eq: 'complete' },
usdValue: { $gte: 0 },
timestamp: { $gte: start, $lt: end }
},
fields: [
'orderId',
'depositCurrency',
'payoutCurrency',
'timestamp',
'usdValue'
],
// Uses the partitioned 'timestamp-p' index so the sort below is indexed.
use_index: 'timestamp-p',
sort: ['timestamp'],
// Effectively "no limit" — assumes a partition never exceeds 1M matching
// docs in the window; TODO confirm this bound is safe.
limit: 1000000
}
try {
// Partition key scopes the query to one app/partner pair.
const appAndPartnerId = `${appId}_${partnerId}`
const data = await transactionDatabase.partitionedFind(
appAndPartnerId,
query
)

// Validate/clean the raw response, then bucket into analytics.
const analytic = getAnalytics(
asDbReq(data).docs,
start,
end,
appId,
appAndPartnerId,
timePeriod
)
// Treat "no transactions" as absence rather than an empty result.
return analytic.result.numAllTxs > 0 ? analytic : undefined
} catch (e) {
console.log(e)
// Original error detail is swallowed here — only the console log has it.
throw new Error(`getAnalytic: Internal server error.`)
}
}

export const cacheAnalytic = async (
start: number,
end: number,
Expand Down

0 comments on commit 93674fc

Please sign in to comment.