diff --git a/models/core/src/core.ts b/models/core/src/core.ts index d03c01d3c75..3e68b08dbba 100644 --- a/models/core/src/core.ts +++ b/models/core/src/core.ts @@ -108,6 +108,7 @@ export class TDoc extends TObj implements Doc { @Prop(TypeTimestamp(), core.string.CreatedDate) @ReadOnly() + @Index(IndexKind.IndexedDsc) createdOn!: Timestamp } diff --git a/packages/core/src/measurements/context.ts b/packages/core/src/measurements/context.ts index ca42ebd1988..0f4d4062859 100644 --- a/packages/core/src/measurements/context.ts +++ b/packages/core/src/measurements/context.ts @@ -1,28 +1,29 @@ // Basic performance metrics suite. import { childMetrics, measure, newMetrics } from './metrics' -import { MeasureContext, MeasureLogger, Metrics, ParamType } from './types' +import { FullParamsType, MeasureContext, MeasureLogger, Metrics, ParamsType } from './types' /** * @public */ export class MeasureMetricsContext implements MeasureContext { private readonly name: string - private readonly params: Record + private readonly params: ParamsType logger: MeasureLogger metrics: Metrics private readonly done: (value?: number) => void constructor ( name: string, - params: Record, + params: ParamsType, + fullParams: FullParamsType = {}, metrics: Metrics = newMetrics(), logger?: MeasureLogger ) { this.name = name this.params = params this.metrics = metrics - this.done = measure(metrics, params) + this.done = measure(metrics, params, fullParams) this.logger = logger ?? { info: (msg, args) => { @@ -35,20 +36,21 @@ export class MeasureMetricsContext implements MeasureContext { } measure (name: string, value: number): void { - const c = new MeasureMetricsContext('#' + name, {}, childMetrics(this.metrics, ['#' + name])) + const c = new MeasureMetricsContext('#' + name, {}, {}, childMetrics(this.metrics, ['#' + name])) c.done(value) } - newChild (name: string, params: Record, logger?: MeasureLogger): MeasureContext { - return new MeasureMetricsContext(name, params, childMetrics(this.metrics, [name]), logger) + newChild (name: string, params: ParamsType, fullParams?: FullParamsType, logger?: MeasureLogger): MeasureContext { + return new MeasureMetricsContext(name, params, fullParams ?? {}, childMetrics(this.metrics, [name]), logger) } async with( name: string, - params: Record, - op: (ctx: MeasureContext) => T | Promise + params: ParamsType, + op: (ctx: MeasureContext) => T | Promise, + fullParams?: ParamsType ): Promise { - const c = this.newChild(name, params) + const c = this.newChild(name, params, fullParams) try { let value = op(c) if (value instanceof Promise) { diff --git a/packages/core/src/measurements/metrics.ts b/packages/core/src/measurements/metrics.ts index 6e1f54e0677..047a0837979 100644 --- a/packages/core/src/measurements/metrics.ts +++ b/packages/core/src/measurements/metrics.ts @@ -1,7 +1,8 @@ // Basic performance metrics suite. import { MetricsData } from '.' -import { Metrics, ParamType } from './types' +import { cutObjectArray } from '../utils' +import { FullParamsType, Metrics, ParamsType } from './types' /** * @public @@ -21,11 +22,30 @@ export function newMetrics (): Metrics { } } +function getUpdatedTopResult ( + current: Metrics['topResult'], + time: number, + params: FullParamsType +): Metrics['topResult'] { + if (current === undefined || current.length < 3 || current.some((it) => it.value < time)) { + const result = [ + ...(current ?? 
[]), + { + value: time, + params: cutObjectArray(params) + } + ] + result.sort((a, b) => b.value - a.value) + return result.slice(0, 3) + } + return current +} + /** * Measure with tree expansion. Operation counter will be added only to leaf's. * @public */ -export function measure (metrics: Metrics, params: Record): () => void { +export function measure (metrics: Metrics, params: ParamsType, fullParams: FullParamsType = {}): () => void { const st = Date.now() return (value?: number) => { const ed = Date.now() @@ -47,10 +67,14 @@ export function measure (metrics: Metrics, params: Record): ( } param.value += value ?? ed - st param.operations++ + + param.topResult = getUpdatedTopResult(param.topResult, ed - st, fullParams) } // Update leaf data metrics.value += value ?? ed - st metrics.operations++ + + metrics.topResult = getUpdatedTopResult(metrics.topResult, ed - st, fullParams) } } @@ -84,11 +108,13 @@ export function metricsAggregate (m: Metrics): Metrics { .reduce((p, v) => { return p + v.value }, 0) + return { operations: m.operations, measurements: ms, params: m.params, - value: sumVal + value: sumVal, + topResult: m.topResult } } diff --git a/packages/core/src/measurements/types.ts b/packages/core/src/measurements/types.ts index c870c65d716..2052948d7f7 100644 --- a/packages/core/src/measurements/types.ts +++ b/packages/core/src/measurements/types.ts @@ -3,12 +3,26 @@ */ export type ParamType = string | number | boolean | undefined +/** + * @public + */ +export type ParamsType = Record + +/** + * @public + */ +export type FullParamsType = Record + /** * @public */ export interface MetricsData { operations: number value: number + topResult?: { + value: number + params: FullParamsType + }[] } /** @@ -31,9 +45,14 @@ export interface MeasureLogger { */ export interface MeasureContext { // Create a child metrics context - newChild: (name: string, params: Record, logger?: MeasureLogger) => MeasureContext + newChild: (name: string, params: ParamsType, fullParams?: FullParamsType, logger?: MeasureLogger) => MeasureContext - with: (name: string, params: Record, op: (ctx: MeasureContext) => T | Promise) => Promise + with: ( + name: string, + params: ParamsType, + op: (ctx: MeasureContext) => T | Promise, + fullParams?: FullParamsType + ) => Promise logger: MeasureLogger diff --git a/packages/core/src/server.ts b/packages/core/src/server.ts index 8eccbdf92ca..79dc3eb1be5 100644 --- a/packages/core/src/server.ts +++ b/packages/core/src/server.ts @@ -74,6 +74,7 @@ export interface ServerStorage extends LowLevelStorage { query: DocumentQuery, options?: FindOptions & { domain?: Domain // Allow to find for Doc's in specified domain only. 
+ prefix?: string } ) => Promise> searchFulltext: (ctx: MeasureContext, query: SearchQuery, options: SearchOptions) => Promise diff --git a/packages/query/src/index.ts b/packages/query/src/index.ts index 7c13611ff0a..6b7b3d7ea58 100644 --- a/packages/query/src/index.ts +++ b/packages/query/src/index.ts @@ -15,29 +15,25 @@ import core, { AttachedDoc, - checkMixinKey, + BulkUpdateEvent, Class, Client, + DOMAIN_MODEL, Doc, DocumentQuery, - DOMAIN_MODEL, FindOptions, - findProperty, FindResult, - generateId, - getObjectValue, Hierarchy, IndexingUpdateEvent, - BulkUpdateEvent, Lookup, LookupData, - matchQuery, ModelDb, Ref, - resultSort, ReverseLookups, + SearchOptions, + SearchQuery, + SearchResult, SortingQuery, - toFindResult, Tx, TxCollectionCUD, TxCreateDoc, @@ -49,9 +45,13 @@ import core, { TxWorkspaceEvent, WithLookup, WorkspaceEvent, - SearchQuery, - SearchOptions, - SearchResult + checkMixinKey, + findProperty, + generateId, + getObjectValue, + matchQuery, + resultSort, + toFindResult } from '@hcengineering/core' import { deepEqual } from 'fast-equals' @@ -340,6 +340,32 @@ export class LiveQuery extends TxProcessor implements Client { } } + async queryFind( + _class: Ref>, + query: DocumentQuery, + options?: FindOptions + ): Promise> { + const current = this.findQuery(_class, query, options) + if (current === undefined) { + const q = this.createQuery( + _class, + query, + { + callback: () => { + // do nothing + }, + callbackId: generateId() + }, + options + ) + if (q.result instanceof Promise) { + q.result = await q.result + } + return toFindResult(this.clone(q.result), q.total) as FindResult + } + return toFindResult(this.clone((current?.result as T[]) ?? []), current.total) + } + private async checkSearch (q: Query, _id: Ref): Promise { const match = await this.client.findOne(q._class, { $search: q.query.$search, _id }, q.options) if (q.result instanceof Promise) { diff --git a/plugins/workbench-resources/src/components/statistics/MetricsInfo.svelte b/plugins/workbench-resources/src/components/statistics/MetricsInfo.svelte index 16da867404b..e3e4159567c 100644 --- a/plugins/workbench-resources/src/components/statistics/MetricsInfo.svelte +++ b/plugins/workbench-resources/src/components/statistics/MetricsInfo.svelte @@ -53,6 +53,29 @@ + {#if metrics.topResult !== undefined && metrics.topResult.length > 0 && metrics.topResult[0].value > 0 && Object.keys(metrics.topResult[0].params).length > 0} +
+      Slowest result:{metrics.topResult[0].value}
+      {#each metrics.topResult ?? [] as r}
+        Time:{r.value}
+          {JSON.stringify(r, null, 2)}
+      {/each}
+  {/if}
   {#each Object.entries(metrics.measurements) as [k, v], i (k)}
@@ -61,7 +84,12 @@
   {#each Object.entries(metrics.params) as [k, v], i}
     {#each Object.entries(v).toSorted((a, b) => b[1].value / (b[1].operations + 1) - a[1].value / (a[1].operations + 1)) as [kk, vv]}
-
+      {@const childExpandable =
+        vv.topResult !== undefined &&
+        vv.topResult.length > 0 &&
+        vv.topResult[0].value > 0 &&
+        Object.keys(vv.topResult[0].params).length > 0}
       # {k} = {kk}
@@ -71,11 +99,29 @@
       {vv.operations}
-      {showAvg(kk, vv.value, vv.operations)}
+      {showAvg(kk, vv.value, vv.operations)}
+      {vv.value}
+      {#if childExpandable}
+        {#each vv.topResult ?? [] as r}
+          Time:{r.value}
+            {JSON.stringify(r, null, 2)}
+        {/each}
+      {/if}
     {/each}
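For reference, a minimal sketch of how the extended measurement API in this diff can be used. It assumes `MeasureMetricsContext`, `newMetrics` and `metricsAggregate` are exported from `@hcengineering/core` as elsewhere in the diff; the operation name and parameter values are illustrative only.

```ts
import { MeasureMetricsContext, metricsAggregate, newMetrics } from '@hcengineering/core'

async function measureExample (): Promise<void> {
  const metrics = newMetrics()
  // New constructor shape introduced above: (name, params, fullParams, metrics, logger?)
  const ctx = new MeasureMetricsContext('example', {}, {}, metrics)

  // `params` stays low-cardinality and is aggregated per value;
  // `fullParams` is retained only in `topResult` for the slowest calls.
  await ctx.with(
    'find-all',
    { _class: 'core:class:Doc' },
    async () => {
      await new Promise<void>((resolve) => setTimeout(resolve, 10)) // stand-in for the measured operation
    },
    { limit: 100, sort: 'modifiedOn' }
  )

  // The aggregated 'find-all' node now carries `topResult`: up to three slowest
  // invocations together with their (cutObjectArray-trimmed) fullParams.
  console.log(JSON.stringify(metricsAggregate(metrics), null, 2))
}
```

These `topResult` entries are what the new "Slowest result" sections in MetricsInfo.svelte render.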
diff --git a/server/core/package.json b/server/core/package.json index 2dfd0836474..7e206ff915e 100644 --- a/server/core/package.json +++ b/server/core/package.json @@ -34,6 +34,7 @@ "@hcengineering/core": "^0.6.28", "@hcengineering/platform": "^0.6.9", "@hcengineering/minio": "^0.6.0", + "@hcengineering/query": "^0.6.8", "fast-equals": "^2.0.3", "html-to-text": "^9.0.3" } diff --git a/server/core/src/indexer/indexer.ts b/server/core/src/indexer/indexer.ts index 78107310b86..fc29c73f5b6 100644 --- a/server/core/src/indexer/indexer.ts +++ b/server/core/src/indexer/indexer.ts @@ -327,16 +327,20 @@ export class FullTextIndexPipeline implements FullTextPipeline { }) while (!this.cancelling) { - await this.metrics.with('initialize-stages', {}, async () => { + await this.metrics.with('initialize-stages', { workspace: this.workspace.name }, async () => { await this.initializeStages() }) - await this.metrics.with('process-remove', {}, async () => { + await this.metrics.with('process-remove', { workspace: this.workspace.name }, async () => { await this.processRemove() }) const _classes = await rateLimitter.exec(() => { - return this.metrics.with('init-stages', {}, async () => await this.processIndex()) + return this.metrics.with( + 'processIndex', + { workspace: this.workspace.name }, + async (ctx) => await this.processIndex(ctx) + ) }) // Also update doc index state queries. @@ -374,7 +378,7 @@ export class FullTextIndexPipeline implements FullTextPipeline { console.log(this.workspace.name, 'Exit indexer', this.indexId) } - private async processIndex (): Promise>[]> { + private async processIndex (ctx: MeasureContext): Promise>[]> { let idx = 0 const _classUpdate = new Set>>() for (const st of this.stages) { @@ -387,14 +391,14 @@ export class FullTextIndexPipeline implements FullTextPipeline { if (!st.enabled) { break } - await this.metrics.with('flush', {}, async () => { + await ctx.with('flush', {}, async () => { await this.flush(true) }) const toSkip = Array.from(this.skipped.entries()) .filter((it) => it[1] > 3) .map((it) => it[0]) - let result = await this.metrics.with( + let result = await ctx.with( 'get-to-index', {}, async () => @@ -460,7 +464,7 @@ export class FullTextIndexPipeline implements FullTextPipeline { // Do Indexing this.currentStage = st - await this.metrics.with('collect', { collector: st.stageId }, async (ctx) => { + await ctx.with('collect', { collector: st.stageId }, async (ctx) => { await st.collect(toIndex, this, ctx) }) if (this.cancelling) { @@ -474,7 +478,7 @@ export class FullTextIndexPipeline implements FullTextPipeline { const toIndex2 = this.matchStates(nst) if (toIndex2.length > 0) { this.currentStage = nst - await this.metrics.with('collect', { collector: nst.stageId }, async (ctx) => { + await ctx.with('collect', { collector: nst.stageId }, async (ctx) => { await nst.collect(toIndex2, this, ctx) }) } @@ -575,152 +579,155 @@ export class FullTextIndexPipeline implements FullTextPipeline { // TODO: Move to migration async checkIndexConsistency (dbStorage: ServerStorage): Promise { await rateLimitter.exec(async () => { - if (process.env.MODEL_VERSION !== undefined && process.env.MODEL_VERSION !== '') { - const modelVersion = (await this.model.findAll(core.class.Version, {}))[0] - if (modelVersion !== undefined) { - const modelVersionString = versionToString(modelVersion) - if (modelVersionString !== process.env.MODEL_VERSION) { - console.error( - `Indexer: Model version mismatch model: ${modelVersionString} env: ${process.env.MODEL_VERSION}` - ) - return + await 
this.metrics.with('check-index-consistency', {}, async (ctx) => { + if (process.env.MODEL_VERSION !== undefined && process.env.MODEL_VERSION !== '') { + const modelVersion = (await this.model.findAll(core.class.Version, {}))[0] + if (modelVersion !== undefined) { + const modelVersionString = versionToString(modelVersion) + if (modelVersionString !== process.env.MODEL_VERSION) { + console.error( + `Indexer: Model version mismatch model: ${modelVersionString} env: ${process.env.MODEL_VERSION}` + ) + return + } } } - } - this.hierarchy.domains() - const allClasses = this.hierarchy.getDescendants(core.class.Doc) - for (const c of allClasses) { - if (this.cancelling) { - return - } + this.hierarchy.domains() + const allClasses = this.hierarchy.getDescendants(core.class.Doc) + for (const c of allClasses) { + if (this.cancelling) { + return + } - if (!isClassIndexable(this.hierarchy, c)) { - // No need, since no indexable fields or attachments. - continue - } + if (!isClassIndexable(this.hierarchy, c)) { + // No need, since no indexable fields or attachments. + continue + } - console.log(this.workspace.name, 'checking index', c) + console.log(this.workspace.name, 'checking index', c) - const generationId = generateId() + const generationId = generateId() - let lastId = '' + let lastId = '' - while (true) { - if (this.cancelling) { - return - } + while (true) { + if (this.cancelling) { + return + } + + let newDocs: DocIndexState[] = [] + let updates = new Map, DocumentUpdate>() + + try { + const docs = await dbStorage.findAll( + ctx, + c, + { _class: c, _id: { $gt: lastId as any } }, + { + limit: 10000, + sort: { _id: 1 }, + projection: { _id: 1, attachedTo: 1, attachedToClass: 1 } as any, + prefix: 'indexer' + } + ) - let newDocs: DocIndexState[] = [] - let updates = new Map, DocumentUpdate>() - - try { - const docs = await dbStorage.findAll( - this.metrics, - c, - { _class: c, _id: { $gt: lastId as any } }, - { - limit: 10000, - sort: { _id: 1 }, - projection: { _id: 1, attachedTo: 1, attachedToClass: 1 } as any + if (docs.length === 0) { + // All updated for this class + break } - ) - if (docs.length === 0) { - // All updated for this class + lastId = docs[docs.length - 1]._id + + const states = ( + await this.storage.findAll( + core.class.DocIndexState, + { + objectClass: c, + _id: { + $gte: docs[0]._id as any, + $lte: docs[docs.length - 1]._id as any + } + }, + { projection: { _id: 1 } } + ) + ).map((it) => it._id) + const statesSet = new Set(states) + + // create missing index states + newDocs = docs + .filter((it) => !statesSet.has(it._id as Ref)) + .map((it) => { + return createStateDoc(it._id, c, { + generationId, + stages: {}, + attributes: {}, + removed: false, + space: it.space, + attachedTo: (it as AttachedDoc)?.attachedTo ?? undefined, + attachedToClass: (it as AttachedDoc)?.attachedToClass ?? 
undefined + }) + }) + + // update generationId for existing index states + updates = new Map() + docs + .filter((it) => statesSet.has(it._id as Ref)) + .forEach((it) => { + updates.set(it._id as Ref, { generationId }) + }) + } catch (e) { + console.error(e) break } - lastId = docs[docs.length - 1]._id + try { + await this.storage.update(DOMAIN_DOC_INDEX_STATE, updates) + } catch (err: any) { + console.error(err) + } - const states = ( - await this.storage.findAll( - core.class.DocIndexState, - { - objectClass: c, - _id: { - $gte: docs[0]._id as any, - $lte: docs[docs.length - 1]._id as any - } - }, - { projection: { _id: 1 } } - ) - ).map((it) => it._id) - const statesSet = new Set(states) - - // create missing index states - newDocs = docs - .filter((it) => !statesSet.has(it._id as Ref)) - .map((it) => { - return createStateDoc(it._id, c, { - generationId, - stages: {}, - attributes: {}, - removed: false, - space: it.space, - attachedTo: (it as AttachedDoc)?.attachedTo ?? undefined, - attachedToClass: (it as AttachedDoc)?.attachedToClass ?? undefined - }) - }) - - // update generationId for existing index states - updates = new Map() - docs - .filter((it) => statesSet.has(it._id as Ref)) - .forEach((it) => { - updates.set(it._id as Ref, { generationId }) - }) - } catch (e) { - console.error(e) - break + try { + await this.storage.upload(DOMAIN_DOC_INDEX_STATE, newDocs) + } catch (err: any) { + console.error(err) + } } - try { - await this.storage.update(DOMAIN_DOC_INDEX_STATE, updates) - } catch (err: any) { - console.error(err) - } + // remove index states for documents that do not exist + const toRemove = ( + await this.storage.findAll( + core.class.DocIndexState, + { objectClass: c, generationId: { $ne: generationId } }, + { projection: { _id: 1 } } + ) + ).map((it) => it._id) - try { - await this.storage.upload(DOMAIN_DOC_INDEX_STATE, newDocs) - } catch (err: any) { - console.error(err) + if (toRemove.length > 0) { + await this.storage.clean(DOMAIN_DOC_INDEX_STATE, toRemove) + await this.storage.clean(DOMAIN_FULLTEXT_BLOB, toRemove) } } - // remove index states for documents that do not exist - const toRemove = ( - await this.storage.findAll( + // Clean for non existing classes + + while (true) { + const docRefs = await this.storage.findAll( core.class.DocIndexState, - { objectClass: c, generationId: { $ne: generationId } }, - { projection: { _id: 1 } } + { objectClass: { $nin: allClasses } }, + { projection: { _id: 1, objectClass: 1 }, limit: 10000 } ) - ).map((it) => it._id) - - if (toRemove.length > 0) { - await this.storage.clean(DOMAIN_DOC_INDEX_STATE, toRemove) - await this.storage.clean(DOMAIN_FULLTEXT_BLOB, toRemove) - } - } - - // Clean for non existing classes - - while (true) { - const docRefs = await this.storage.findAll( - core.class.DocIndexState, - { objectClass: { $nin: allClasses } }, - { projection: { _id: 1, objectClass: 1 }, limit: 10000 } - ) - const unknownClasses = docRefs.map((it) => it._id) + const unknownClasses = docRefs.map((it) => it._id) - console.log('cleaning', docRefs.length, Array.from(new Set(docRefs.map((it) => it.objectClass))).join(', ')) + console.log('cleaning', docRefs.length, Array.from(new Set(docRefs.map((it) => it.objectClass))).join(', ')) - if (unknownClasses.length > 0) { - await this.storage.clean(DOMAIN_DOC_INDEX_STATE, unknownClasses) - } else { - break + if (unknownClasses.length > 0) { + await this.storage.clean(DOMAIN_DOC_INDEX_STATE, unknownClasses) + } else { + break + } } - } + }) }) } } diff --git a/server/core/src/storage.ts 
b/server/core/src/storage.ts index 095e543fcbf..d849c2c96ee 100644 --- a/server/core/src/storage.ts +++ b/server/core/src/storage.ts @@ -58,7 +58,8 @@ import core, { generateId } from '@hcengineering/core' import { MinioService } from '@hcengineering/minio' -import { getResource } from '@hcengineering/platform' +import { Metadata, getResource } from '@hcengineering/platform' +import { LiveQuery as LQ } from '@hcengineering/query' import crypto from 'node:crypto' import { DbAdapter, DbAdapterConfiguration, TxAdapter } from './adapter' import { createContentAdapter } from './content' @@ -113,6 +114,10 @@ class TServerStorage implements ServerStorage { hashes!: string[] + triggerData = new Map, any>() + + liveQuery: LQ + constructor ( private readonly _domains: Record, private readonly defaultAdapter: string, @@ -128,6 +133,33 @@ class TServerStorage implements ServerStorage { metrics: MeasureContext, readonly model: Tx[] ) { + this.liveQuery = new LQ({ + getHierarchy (): Hierarchy { + return hierarchy + }, + getModel (): ModelDb { + return modelDb + }, + close: async () => {}, + findAll: async (_class, query, options) => { + return await metrics.with('query', {}, async (ctx) => await this.findAll(ctx, _class, query, options)) + }, + findOne: async (_class, query, options) => { + return ( + await metrics.with( + 'query', + {}, + async (ctx) => await this.findAll(ctx, _class, query, { ...options, limit: 1 }) + ) + )[0] + }, + tx: async (tx) => { + return {} + }, + searchFulltext: async (query: SearchQuery, options: SearchOptions) => { + return await metrics.with('query', {}, async (ctx) => await this.searchFulltext(ctx, query, options)) + } + }) this.hierarchy = hierarchy this.fulltext = indexFactory(this) @@ -165,16 +197,24 @@ class TServerStorage implements ServerStorage { const adapter = this.getAdapter(lastDomain as Domain) const toDelete = part.filter((it) => it._class === core.class.TxRemoveDoc).map((it) => it.objectId) - const toDeleteDocs = await ctx.with( - 'adapter-load', - { domain: lastDomain }, - async () => await adapter.load(lastDomain as Domain, toDelete) - ) - for (const ddoc of toDeleteDocs) { - removedDocs.set(ddoc._id, ddoc) + if (toDelete.length > 0) { + const toDeleteDocs = await ctx.with( + 'adapter-load', + { domain: lastDomain }, + async () => await adapter.load(lastDomain as Domain, toDelete) + ) + + for (const ddoc of toDeleteDocs) { + removedDocs.set(ddoc._id, ddoc) + } } - const r = await ctx.with('adapter-tx', {}, async () => await adapter.tx(...part)) + const r = await ctx.with('adapter-tx', { domain: lastDomain }, async () => await adapter.tx(...part)) + + // Update server live queries. + for (const t of part) { + await this.liveQuery.tx(t) + } if (Array.isArray(r)) { result.push(...r) } else { @@ -374,14 +414,19 @@ class TServerStorage implements ServerStorage { query: DocumentQuery, options?: FindOptions & { domain?: Domain // Allow to find for Doc's in specified domain only. + prefix?: string } ): Promise> { + const p = options?.prefix ?? 'client' const domain = options?.domain ?? 
this.hierarchy.getDomain(clazz) if (query?.$search !== undefined) { - return await ctx.with('client-fulltext-find-all', {}, (ctx) => this.fulltext.findAll(ctx, clazz, query, options)) + return await ctx.with(p + '-fulltext-find-all', {}, (ctx) => this.fulltext.findAll(ctx, clazz, query, options)) } - return await ctx.with('client-find-all', { _class: clazz }, () => - this.getAdapter(domain).findAll(clazz, query, options) + return await ctx.with( + p + '-find-all', + { _class: clazz }, + () => this.getAdapter(domain).findAll(clazz, query, options), + { clazz, query, options } ) } @@ -586,6 +631,10 @@ class TServerStorage implements ServerStorage { }, applyCtx: async (ctx, tx, broadcast) => { return await this.apply(ctx, tx, broadcast) + }, + // Will create a live query if missing and return values immediately if already asked. + queryFind: async (_class, query, options) => { + return await this.liveQuery.queryFind(_class, query, options) } } const triggers = await ctx.with('process-triggers', {}, async (ctx) => { @@ -707,11 +756,7 @@ class TServerStorage implements ServerStorage { query: DocumentQuery, options?: FindOptions ): Promise> => { - const domain = this.hierarchy.getDomain(clazz) - if (query?.$search !== undefined) { - return await ctx.with('full-text-find-all', {}, (ctx) => this.fulltext.findAll(ctx, clazz, query, options)) - } - return await ctx.with('find-all', { _class: clazz }, () => this.getAdapter(domain).findAll(clazz, query, options)) + return await this.findAll(ctx, clazz, query, { ...options, prefix: 'server' }) } const txToStore: Tx[] = [] const modelTx: Tx[] = [] @@ -751,7 +796,7 @@ class TServerStorage implements ServerStorage { // index object for (const _tx of txToProcess) { - await ctx.with('fulltext', {}, (ctx) => this.fulltext.tx(ctx, _tx)) + await ctx.with('fulltext-tx', {}, (ctx) => this.fulltext.tx(ctx, _tx)) } // index derived objects diff --git a/server/core/src/types.ts b/server/core/src/types.ts index 85fd645fd70..1dd4007da60 100644 --- a/server/core/src/types.ts +++ b/server/core/src/types.ts @@ -128,9 +128,6 @@ export interface TriggerControl { modelDb: ModelDb removedMap: Map, Doc> - // // An object cache, - // getCachedObject: (_class: Ref>, _id: Ref) => Promise - fulltextFx: (f: (adapter: FullTextAdapter) => Promise) => void // Since we don't have other storages let's consider adapter is MinioClient // Later can be replaced with generic one with bucket encapsulated inside. @@ -140,6 +137,13 @@ export interface TriggerControl { // Bulk operations in case trigger require some apply: (tx: Tx[], broadcast: boolean) => Promise applyCtx: (ctx: MeasureContext, tx: Tx[], broadcast: boolean) => Promise + + // Will create a live query if missing and return values immediately if already asked. 
+ queryFind: ( + _class: Ref>, + query: DocumentQuery, + options?: FindOptions + ) => Promise> } /** @@ -283,7 +287,7 @@ export class DummyFullTextAdapter implements FullTextAdapter { async close (): Promise {} metrics (): MeasureContext { - return new MeasureMetricsContext('', {}) + return new MeasureMetricsContext('', {}, {}) } } diff --git a/server/server/src/metrics.ts b/server/server/src/metrics.ts index 16dc49c5a9f..6785e9dfc28 100644 --- a/server/server/src/metrics.ts +++ b/server/server/src/metrics.ts @@ -25,7 +25,7 @@ export function getMetricsContext (): MeasureContext { console.info('please provide apm server url for monitoring') const metrics = newMetrics() - metricsContext = new MeasureMetricsContext('System', {}, metrics) + metricsContext = new MeasureMetricsContext('System', {}, {}, metrics) if (metricsFile !== undefined || metricsConsole) { console.info('storing measurements into local file', metricsFile)
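A hedged sketch of how a server trigger could use the new `TriggerControl.queryFind`. The trigger name and queried class are illustrative; it assumes the usual `(tx, control)` trigger signature and that `TriggerControl` is imported from `@hcengineering/server-core`.

```ts
import core, { Tx } from '@hcengineering/core'
import type { TriggerControl } from '@hcengineering/server-core'

// Illustrative trigger, not part of this diff.
export async function OnDocumentTx (tx: Tx, control: TriggerControl): Promise<Tx[]> {
  // The first call creates a server-side live query and awaits its initial result;
  // later calls return the cached result, which storage.ts keeps up to date by
  // feeding every processed transaction into liveQuery.tx().
  const spaces = await control.queryFind(core.class.Space, {})
  console.log('cached spaces available to triggers:', spaces.length)
  return []
}
```

Unlike going through `dbStorage.findAll`, repeated trigger invocations are then served from the in-memory LiveQuery rather than issuing a new database query each time.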