diff --git a/models/core/src/core.ts b/models/core/src/core.ts
index 3debc7ac83d..dffd58af235 100644
--- a/models/core/src/core.ts
+++ b/models/core/src/core.ts
@@ -17,7 +17,6 @@ import {
   DOMAIN_BLOB,
   DOMAIN_CONFIGURATION,
   DOMAIN_DOC_INDEX_STATE,
-  DOMAIN_FULLTEXT_BLOB,
   DOMAIN_MIGRATION,
   DOMAIN_MODEL,
   IndexKind,
@@ -38,7 +37,6 @@ import {
   type Enum,
   type EnumOf,
   type FieldIndex,
-  type FullTextData,
   type FullTextSearchContext,
   type IndexStageState,
   type IndexingConfiguration,
@@ -309,10 +307,6 @@ export class TPluginConfiguration extends TDoc implements PluginConfiguration {
   enabled!: boolean
   beta!: boolean
 }
-@Model(core.class.FulltextData, core.class.Doc, DOMAIN_FULLTEXT_BLOB)
-export class TFulltextData extends TDoc implements FullTextData {
-  data!: any
-}
 
 @Model(core.class.DocIndexState, core.class.Doc, DOMAIN_DOC_INDEX_STATE)
 export class TDocIndexState extends TDoc implements DocIndexState {
diff --git a/models/core/src/index.ts b/models/core/src/index.ts
index 382bb7180ca..c4df62fdb69 100644
--- a/models/core/src/index.ts
+++ b/models/core/src/index.ts
@@ -49,7 +49,6 @@ import {
   TEnum,
   TEnumOf,
   TFullTextSearchContext,
-  TFulltextData,
   TIndexConfiguration,
   TIndexStageState,
   TInterface,
@@ -164,7 +163,6 @@ export function createModel (builder: Builder): void {
     TUserStatus,
     TEnum,
     TTypeAny,
-    TFulltextData,
     TTypeRelatedDocument,
     TDocIndexState,
     TIndexStageState,
diff --git a/models/server-attachment/src/index.ts b/models/server-attachment/src/index.ts
index ffabc76d7ef..f4f64756b15 100644
--- a/models/server-attachment/src/index.ts
+++ b/models/server-attachment/src/index.ts
@@ -16,20 +16,6 @@
 import { type Builder } from '@hcengineering/model'
-import attachment from '@hcengineering/attachment'
-import core from '@hcengineering/core'
-import serverAttachment from '@hcengineering/server-attachment'
-import serverCore from '@hcengineering/server-core'
-
 export { serverAttachmentId } from '@hcengineering/server-attachment'
 
-export function createModel (builder: Builder): void {
-  builder.createDoc(serverCore.class.Trigger, core.space.Model, {
-    trigger: serverAttachment.trigger.OnAttachmentDelete,
-    txMatch: {
-      _class: core.class.TxCollectionCUD,
-      'tx.objectClass': attachment.class.Attachment,
-      'tx._class': core.class.TxRemoveDoc
-    }
-  })
-}
+export function createModel (builder: Builder): void {}
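The registration deleted above matched attachment removals declaratively. As a plain-TypeScript sketch (not part of the patch — the string class ids are illustrative and the dot-path matching semantics are an assumption), the removed txMatch object expressed roughly this predicate over incoming transactions:

```ts
// Illustrative only: the shape of transaction the removed trigger fired on —
// a TxCollectionCUD whose nested tx is a TxRemoveDoc of an Attachment.
type AnyTx = { _class: string, tx?: { _class: string, objectClass: string } }

function matchesAttachmentDelete (tx: AnyTx): boolean {
  return (
    tx._class === 'core:class:TxCollectionCUD' &&
    tx.tx?._class === 'core:class:TxRemoveDoc' &&
    tx.tx?.objectClass === 'attachment:class:Attachment'
  )
}
```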
diff --git a/packages/core/src/classes.ts b/packages/core/src/classes.ts
index da3af8dd9ab..2288dc777fd 100644
--- a/packages/core/src/classes.ts
+++ b/packages/core/src/classes.ts
@@ -337,12 +337,6 @@ export const DOMAIN_TRANSIENT = 'transient' as Domain
  */
 export const DOMAIN_BLOB = 'blob' as Domain
 
-/**
- * Special domain to access s3 blob data.
- * @public
- */
-export const DOMAIN_FULLTEXT_BLOB = 'fulltext-blob' as Domain
-
 /**
  * Special domain to access s3 blob data.
  * @public
@@ -494,14 +488,6 @@ export function versionToString (version: Version | Data<Version>): string {
   return `${version?.major}.${version?.minor}.${version?.patch}`
 }
 
-/**
- * Blob data from s3 storage
- * @public
- */
-export interface FullTextData extends Doc {
-  data: any
-}
-
 /**
  * @public
  *
diff --git a/packages/core/src/component.ts b/packages/core/src/component.ts
index 92ec1ad5b63..2af98232608 100644
--- a/packages/core/src/component.ts
+++ b/packages/core/src/component.ts
@@ -30,7 +30,6 @@ import type {
   DomainIndexConfiguration,
   Enum,
   EnumOf,
-  FullTextData,
   FullTextSearchContext,
   Hyperlink,
   IndexStageState,
@@ -134,7 +133,6 @@ export default plugin(coreId, {
     Version: '' as Ref<Class<Version>>,
     PluginConfiguration: '' as Ref<Class<PluginConfiguration>>,
     UserStatus: '' as Ref<Class<UserStatus>>,
-    FulltextData: '' as Ref<Class<FullTextData>>,
     TypeRelatedDocument: '' as Ref<Class<Type<RelatedDocument>>>,
     DocIndexState: '' as Ref<Class<DocIndexState>>,
     IndexStageState: '' as Ref<Class<IndexStageState>>,
diff --git a/packages/core/src/utils.ts b/packages/core/src/utils.ts
index 299f6fd9851..4d185dc29c9 100644
--- a/packages/core/src/utils.ts
+++ b/packages/core/src/utils.ts
@@ -28,7 +28,6 @@ import {
   DocIndexState,
   DOMAIN_BLOB,
   DOMAIN_DOC_INDEX_STATE,
-  DOMAIN_FULLTEXT_BLOB,
   DOMAIN_MODEL,
   DOMAIN_TRANSIENT,
   FullTextSearchContext,
@@ -713,7 +712,6 @@ export function isClassIndexable (hierarchy: Hierarchy, c: Ref<Class<Doc>>): boolean {
     domain === DOMAIN_TX ||
     domain === DOMAIN_MODEL ||
     domain === DOMAIN_BLOB ||
-    domain === DOMAIN_FULLTEXT_BLOB ||
     domain === DOMAIN_TRANSIENT
   ) {
     hierarchy.setClassifierProp(c, 'class_indexed', false)
diff --git a/packages/presentation/src/file.ts b/packages/presentation/src/file.ts
index 6888ddf3eae..f5f6482eb3e 100644
--- a/packages/presentation/src/file.ts
+++ b/packages/presentation/src/file.ts
@@ -13,7 +13,7 @@
 // limitations under the License.
 //
 
-import { concatLink, type Blob, type Ref } from '@hcengineering/core'
+import { type Blob, type Ref } from '@hcengineering/core'
 import { PlatformError, Severity, Status, getMetadata, getResource } from '@hcengineering/platform'
 import { type PopupAlignment } from '@hcengineering/ui'
 import { writable } from 'svelte/store'
@@ -54,25 +54,6 @@ export async function uploadFile (file: File): Promise<Ref<Blob>> {
   return (await resp.text()) as Ref<Blob>
 }
 
-/**
- * @public
- */
-export async function deleteFile (id: string): Promise<void> {
-  const uploadUrl = getMetadata(plugin.metadata.UploadURL) ?? ''
-
-  const url = concatLink(uploadUrl, `?file=${id}`)
-  const resp = await fetch(url, {
-    method: 'DELETE',
-    headers: {
-      Authorization: 'Bearer ' + (getMetadata(plugin.metadata.Token) as string)
-    }
-  })
-
-  if (resp.status !== 200) {
-    throw new Error('Failed to delete file')
-  }
-}
-
 /**
  * @public
  */
diff --git a/packages/query/src/index.ts b/packages/query/src/index.ts
index 42b1e4728c1..d7621db5442 100644
--- a/packages/query/src/index.ts
+++ b/packages/query/src/index.ts
@@ -1276,9 +1276,8 @@ export class LiveQuery implements WithTx, Client {
     for (const tx of txes) {
       if (tx._class === core.class.TxWorkspaceEvent) {
         const evt = tx as TxWorkspaceEvent
-        console.info('checking workspace event', evt._id, evt.params)
-        await this.checkUpdateEvents(tx as TxWorkspaceEvent)
-        await this.changePrivateHandler(tx as TxWorkspaceEvent)
+        await this.checkUpdateEvents(evt)
+        await this.changePrivateHandler(evt)
       }
       result.push(await this._tx(tx, docCache))
     }
diff --git a/plugins/attachment-resources/src/components/AttachmentRefInput.svelte b/plugins/attachment-resources/src/components/AttachmentRefInput.svelte
index 6612fbd8a6f..b846113488c 100644
--- a/plugins/attachment-resources/src/components/AttachmentRefInput.svelte
+++ b/plugins/attachment-resources/src/components/AttachmentRefInput.svelte
@@ -19,7 +19,6 @@
   import {
     DraftController,
     createQuery,
-    deleteFile,
    draftsStore,
     getClient,
     getFileMetadata,
@@ -228,8 +227,6 @@
         attachment.attachedToClass,
         'attachments'
       )
-    } else {
-      await deleteFile(attachment.file)
     }
   }
 
@@ -249,14 +246,6 @@
   export function removeDraft (removeFiles: boolean): void {
     draftController.remove()
-    if (removeFiles) {
-      newAttachments.forEach((p) => {
-        const attachment = attachments.get(p)
-        if (attachment !== undefined) {
-          void deleteFile(attachment.file)
-        }
-      })
-    }
   }
 
   export function createAttachments (): Promise<void> {
diff --git a/plugins/attachment-resources/src/components/AttachmentStyledBox.svelte b/plugins/attachment-resources/src/components/AttachmentStyledBox.svelte
index b928017e00b..4c32a88c674 100644
--- a/plugins/attachment-resources/src/components/AttachmentStyledBox.svelte
+++ b/plugins/attachment-resources/src/components/AttachmentStyledBox.svelte
@@ -20,7 +20,6 @@
   import {
     createQuery,
     DraftController,
-    deleteFile,
     draftsStore,
     getClient,
     getFileMetadata,
@@ -243,8 +242,6 @@
         'attachments'
       )
       dispatch('detached', attachment._id)
-    } else {
-      await deleteFile(attachment.file)
     }
   }
 
@@ -263,14 +260,6 @@
     if (draftKey) {
       DraftController.remove(draftKey)
     }
-    if (removeFiles) {
-      newAttachments.forEach((p) => {
-        const attachment = attachments.get(p)
-        if (attachment !== undefined) {
-          deleteFile(attachment.file)
-        }
-      })
-    }
   }
 
   export async function saveNewAttachment (_id: Ref<Attachment>): Promise<void> {
diff --git a/plugins/attachment-resources/src/index.ts b/plugins/attachment-resources/src/index.ts
index 617ebc191a7..61557c3998d 100644
--- a/plugins/attachment-resources/src/index.ts
+++ b/plugins/attachment-resources/src/index.ts
@@ -17,7 +17,7 @@ import activity, { type ActivityMessage, type DocUpdateMessage } from '@hcengineering/activity'
 import attachment, { type Attachment } from '@hcengineering/attachment'
 import core, { SortingOrder, type Markup, type ObjQueryType, type SortingQuery } from '@hcengineering/core'
 import { type IntlString, type Resources } from '@hcengineering/platform'
-import { PDFViewer, deleteFile, getClient, uploadFile } from '@hcengineering/presentation'
+import { PDFViewer, getClient, uploadFile } from '@hcengineering/presentation'
 import AccordionEditor from './components/AccordionEditor.svelte'
 import AddAttachment from './components/AddAttachment.svelte'
@@ -262,8 +262,7 @@ export default async (): Promise<Resources> => ({
     AttachmentsUpdatedMessage
   },
   helper: {
-    UploadFile: uploadFile,
-    DeleteFile: deleteFile
+    UploadFile: uploadFile
   },
   actionImpl: {
     AddAttachmentToSaved,
diff --git a/plugins/contact-resources/src/components/EditableAvatar.svelte b/plugins/contact-resources/src/components/EditableAvatar.svelte
index a6463b941e0..1af9e556485 100644
--- a/plugins/contact-resources/src/components/EditableAvatar.svelte
+++ b/plugins/contact-resources/src/components/EditableAvatar.svelte
@@ -53,13 +53,6 @@
     return result
   }
 
-  export async function removeAvatar (avatar: string) {
-    if (!avatar.includes('://')) {
-      const deleteFile = await getResource(attachment.helper.DeleteFile)
-      await deleteFile(avatar)
-    }
-  }
-
   function handlePopupSubmit (
     submittedAvatarType: AvatarType,
     submittedAvatar: Ref<Blob> | undefined | null,
diff --git a/plugins/gmail-resources/src/components/NewMessage.svelte b/plugins/gmail-resources/src/components/NewMessage.svelte
index a9c3f832968..506e61330dc 100644
--- a/plugins/gmail-resources/src/components/NewMessage.svelte
+++ b/plugins/gmail-resources/src/components/NewMessage.svelte
@@ -141,7 +141,6 @@
   const query = createQuery()
 
   async function removeAttachment (attachment: Attachment): Promise<void> {
-    const deleteFile = await getResource(attachmentP.helper.DeleteFile)
     await client.removeCollection(
       attachment._class,
       attachment.space,
@@ -150,7 +149,6 @@
       attachment.attachedToClass,
       'attachments'
     )
-    await deleteFile(attachment.file)
   }
 
   let attachments: Attachment[] = []
diff --git a/plugins/gmail-resources/src/components/NewMessages.svelte b/plugins/gmail-resources/src/components/NewMessages.svelte
index 38c3693d2a0..69445914ac3 100644
--- a/plugins/gmail-resources/src/components/NewMessages.svelte
+++ b/plugins/gmail-resources/src/components/NewMessages.svelte
@@ -166,7 +166,6 @@
   const attachmentsQ = createQuery()
 
   async function removeAttachment (attachment: Attachment): Promise<void> {
-    const deleteFile = await getResource(attachmentP.helper.DeleteFile)
     await client.removeCollection(
       attachment._class,
       attachment.space,
@@ -175,7 +174,6 @@
       attachment.attachedToClass,
       'attachments'
     )
-    await deleteFile(attachment.file)
   }
 
   let attachments: Attachment[] = []
diff --git a/plugins/recruit-resources/src/components/CreateCandidate.svelte b/plugins/recruit-resources/src/components/CreateCandidate.svelte
index 58aefd929a1..29dae8999b6 100644
--- a/plugins/recruit-resources/src/components/CreateCandidate.svelte
+++ b/plugins/recruit-resources/src/components/CreateCandidate.svelte
@@ -47,8 +47,7 @@
     InlineAttributeBar,
     KeyedAttribute,
     MessageBox,
-    MultipleDraftController,
-    deleteFile
+    MultipleDraftController
   } from '@hcengineering/presentation'
   import type { Candidate, CandidateDraft } from '@hcengineering/recruit'
   import { recognizeDocument } from '@hcengineering/rekoni'
@@ -452,16 +451,6 @@
     }
   }
 
-  async function deleteResume (): Promise<void> {
-    if (object.resumeUuid) {
-      try {
-        await deleteFile(object.resumeUuid)
-      } catch (err) {
-        console.error(err)
-      }
-    }
-  }
-
   async function createAttachment (file: File) {
     loading = true
     try {
@@ -559,7 +548,6 @@
       (result?: boolean) => {
         if (result === true) {
           dispatch('close')
-          deleteResume()
          resetObject()
           draftController.remove()
         }
diff --git a/plugins/tracker-resources/src/components/SubIssues.svelte b/plugins/tracker-resources/src/components/SubIssues.svelte
index 52499454072..ab366d8090f 100644
--- a/plugins/tracker-resources/src/components/SubIssues.svelte
+++ b/plugins/tracker-resources/src/components/SubIssues.svelte
@@ -15,7 +15,7 @@
diff --git a/server-plugins/attachment-resources/src/index.ts b/server-plugins/attachment-resources/src/index.ts
index b277437f921..a7caae8490b 100644
--- a/server-plugins/attachment-resources/src/index.ts
+++ b/server-plugins/attachment-resources/src/index.ts
@@ -14,35 +14,7 @@
 // limitations under the License.
 //
 
-import type { Attachment } from '@hcengineering/attachment'
-import type { Tx, TxRemoveDoc } from '@hcengineering/core'
-import { TxProcessor } from '@hcengineering/core'
-import type { TriggerControl } from '@hcengineering/server-core'
-
-/**
- * @public
- */
-export async function OnAttachmentDelete (
-  tx: Tx,
-  { removedMap, ctx, storageAdapter, workspace }: TriggerControl
-): Promise<Tx[]> {
-  const rmTx = TxProcessor.extractTx(tx) as TxRemoveDoc<Attachment>
-
-  // Obtain document being deleted.
-  const attach = removedMap.get(rmTx.objectId) as Attachment
-
-  if (attach === undefined) {
-    return []
-  }
-
-  await storageAdapter.remove(ctx, workspace, [attach.file])
-
-  return []
-}
-
 // eslint-disable-next-line @typescript-eslint/explicit-function-return-type
 export default async () => ({
-  trigger: {
-    OnAttachmentDelete
-  }
+  trigger: {}
 })
diff --git a/server-plugins/attachment/src/index.ts b/server-plugins/attachment/src/index.ts
index 354d302da67..694840b2686 100644
--- a/server-plugins/attachment/src/index.ts
+++ b/server-plugins/attachment/src/index.ts
@@ -14,9 +14,8 @@
 // limitations under the License.
 //
 
-import type { Plugin, Resource } from '@hcengineering/platform'
+import type { Plugin } from '@hcengineering/platform'
 import { plugin } from '@hcengineering/platform'
-import type { TriggerFunc } from '@hcengineering/server-core'
 
 /**
  * @public
@@ -26,8 +25,4 @@ export const serverAttachmentId = 'server-attachment' as Plugin
 /**
  * @public
  */
-export default plugin(serverAttachmentId, {
-  trigger: {
-    OnAttachmentDelete: '' as Resource<TriggerFunc>
-  }
-})
+export default plugin(serverAttachmentId, {})
diff --git a/server-plugins/contact-resources/src/index.ts b/server-plugins/contact-resources/src/index.ts
index d42140bf384..f227ffdd780 100644
--- a/server-plugins/contact-resources/src/index.ts
+++ b/server-plugins/contact-resources/src/index.ts
@@ -45,7 +45,7 @@ import core, {
 } from '@hcengineering/core'
 import notification, { Collaborators } from '@hcengineering/notification'
 import { getMetadata } from '@hcengineering/platform'
-import serverCore, { TriggerControl, removeAllObjects } from '@hcengineering/server-core'
+import serverCore, { TriggerControl } from '@hcengineering/server-core'
 import { workbenchId } from '@hcengineering/workbench'
 
 export async function OnSpaceTypeMembers (tx: Tx, control: TriggerControl): Promise<Tx[]> {
@@ -137,8 +137,6 @@
     return []
   }
 
-  await removeAllObjects(ctx, storageAdapter, workspace, removeContact.avatar)
-
   const result: Tx[] = []
 
   const members = await findAll(contact.class.Member, { contact: removeContact._id })
diff --git a/server-plugins/drive-resources/src/index.ts b/server-plugins/drive-resources/src/index.ts
index ce88aba57b1..d234bf1f413 100644
--- a/server-plugins/drive-resources/src/index.ts
+++ b/server-plugins/drive-resources/src/index.ts
@@ -20,10 +20,10 @@ import {
   type Ref,
   type Tx,
   type TxRemoveDoc,
-  TxProcessor,
   DocumentQuery,
   FindOptions,
-  FindResult
+  FindResult,
+  TxProcessor
 } from '@hcengineering/core'
 import drive, { type File, type Folder } from '@hcengineering/drive'
 import type { TriggerControl } from '@hcengineering/server-core'
@@ -44,8 +44,6 @@ export async function OnFileDelete (
     return []
   }
 
-  await storageAdapter.remove(ctx, workspace, [attach.file])
-
   return []
 }
diff --git a/server/account/src/operations.ts b/server/account/src/operations.ts
index 2adc5424612..50e1774b3a6 100644
--- a/server/account/src/operations.ts
+++ b/server/account/src/operations.ts
@@ -970,7 +970,6 @@ export async function createWorkspace (
         async (value) => {
           await updateInfo({ createProgress: 20 + Math.round((Math.min(value, 100) / 100) * 70) })
         },
-        true,
         getStorageAdapter()
       )
       const modelVersion = getModelVersion()
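The account-service hunk above drops the boolean that used to sit between the progress callback and the storage adapter — the skipFullText parameter removed from cloneWorkspace in the next file. The surviving callback also shows how clone progress is reported: the inner 0–100 value is compressed into the 20–90 band of overall workspace-creation progress. A runnable sketch of that mapping:

```ts
// The progress mapping used in the hunk above: clone progress (0-100)
// occupies the 20-90 range of the overall workspace-creation progress bar.
function mapCloneProgress (value: number): number {
  return 20 + Math.round((Math.min(value, 100) / 100) * 70)
}

console.log(mapCloneProgress(0))   // 20
console.log(mapCloneProgress(50))  // 55
console.log(mapCloneProgress(100)) // 90
```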
diff --git a/server/backup/src/backup.ts b/server/backup/src/backup.ts
index 2676fda96ad..fd5f9f74470 100644
--- a/server/backup/src/backup.ts
+++ b/server/backup/src/backup.ts
@@ -14,13 +14,14 @@
 // limitations under the License.
 //
 
+import { Analytics } from '@hcengineering/analytics'
 import core, {
   AttachedDoc,
   BackupClient,
   Client as CoreClient,
   Doc,
   Domain,
-  DOMAIN_FULLTEXT_BLOB,
+  DOMAIN_BLOB,
   DOMAIN_MODEL,
   DOMAIN_TRANSIENT,
   MeasureContext,
@@ -33,7 +34,7 @@ import core, {
   type Blob,
   type DocIndexState
 } from '@hcengineering/core'
-import type { StorageAdapter } from '@hcengineering/server-core'
+import { type StorageAdapter, fullTextPushStagePrefix } from '@hcengineering/server-core'
 import { BlobClient, connect } from '@hcengineering/server-tool'
 import { mkdtemp, writeFile } from 'node:fs/promises'
 import { PassThrough } from 'node:stream'
@@ -43,7 +44,6 @@ import { Writable } from 'stream'
 import { extract, Pack, pack } from 'tar-stream'
 import { createGunzip, gunzipSync, gzipSync } from 'zlib'
 import { BackupStorage } from './storage'
-import { Analytics } from '@hcengineering/analytics'
 export * from './storage'
 
 const dataBlobSize = 50 * 1024 * 1024
@@ -231,7 +231,6 @@ export async function cloneWorkspace (
   targetWorkspaceId: WorkspaceId,
   clearTime: boolean = true,
   progress: (value: number) => Promise<void>,
-  skipFullText: boolean,
   storageAdapter: StorageAdapter
 ): Promise<void> {
   await ctx.with(
@@ -264,10 +263,6 @@
       let i = 0
       for (const c of domains) {
-        if (skipFullText && c === DOMAIN_FULLTEXT_BLOB) {
-          ctx.info('clone skip domain...', { domain: c, workspace: targetWorkspaceId.name })
-          continue
-        }
         ctx.info('clone domain...', { domain: c, workspace: targetWorkspaceId.name })
 
         // We need to clean target connection before copying something.
@@ -347,7 +342,7 @@
         try {
           docs = await ctx.with('load-docs', {}, async (ctx) => await sourceConnection.loadDocs(c, needRetrieve))
           if (clearTime) {
-            docs = prepareClonedDocuments(docs, sourceConnection, skipFullText)
+            docs = prepareClonedDocuments(docs, sourceConnection)
           }
           const executor = new RateLimiter(10)
           for (const d of docs) {
@@ -422,11 +417,7 @@
   )
 }
 
-function prepareClonedDocuments (
-  docs: Doc[],
-  sourceConnection: CoreClient & BackupClient,
-  skipFullText: boolean
-): Doc[] {
+function prepareClonedDocuments (docs: Doc[], sourceConnection: CoreClient & BackupClient): Doc[] {
   docs = docs.map((p) => {
     let collectionCud = false
     try {
@@ -436,8 +427,13 @@
     }
     // if full text is skipped, we need to clean stages for indexes.
-    if (p._class === core.class.DocIndexState && skipFullText) {
-      ;(p as DocIndexState).stages = {}
+    if (p._class === core.class.DocIndexState) {
+      for (const k of Object.keys((p as DocIndexState).stages)) {
+        if (k.startsWith(fullTextPushStagePrefix)) {
+          // eslint-disable-next-line @typescript-eslint/no-dynamic-delete
+          delete (p as DocIndexState).stages[k]
+        }
+      }
     }
 
     if (collectionCud) {
@@ -556,6 +552,7 @@
       (it) =>
         it !== DOMAIN_TRANSIENT &&
         it !== DOMAIN_MODEL &&
+        it !== ('fulltext-blob' as Domain) &&
        !options.skipDomains.includes(it) &&
         (options.include === undefined || options.include.has(it))
     )
@@ -1090,6 +1087,7 @@
   const infoFile = 'backup.json.gz'
 
   if (!(await storage.exists(infoFile))) {
+    ctx.error('file not present', { file: infoFile })
     throw new Error(`${infoFile} should present to restore`)
   }
   const backupInfo: BackupInfo = JSON.parse(gunzipSync(await storage.loadFile(infoFile)).toString())
@@ -1097,13 +1095,14 @@
   if (opt.date !== -1) {
     const bk = backupInfo.snapshots.findIndex((it) => it.date === opt.date)
     if (bk === -1) {
+      ctx.error('could not restore to', { date: opt.date, file: infoFile, workspaceId: workspaceId.name })
       throw new Error(`${infoFile} could not restore to ${opt.date}. Snapshot is missing.`)
     }
     snapshots = backupInfo.snapshots.slice(0, bk + 1)
   } else {
     opt.date = snapshots[snapshots.length - 1].date
   }
-  console.log('restore to ', opt.date, new Date(opt.date))
+  ctx.info('restore to ', { id: opt.date, date: new Date(opt.date).toDateString() })
   const rsnapshots = Array.from(snapshots).reverse()
 
   // Collect all possible domains
@@ -1112,7 +1111,7 @@
     Object.keys(s.domains).forEach((it) => domains.add(it as Domain))
   }
 
-  console.log('connecting:', transactorUrl, workspaceId.name)
+  ctx.info('connecting:', { transactorUrl, workspace: workspaceId.name })
   const connection = (await connect(transactorUrl, workspaceId, undefined, {
     mode: 'backup',
     model: 'upgrade'
  })
@@ -1127,6 +1126,9 @@
     domains.add(d)
   }
 
+  // We do not backup elastic anymore
+  domains.delete('fulltext-blob' as Domain)
+
   let uploadedMb = 0
   let uploaded = 0
 
@@ -1138,7 +1140,8 @@
       uploadedMb = newId
       ctx.info('Uploaded', {
         msg,
-        written: newDownloadedMb
+        written: newDownloadedMb,
+        workspace: workspaceId.name
       })
     }
   }
@@ -1167,7 +1170,7 @@
         }
 
         if (el > 2500) {
-          console.log(c, ' loaded from server', loaded, el, chunks)
+          ctx.info('loaded from server', { domain: c, loaded, el, chunks, workspace: workspaceId.name })
          el = 0
           chunks = 0
         }
@@ -1180,8 +1183,12 @@
        await connection.closeChunk(idx)
       }
     }
-    console.log(' loaded', loaded)
-    console.log('\tcompare documents', changeset.size, serverChangeset.size)
+    ctx.info('loaded', { loaded, workspace: workspaceId.name })
+    ctx.info('\tcompare documents', {
+      size: changeset.size,
+      serverSize: serverChangeset.size,
+      workspace: workspaceId.name
+    })
 
     // Let's find difference
     const docsToAdd = new Map(
@@ -1208,7 +1215,13 @@
       if (sendSize > dataUploadSize || (doc === undefined && docs.length > 0)) {
         totalSend += docs.length
-        console.log('upload', docs.length, `send: ${totalSend} from ${docsToAdd.size + totalSend}`, 'size:', sendSize)
+        ctx.info('upload', {
+          docs: docs.length,
+          totalSend,
+          from: docsToAdd.size + totalSend,
+          sendSize,
+          workspace: workspaceId.name
+        })
         await connection.upload(c, docs)
         docs.length = 0
         sendSize = 0
       }
@@ -1224,13 +1237,13 @@
       const sDigest = await loadDigest(ctx, storage, [s], c)
       const requiredDocs = new Map(Array.from(sDigest.entries()).filter(([it]) => docsToAdd.has(it)))
       if (requiredDocs.size > 0) {
-        console.log('updating', c, requiredDocs.size)
+        ctx.info('updating', { domain: c, requiredDocs: requiredDocs.size, workspace: workspaceId.name })
         // We have required documents here.
         for (const sf of d.storage ?? []) {
           if (docsToAdd.size === 0) {
             break
           }
-          console.log('processing', sf, processed)
+          ctx.info('processing', { storageFile: sf, processed, workspace: workspaceId.name })
 
           const readStream = await storage.load(sf)
           const ex = extract()
@@ -1332,11 +1345,19 @@
     }
     await sendChunk(undefined, 0)
-    if (docsToRemove.length > 0 && opt.merge !== true) {
-      console.log('cleanup', docsToRemove.length)
-      while (docsToRemove.length > 0) {
-        const part = docsToRemove.splice(0, 10000)
-        await connection.clean(c, part)
+    if (c !== DOMAIN_BLOB) {
+      // Clean domain documents if not blob
+      if (docsToRemove.length > 0 && opt.merge !== true) {
+        ctx.info('cleanup', { toRemove: docsToRemove.length, workspace: workspaceId.name, domain: c })
+        while (docsToRemove.length > 0) {
+          const part = docsToRemove.splice(0, 10000)
+          try {
+            await connection.clean(c, part)
+          } catch (err: any) {
+            ctx.error('failed to clean, will retry', { error: err, workspaceId: workspaceId.name })
+            docsToRemove.push(...part)
+          }
+        }
       }
     }
   }
@@ -1352,7 +1373,7 @@
       continue
     }
     await limiter.exec(async () => {
-      console.log('processing domain', c)
+      ctx.info('processing domain', { domain: c, workspaceId: workspaceId.name })
       let retry = 5
       let delay = 1
       while (retry > 0) {
@@ -1360,13 +1381,13 @@
       try {
         await processDomain(c)
         if (delay > 1) {
-          console.log('retry-success')
+          ctx.warn('retry-success', { retry, delay, workspaceId: workspaceId.name })
         }
         break
       } catch (err: any) {
-        console.error('error', err)
+        ctx.error('failed to process domain', { err, domain: c, workspaceId: workspaceId.name })
         if (retry !== 0) {
-          console.log('cool-down to retry', delay)
+          ctx.warn('cool-down to retry', { delay, domain: c, workspaceId: workspaceId.name })
           await new Promise((resolve) => setTimeout(resolve, delay * 1000))
           delay++
         }
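The prepareClonedDocuments hunk above replaces the old all-or-nothing stages wipe with a prefix test: only full-text push stages are dropped from cloned DocIndexState documents, so cheaper stages such as field extraction survive the clone. A standalone sketch of that rule (illustrative stage keys, matching fullTextPushStageId = 'fts-v17' and the fullTextPushStagePrefix constant added to server-core later in this diff):

```ts
// Standalone sketch of the stage-cleanup rule above.
const fullTextPushStagePrefix = 'fts-'

function stripFullTextPushStages (stages: Record<string, boolean | string>): void {
  for (const k of Object.keys(stages)) {
    if (k.startsWith(fullTextPushStagePrefix)) {
      // eslint-disable-next-line @typescript-eslint/no-dynamic-delete
      delete stages[k]
    }
  }
}

const stages: Record<string, boolean | string> = { 'fld-v15': true, 'fts-v17': true }
stripFullTextPushStages(stages)
console.log(stages) // { 'fld-v15': true } -- the full-text push stage is gone
```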
diff --git a/server/core/src/indexer/indexer.ts b/server/core/src/indexer/indexer.ts
index ce110941714..37712cfb970 100644
--- a/server/core/src/indexer/indexer.ts
+++ b/server/core/src/indexer/indexer.ts
@@ -13,6 +13,7 @@
 // limitations under the License.
 //
 
+import { Analytics } from '@hcengineering/analytics'
 import core, {
   type Class,
   DOMAIN_DOC_INDEX_STATE,
@@ -37,7 +38,6 @@
 import { type DbAdapter } from '../adapter'
 import { RateLimiter } from '../limitter'
 import type { IndexedDoc } from '../types'
 import { type FullTextPipeline, type FullTextPipelineStage } from './types'
-import { Analytics } from '@hcengineering/analytics'
 
 export * from './content'
 export * from './field'
@@ -383,8 +383,12 @@
       // We need to send index update event
       clearTimeout(this.updateBroadcast)
       this.updateBroadcast = setTimeout(() => {
-        this.broadcastUpdate(Array.from(this.broadcastClasses.values()))
-        this.broadcastClasses.clear()
+        this.broadcastClasses.delete(core.class.DocIndexState)
+        if (this.broadcastClasses.size > 0) {
+          const toSend = Array.from(this.broadcastClasses.values())
+          this.broadcastClasses.clear()
+          this.broadcastUpdate(toSend)
+        }
       }, 5000)
 
       await new Promise((resolve) => {
diff --git a/server/core/src/indexer/types.ts b/server/core/src/indexer/types.ts
index fa1deb3ee79..3ce2f3c9524 100644
--- a/server/core/src/indexer/types.ts
+++ b/server/core/src/indexer/types.ts
@@ -108,3 +108,8 @@ export const fieldStateId = 'fld-v15'
  * @public
  */
 export const fullTextPushStageId = 'fts-v17'
+
+/**
+ * @public
+ */
+export const fullTextPushStagePrefix = 'fts-'
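The indexer hunk above changes what the 5-second debounce actually sends: DocIndexState itself is removed from the pending set first, and nothing is broadcast when no other class changed, so pure index-bookkeeping updates no longer wake every client. A minimal standalone sketch of that flush rule (class ids are illustrative strings):

```ts
// Sketch of the flush rule added above: drop the bookkeeping class, then
// broadcast only if something else remains in the pending set.
const broadcastClasses = new Set<string>(['core:class:DocIndexState', 'tracker:class:Issue'])

function flush (broadcastUpdate: (classes: string[]) => void): void {
  broadcastClasses.delete('core:class:DocIndexState')
  if (broadcastClasses.size > 0) {
    const toSend = Array.from(broadcastClasses.values())
    broadcastClasses.clear()
    broadcastUpdate(toSend)
  }
}

flush((classes) => { console.log('broadcast', classes) }) // broadcast [ 'tracker:class:Issue' ]
```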
diff --git a/server/elastic/src/__tests__/backup.test.ts b/server/elastic/src/__tests__/backup.test.ts
deleted file mode 100644
index 7013346b52c..00000000000
--- a/server/elastic/src/__tests__/backup.test.ts
+++ /dev/null
@@ -1,63 +0,0 @@
-import { DbAdapter } from '@hcengineering/server-core'
-import { Domain, getWorkspaceId, Hierarchy, MeasureMetricsContext } from '@hcengineering/core'
-import { createElasticBackupDataAdapter } from '../backup'
-
-import { Client } from '@elastic/elasticsearch'
-
-describe('Elastic Data Adapter', () => {
-  const url = process.env.ELASTIC_URL ?? 'http://localhost:9200/'
-  const domain = 'test' as Domain
-
-  let adapter: DbAdapter
-
-  beforeEach(async () => {
-    adapter = await createElasticBackupDataAdapter(
-      new MeasureMetricsContext('test', {}),
-      new Hierarchy(),
-      url,
-      getWorkspaceId('ws1', '')
-    )
-  })
-
-  afterEach(async () => {
-    await adapter.close()
-  })
-
-  it('should init', () => {
-    expect(adapter).toBeTruthy()
-  })
-
-  describe('Scroll Contexts', () => {
-    let client: Client
-
-    beforeEach(async () => {
-      client = new Client({ node: url })
-      await client.cluster.putSettings({
-        body: {
-          persistent: { 'search.max_open_scroll_context': '2' },
-          transient: { 'search.max_open_scroll_context': '2' }
-        }
-      })
-    })
-
-    // Use afterEach() to make sure we clean up even if test fail
-    afterEach(async () => {
-      await client.cluster.putSettings({
-        body: {
-          persistent: { 'search.max_open_scroll_context': null },
-          transient: { 'search.max_open_scroll_context': null }
-        }
-      })
-      await client.close()
-    })
-
-    it('should get properly closed', async () => {
-      const ctx = new MeasureMetricsContext('test', {})
-      for (let i = 0; i <= 3; i++) {
-        const cursor = adapter.find(ctx, domain)
-        await cursor.next(ctx)
-        await cursor.close(ctx)
-      }
-    })
-  })
-})
diff --git a/server/elastic/src/adapter.ts b/server/elastic/src/adapter.ts
index af7e64304a6..2ae65b372c5 100644
--- a/server/elastic/src/adapter.ts
+++ b/server/elastic/src/adapter.ts
@@ -18,7 +18,6 @@ import {
   Class,
   Doc,
   DocumentQuery,
-  FullTextData,
   IndexingConfiguration,
   MeasureContext,
   Ref,
@@ -54,8 +53,8 @@ function getIndexVersion (): string {
 class ElasticAdapter implements FullTextAdapter {
   private readonly workspaceString: string
-  private readonly getFulltextDocId: (doc: Ref<Doc>) => Ref<FullTextData>
-  private readonly getDocId: (fulltext: Ref<FullTextData>) => Ref<Doc>
+  private readonly getFulltextDocId: (doc: Ref<Doc>) => Ref<Doc>
+  private readonly getDocId: (fulltext: Ref<Doc>) => Ref<Doc>
   private readonly indexName: string
 
   constructor (
@@ -67,7 +66,7 @@
   ) {
     this.indexName = `${indexBaseName}_${indexVersion}`
     this.workspaceString = toWorkspaceString(workspaceId)
-    this.getFulltextDocId = (doc) => `${doc}@${this.workspaceString}` as Ref<FullTextData>
+    this.getFulltextDocId = (doc) => `${doc}@${this.workspaceString}` as Ref<Doc>
     this.getDocId = (fulltext) => fulltext.slice(0, -1 * (this.workspaceString.length + 1)) as Ref<Doc>
   }
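The adapter above keeps one shared Elastic index per cluster, so document ids are namespaced per workspace. A plain-TypeScript sketch of the round-trip, with plain strings standing in for the Ref types:

```ts
// Sketch of the per-workspace id mapping retained in the adapter above:
// a document ref is suffixed with '@<workspace>' to form the Elastic _id,
// and getDocId recovers the original ref by slicing the suffix back off.
const workspaceString = 'ws1'

const getFulltextDocId = (doc: string): string => `${doc}@${workspaceString}`
const getDocId = (fulltext: string): string => fulltext.slice(0, -1 * (workspaceString.length + 1))

console.log(getFulltextDocId('task-123')) // 'task-123@ws1'
console.log(getDocId('task-123@ws1'))     // 'task-123'
```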
diff --git a/server/elastic/src/backup.ts b/server/elastic/src/backup.ts
deleted file mode 100644
index 04643663c22..00000000000
--- a/server/elastic/src/backup.ts
+++ /dev/null
@@ -1,333 +0,0 @@
-//
-// Copyright © 2022 Hardcore Engineering Inc.
-//
-// Licensed under the Eclipse Public License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License. You may
-// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-//
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-import { ApiResponse, Client } from '@elastic/elasticsearch'
-import { SearchResponse } from '@elastic/elasticsearch/api/types'
-import core, {
-  Class,
-  Doc,
-  DocumentQuery,
-  DocumentUpdate,
-  Domain,
-  FindOptions,
-  FindResult,
-  FullTextData,
-  Hierarchy,
-  IndexingConfiguration,
-  MeasureContext,
-  Ref,
-  Space,
-  StorageIterator,
-  Tx,
-  TxResult,
-  WorkspaceId,
-  toWorkspaceString
-} from '@hcengineering/core'
-import { getMetadata, PlatformError, unknownStatus } from '@hcengineering/platform'
-import serverCore, { DbAdapter, IndexedDoc } from '@hcengineering/server-core'
-import { createHash } from 'node:crypto'
-
-function getIndexName (): string {
-  return getMetadata(serverCore.metadata.ElasticIndexName) ?? 'storage_index'
-}
-
-function getIndexVersion (): string {
-  return getMetadata(serverCore.metadata.ElasticIndexVersion) ?? 'v1'
-}
-
-class ElasticDataAdapter implements DbAdapter {
-  private readonly workspaceString: string
-  private readonly getFulltextDocId: (doc: Ref<Doc>) => Ref<FullTextData>
-  private readonly getDocId: (fulltext: Ref<FullTextData>) => Ref<Doc>
-  private readonly indexName: string
-
-  constructor (
-    readonly workspaceId: WorkspaceId,
-    private readonly client: Client,
-    readonly indexBaseName: string,
-    readonly indexVersion: string
-  ) {
-    this.indexName = `${indexBaseName}_${indexVersion}`
-    this.workspaceString = toWorkspaceString(workspaceId)
-    this.getFulltextDocId = (doc) => `${doc}@${this.workspaceString}` as Ref<FullTextData>
-    this.getDocId = (fulltext) => fulltext.slice(0, -1 * (this.workspaceString.length + 1)) as Ref<Doc>
-  }
-
-  async findAll<T extends Doc>(
-    ctx: MeasureContext,
-    _class: Ref<Class<T>>,
-    query: DocumentQuery<T>,
-    options?: FindOptions<T>
-  ): Promise<FindResult<T>> {
-    return Object.assign([], { total: 0 })
-  }
-
-  async tx (ctx: MeasureContext, ...tx: Tx[]): Promise<TxResult[]> {
-    return []
-  }
-
-  async createIndexes (domain: Domain, config: Pick<IndexingConfiguration<Doc>, 'indexes'>): Promise<void> {}
-  async removeOldIndex (domain: Domain, deletePattern: RegExp, keepPattern: RegExp): Promise<void> {}
-
-  async close (): Promise<void> {
-    await this.client.close()
-  }
-
-  find (ctx: MeasureContext, domain: Domain): StorageIterator {
-    let listRecieved = false
-    let pos = 0
-    let buffer: { _id: string, data: IndexedDoc }[] = []
-    let resp: ApiResponse | null = null
-    let finished = false
-    // eslint-disable-next-line @typescript-eslint/naming-convention
-    let scroll_id: string | undefined
-
-    const stIterator = {
-      next: async () => {
-        try {
-          if (!listRecieved) {
-            const q = {
-              index: this.indexName,
-              type: '_doc',
-              scroll: '23h',
-              // search_type: 'scan', //if I use search_type then it requires size otherwise it shows 0 result
-              size: 100,
-              body: {
-                query: {
-                  bool: {
-                    must: {
-                      match: {
-                        workspaceId: { query: this.workspaceString, operator: 'and' }
-                      }
-                    }
-                  }
-                }
-              }
-            }
-            resp = await this.client.search(q)
-            if (resp.statusCode !== 200) {
-              if (resp.body?.error?.type === 'index_not_found_exception') {
-                return undefined
-              }
-              console.error('failed elastic query', q, resp)
-              throw new PlatformError(unknownStatus(`failed to elastic query ${JSON.stringify(resp)}`))
-            }
-            buffer = resp.body.hits.hits.map((hit: any) => ({ _id: hit._id, data: hit._source }))
-            if (buffer.length === 0) {
-              finished = true
-              await stIterator.close()
-            }
-            scroll_id = (resp.body as SearchResponse)._scroll_id
-            listRecieved = true
-          }
-          if (resp !== null && pos === buffer.length && !finished) {
-            const params = {
-              scroll_id,
-              scroll: '23h'
-            }
-            resp = await this.client.scroll(params, { maxRetries: 5 })
-
-            if (resp.statusCode !== 200) {
-              console.error('failed elastic query scroll', scroll_id, resp)
-              throw new PlatformError(unknownStatus(`failed to elastic query ${JSON.stringify(resp)}`))
-            }
-            buffer = resp.body.hits.hits.map((hit: any) => ({ _id: hit._id, data: hit._source }))
-            if (buffer.length === 0) {
-              finished = true
-              await stIterator.close()
-            }
-            pos = 0
-          }
-          if (pos < buffer.length) {
-            const item = buffer[pos]
-            const hash = createHash('sha256')
-            const json = JSON.stringify(item.data)
-            hash.update(json)
-            const digest = hash.digest('base64')
-            const result = {
-              id: this.getDocId(item._id as Ref<FullTextData>),
-              hash: digest,
-              size: json.length
-            }
-            pos++
-            return result
-          }
-        } catch (e: any) {
-          if (e?.meta?.body?.error?.type === 'index_not_found_exception') {
-            return undefined
-          }
-          await stIterator.close()
-          console.error('elastic error:', e)
-          throw new PlatformError(e)
-        }
-      },
-      close: async () => {
-        if (scroll_id != null) {
-          await this.client.clearScroll({ scroll_id })
-          scroll_id = undefined
-        }
-      }
-    }
-    return stIterator
-  }
-
-  async load (ctx: MeasureContext, domain: Domain, docs: Ref<Doc>[]): Promise<Doc[]> {
-    const result: Doc[] = []
-    const toLoad = [...docs]
-
-    while (toLoad.length > 0) {
-      const part = toLoad.splice(0, 5000)
-      const resp = await this.client.search({
-        index: this.indexName,
-        type: '_doc',
-        body: {
-          query: {
-            bool: {
-              must: [
-                {
-                  terms: {
-                    _id: part.map(this.getFulltextDocId),
-                    boost: 1.0
-                  }
-                },
-                {
-                  match: {
-                    workspaceId: { query: this.workspaceString, operator: 'and' }
-                  }
-                }
-              ]
-            }
-          },
-          size: part.length
-        }
-      })
-      const buffer = resp.body.hits.hits.map((hit: any) => ({ _id: hit._id, data: hit._source }))
-
-      for (const item of buffer) {
-        const dta: FullTextData = {
-          _id: this.getDocId(item._id) as Ref<FullTextData>, // Export without workspace portion of ID
-          _class: core.class.FulltextData,
-          space: 'fulltext-blob' as Ref<Space>,
-          modifiedOn: item.data.modifiedOn,
-          modifiedBy: item.data.modifiedBy,
-          data: item.data
-        }
-        result.push(dta)
-      }
-    }
-    return result
-  }
-
-  async upload (ctx: MeasureContext, domain: Domain, docs: Doc[]): Promise<void> {
-    while (docs.length > 0) {
-      const part = docs.splice(0, 10000)
-      try {
-        await this.client.deleteByQuery(
-          {
-            type: '_doc',
-            index: this.indexName,
-            body: {
-              query: {
-                bool: {
-                  must: [
-                    {
-                      terms: {
-                        _id: part.map((it) => this.getFulltextDocId(it._id)),
-                        boost: 1.0
-                      }
-                    },
-                    {
-                      match: {
-                        workspaceId: { query: this.workspaceString, operator: 'and' }
-                      }
-                    }
-                  ]
-                }
-              },
-              size: part.length
-            }
-          },
-          undefined
-        )
-      } catch (err: any) {
-        console.error(err)
-      }
-
-      const operations = part.flatMap((doc) => [
-        { index: { _index: this.indexName, _id: this.getFulltextDocId(doc._id) } },
-        {
-          ...(doc as FullTextData).data,
-          workspaceId: this.workspaceString
-        }
-      ])
-
-      await this.client.bulk({ refresh: true, body: operations })
-    }
-  }
-
-  async update (ctx: MeasureContext, domain: Domain, operations: Map<Ref<Doc>, DocumentUpdate<Doc>>): Promise<void> {
-    throw new Error('Method not implemented.')
-  }
-
-  async clean (ctx: MeasureContext, domain: Domain, docs: Ref<Doc>[]): Promise<void> {
-    while (docs.length > 0) {
-      const part = docs.splice(0, 10000)
-      await this.client.deleteByQuery(
-        {
-          type: '_doc',
-          index: this.indexName,
-          body: {
-            query: {
-              bool: {
-                must: [
-                  {
-                    terms: {
-                      _id: part.map(this.getFulltextDocId),
-                      boost: 1.0
-                    }
-                  },
-                  {
-                    match: {
-                      workspaceId: { query: this.workspaceString, operator: 'and' }
-                    }
-                  }
-                ]
-              }
-            },
-            size: part.length
-          }
-        },
-        undefined
-      )
-    }
-  }
-}
-
-/**
- * @public
- */
-export async function createElasticBackupDataAdapter (
-  ctx: MeasureContext,
-  hierarchy: Hierarchy,
-  url: string,
-  workspaceId: WorkspaceId
-): Promise<DbAdapter> {
-  const client = new Client({
-    node: url
-  })
-  const indexBaseName = getIndexName()
-  const indexVersion = getIndexVersion()
-  return new ElasticDataAdapter(workspaceId, client, indexBaseName, indexVersion)
-}
diff --git a/server/elastic/src/index.ts b/server/elastic/src/index.ts
index 185e4869f68..7da598e6324 100644
--- a/server/elastic/src/index.ts
+++ b/server/elastic/src/index.ts
@@ -15,4 +15,3 @@
 //
 export { createElasticAdapter } from './adapter'
-export { createElasticBackupDataAdapter } from './backup'
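The deleted adapter held Elastic scroll contexts open for up to 23 hours and freed them in close(); the deleted test pinned search.max_open_scroll_context to 2 precisely to prove that cleanup happened. A minimal sketch of that lifecycle against the v7 @elastic/elasticsearch client used by the deleted code (index name and query are placeholders):

```ts
// Every open scroll pins server-side state, so an iterator must clearScroll
// when it finishes, or the cluster's scroll-context limit is exhausted.
import { Client } from '@elastic/elasticsearch'

async function scanAll (client: Client, index: string): Promise<number> {
  let resp = await client.search({ index, scroll: '1m', size: 100, body: { query: { match_all: {} } } })
  let scrollId: string | undefined = resp.body._scroll_id
  let count = 0
  try {
    while (resp.body.hits.hits.length > 0) {
      count += resp.body.hits.hits.length
      resp = await client.scroll({ scroll_id: scrollId, scroll: '1m' })
      scrollId = resp.body._scroll_id
    }
  } finally {
    // Release the scroll context even if iteration throws.
    if (scrollId != null) await client.clearScroll({ scroll_id: scrollId })
  }
  return count
}
```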
diff --git a/server/front/src/index.ts b/server/front/src/index.ts
index 27fddd533cd..4327b550422 100644
--- a/server/front/src/index.ts
+++ b/server/front/src/index.ts
@@ -17,7 +17,7 @@
 import { Analytics } from '@hcengineering/analytics'
 import { MeasureContext, Blob as PlatformBlob, WorkspaceId, metricsAggregate, type Ref } from '@hcengineering/core'
 import { Token, decodeToken } from '@hcengineering/server-token'
-import { StorageAdapter, removeAllObjects } from '@hcengineering/storage'
+import { StorageAdapter } from '@hcengineering/storage'
 import bp from 'body-parser'
 import cors from 'cors'
 import express, { Request, Response } from 'express'
@@ -498,43 +498,6 @@
     )
   })
 
-  const handleDelete = async (req: Request, res: Response): Promise<void> => {
-    try {
-      const authHeader = req.headers.authorization
-      if (authHeader === undefined) {
-        res.status(403).send()
-        return
-      }
-
-      const token = authHeader.split(' ')[1]
-      const payload = decodeToken(token)
-      const uuid = req.query.file as string
-      if (uuid === '') {
-        res.status(500).send()
-        return
-      }
-
-      // TODO: We need to allow delete only of user attached documents. (https://front.hc.engineering/workbench/platform/tracker/TSK-1081)
-      await config.storageAdapter.remove(ctx, payload.workspace, [uuid])
-
-      // TODO: Add support for related documents.
-      // TODO: Move support of image resize/format change to separate place.
-      await removeAllObjects(ctx, config.storageAdapter, payload.workspace, uuid)
-
-      res.status(200).send()
-    } catch (error: any) {
-      Analytics.handleError(error)
-      ctx.error('failed to delete', { url: req.url })
-      res.status(500).send()
-    }
-  }
-
-  // eslint-disable-next-line @typescript-eslint/no-misused-promises
-  app.delete('/files', handleDelete)
-
-  // eslint-disable-next-line @typescript-eslint/no-misused-promises
-  app.delete('/files/*', handleDelete)
-
   // todo remove it after update all customers chrome extensions
   app.get('/import', (req, res) => {
     try {
diff --git a/server/server-pipeline/src/pipeline.ts b/server/server-pipeline/src/pipeline.ts
index 654214f722b..a11467f3921 100644
--- a/server/server-pipeline/src/pipeline.ts
+++ b/server/server-pipeline/src/pipeline.ts
@@ -2,14 +2,13 @@
 import {
   DOMAIN_BENCHMARK,
   DOMAIN_BLOB,
-  DOMAIN_FULLTEXT_BLOB,
   DOMAIN_MODEL,
   DOMAIN_TRANSIENT,
   DOMAIN_TX,
   type BrandingMap,
   type MeasureContext
 } from '@hcengineering/core'
-import { createElasticAdapter, createElasticBackupDataAdapter } from '@hcengineering/elastic'
+import { createElasticAdapter } from '@hcengineering/elastic'
 import {
   BlobLookupMiddleware,
   ConfigurationMiddleware,
@@ -79,7 +78,6 @@ export function createServerPipeline (
       [DOMAIN_TX]: 'MongoTx',
       [DOMAIN_TRANSIENT]: 'InMemory',
      [DOMAIN_BLOB]: 'StorageData',
-      [DOMAIN_FULLTEXT_BLOB]: 'FullTextBlob',
       [DOMAIN_MODEL]: 'Null',
       [DOMAIN_BENCHMARK]: 'Benchmark',
       ...extensions?.domains
@@ -107,10 +105,6 @@
         factory: createStorageDataAdapter,
         url: dbUrl
       },
-      FullTextBlob: {
-        factory: createElasticBackupDataAdapter,
-        url: opt.fullTextUrl
-      },
       Benchmark: {
         factory: createBenchmarkAdapter,
         url: ''