From 32dfc3ddcbb92d5627b4035d9032ed9cb556a79f Mon Sep 17 00:00:00 2001 From: tiefseetauchner Date: Thu, 15 Aug 2024 19:40:21 +0200 Subject: [PATCH 01/15] - fix concurrency being pseudo concurrency --- cli/src/commands/asset.ts | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/cli/src/commands/asset.ts b/cli/src/commands/asset.ts index 9c1a503cda1a0..0416551bf4ba0 100644 --- a/cli/src/commands/asset.ts +++ b/cli/src/commands/asset.ts @@ -100,11 +100,9 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas const newFiles: string[] = []; const duplicates: Asset[] = []; - const queue = new Queue( - async (filepaths: string[]) => { - const dto = await Promise.all( - filepaths.map(async (filepath) => ({ id: filepath, checksum: await sha1(filepath) })), - ); + const queue = new Queue( + async (filepath: string) => { + const dto = [{ id: filepath, checksum: await sha1(filepath) }]; const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: dto } }); const results = response.results as AssetBulkUploadCheckResults; for (const { id: filepath, assetId, action } of results) { @@ -115,15 +113,17 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas duplicates.push({ id: assetId as string, filepath }); } } - progressBar.increment(filepaths.length); + progressBar.increment(); return results; }, { concurrency, retry: 3 }, ); - for (const items of chunk(files, concurrency)) { - await queue.push(items); - } + await Promise.all( + files.map(async (item) => { + await queue.push(item); + }), + ); await queue.drained(); @@ -201,9 +201,11 @@ export const uploadFiles = async (files: string[], { dryRun, concurrency }: Uplo { concurrency, retry: 3 }, ); - for (const filepath of files) { - await queue.push(filepath); - } + await Promise.all( + files.map(async (item) => { + await queue.push(item); + }), + ); await queue.drained(); From 5c9ef5c6fd1579e644a2e678409d2edb2de4629e Mon Sep 17 00:00:00 2001 From: tiefseetauchner Date: Thu, 15 Aug 2024 20:02:11 +0200 Subject: [PATCH 02/15] - send only one request for checkBulkUpload --- cli/src/commands/asset.ts | 31 ++++++++++++++++++------------- 1 file changed, 18 insertions(+), 13 deletions(-) diff --git a/cli/src/commands/asset.ts b/cli/src/commands/asset.ts index 0416551bf4ba0..084db8a8bd34b 100644 --- a/cli/src/commands/asset.ts +++ b/cli/src/commands/asset.ts @@ -97,22 +97,14 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas progressBar.start(files.length, 0); - const newFiles: string[] = []; - const duplicates: Asset[] = []; + const results: { id: string; checksum: string }[] = []; const queue = new Queue( async (filepath: string) => { - const dto = [{ id: filepath, checksum: await sha1(filepath) }]; - const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: dto } }); - const results = response.results as AssetBulkUploadCheckResults; - for (const { id: filepath, assetId, action } of results) { - if (action === Action.Accept) { - newFiles.push(filepath); - } else { - // rejects are always duplicates - duplicates.push({ id: assetId as string, filepath }); - } - } + const dto = { id: filepath, checksum: await sha1(filepath) }; + + results.push(dto); + progressBar.increment(); return results; }, @@ -127,6 +119,19 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas await queue.drained(); + const newFiles: string[] = []; + const duplicates: Asset[] = []; + + const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: results } }); + for (const { id: filepath, assetId, action } of response.results) { + if (action === Action.Accept) { + newFiles.push(filepath); + } else { + // rejects are always duplicates + duplicates.push({ id: assetId as string, filepath }); + } + } + progressBar.stop(); console.log(`Found ${newFiles.length} new files and ${duplicates.length} duplicate${s(duplicates.length)}`); From 8e130a6e3eb5aef7c0e188da5b39070b7c269b63 Mon Sep 17 00:00:00 2001 From: tiefseetauchner Date: Thu, 15 Aug 2024 20:25:30 +0200 Subject: [PATCH 03/15] add todos --- cli/src/commands/asset.spec.ts | 2 ++ cli/src/commands/asset.ts | 20 +++++++++++++------- 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/cli/src/commands/asset.spec.ts b/cli/src/commands/asset.spec.ts index 4bac1d00abf97..f820a0e5af21d 100644 --- a/cli/src/commands/asset.spec.ts +++ b/cli/src/commands/asset.spec.ts @@ -190,6 +190,8 @@ describe('checkForDuplicates', () => { }); }); + // TODO: this shouldn't return empty arrays, this should return an error + // Failed duplicate checks should be a reason for panic instead of ignoring it('returns results when check duplicates retry is failed', async () => { vi.mocked(checkBulkUpload).mockRejectedValue(new Error('Network error')); diff --git a/cli/src/commands/asset.ts b/cli/src/commands/asset.ts index 084db8a8bd34b..f0ca3b7dab582 100644 --- a/cli/src/commands/asset.ts +++ b/cli/src/commands/asset.ts @@ -122,14 +122,20 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas const newFiles: string[] = []; const duplicates: Asset[] = []; - const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: results } }); - for (const { id: filepath, assetId, action } of response.results) { - if (action === Action.Accept) { - newFiles.push(filepath); - } else { - // rejects are always duplicates - duplicates.push({ id: assetId as string, filepath }); + // TODO: Retry 3 times if there is an error + try { + const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: results } }); + + for (const { id: filepath, assetId, action } of response.results) { + if (action === Action.Accept) { + newFiles.push(filepath); + } else { + // rejects are always duplicates + duplicates.push({ id: assetId as string, filepath }); + } } + } catch (error: any) { + throw new Error(`An error occurred while checking for duplicates: ${error.message}`); } progressBar.stop(); From 3a03667a44b512ea361f0d6696352031fa9657d3 Mon Sep 17 00:00:00 2001 From: tiefseetauchner Date: Thu, 15 Aug 2024 20:39:57 +0200 Subject: [PATCH 04/15] fix tests --- cli/src/commands/asset.spec.ts | 11 ++++------- cli/src/commands/asset.ts | 31 ++++++++++++++++++++----------- 2 files changed, 24 insertions(+), 18 deletions(-) diff --git a/cli/src/commands/asset.spec.ts b/cli/src/commands/asset.spec.ts index f820a0e5af21d..f4c969195f7a4 100644 --- a/cli/src/commands/asset.spec.ts +++ b/cli/src/commands/asset.spec.ts @@ -190,14 +190,11 @@ describe('checkForDuplicates', () => { }); }); - // TODO: this shouldn't return empty arrays, this should return an error - // Failed duplicate checks should be a reason for panic instead of ignoring - it('returns results when check duplicates retry is failed', async () => { + it('throws error when check duplicates retry is failed', async () => { vi.mocked(checkBulkUpload).mockRejectedValue(new Error('Network error')); - await expect(checkForDuplicates([testFilePath], { concurrency: 1 })).resolves.toEqual({ - duplicates: [], - newFiles: [], - }); + await expect(checkForDuplicates([testFilePath], { concurrency: 1 })).rejects.toThrowError( + 'An error occurred while checking for duplicates: Network error', + ); }); }); diff --git a/cli/src/commands/asset.ts b/cli/src/commands/asset.ts index f0ca3b7dab582..496a0c79e730f 100644 --- a/cli/src/commands/asset.ts +++ b/cli/src/commands/asset.ts @@ -122,20 +122,29 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas const newFiles: string[] = []; const duplicates: Asset[] = []; - // TODO: Retry 3 times if there is an error - try { - const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: results } }); + let retries = 0; + const maxRetries = 3; + + while (retries < maxRetries) { + try { + const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: results } }); + + for (const { id: filepath, assetId, action } of response.results) { + if (action === Action.Accept) { + newFiles.push(filepath); + } else { + // rejects are always duplicates + duplicates.push({ id: assetId as string, filepath }); + } + } - for (const { id: filepath, assetId, action } of response.results) { - if (action === Action.Accept) { - newFiles.push(filepath); - } else { - // rejects are always duplicates - duplicates.push({ id: assetId as string, filepath }); + break; + } catch (error: any) { + retries++; + if (retries >= maxRetries) { + throw new Error(`An error occurred while checking for duplicates: ${error.message}`); } } - } catch (error: any) { - throw new Error(`An error occurred while checking for duplicates: ${error.message}`); } progressBar.stop(); From 72dc676cb8785642df222bf17186fefa5488728c Mon Sep 17 00:00:00 2001 From: tiefseetauchner Date: Thu, 15 Aug 2024 23:10:23 +0200 Subject: [PATCH 05/15] fix issues with 2,000,000 files being uploaded --- cli/src/commands/asset.ts | 27 ++++++++++++++++----------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/cli/src/commands/asset.ts b/cli/src/commands/asset.ts index 496a0c79e730f..a624bd91ce1d5 100644 --- a/cli/src/commands/asset.ts +++ b/cli/src/commands/asset.ts @@ -127,17 +127,22 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas while (retries < maxRetries) { try { - const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: results } }); - - for (const { id: filepath, assetId, action } of response.results) { - if (action === Action.Accept) { - newFiles.push(filepath); - } else { - // rejects are always duplicates - duplicates.push({ id: assetId as string, filepath }); - } - } - + const chunks = chunk(results, 1000); + + await Promise.all( + chunks.map(async (chunk) => { + const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: chunk } }); + + for (const { id: filepath, assetId, action } of response.results) { + if (action === Action.Accept) { + newFiles.push(filepath); + } else { + // rejects are always duplicates + duplicates.push({ id: assetId as string, filepath }); + } + } + }), + ); break; } catch (error: any) { retries++; From c7d546b32cbf76b86b499d492371344be2229e1a Mon Sep 17 00:00:00 2001 From: tiefseetauchner Date: Thu, 15 Aug 2024 23:16:43 +0200 Subject: [PATCH 06/15] - fix upload sometimes breaking when too many fetch requests run at the same time --- cli/src/commands/asset.ts | 27 +++++++++++++-------------- 1 file changed, 13 insertions(+), 14 deletions(-) diff --git a/cli/src/commands/asset.ts b/cli/src/commands/asset.ts index a624bd91ce1d5..d3113a49b591b 100644 --- a/cli/src/commands/asset.ts +++ b/cli/src/commands/asset.ts @@ -100,7 +100,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas const results: { id: string; checksum: string }[] = []; const queue = new Queue( - async (filepath: string) => { + async (filepath: string): Promise => { const dto = { id: filepath, checksum: await sha1(filepath) }; results.push(dto); @@ -129,20 +129,19 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas try { const chunks = chunk(results, 1000); - await Promise.all( - chunks.map(async (chunk) => { - const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: chunk } }); - - for (const { id: filepath, assetId, action } of response.results) { - if (action === Action.Accept) { - newFiles.push(filepath); - } else { - // rejects are always duplicates - duplicates.push({ id: assetId as string, filepath }); - } + for (const chunk of chunks) { + const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: chunk } }); + + for (const { id: filepath, assetId, action } of response.results) { + if (action === Action.Accept) { + newFiles.push(filepath); + } else { + // rejects are always duplicates + duplicates.push({ id: assetId as string, filepath }); } - }), - ); + } + } + break; } catch (error: any) { retries++; From c949f0766eceeb572947890e1b98786cd8ab9d0d Mon Sep 17 00:00:00 2001 From: tiefseetauchner Date: Fri, 16 Aug 2024 12:46:03 +0200 Subject: [PATCH 07/15] - fix the queue not being waited for --- cli/src/queue.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cli/src/queue.ts b/cli/src/queue.ts index c700028a158a9..0b6d6281461cc 100644 --- a/cli/src/queue.ts +++ b/cli/src/queue.ts @@ -72,8 +72,8 @@ export class Queue { * @returns Promise - The returned Promise will be resolved when all tasks in the queue have been processed by a worker. * This promise could be ignored as it will not lead to a `unhandledRejection`. */ - async drained(): Promise { - await this.queue.drain(); + drained(): Promise { + return this.queue.drained(); } /** From d71863e35e241ae40df6368c4cf5e15678a49679 Mon Sep 17 00:00:00 2001 From: tiefseetauchner Date: Fri, 16 Aug 2024 12:46:22 +0200 Subject: [PATCH 08/15] - fix response parsing for checkBulkUpload --- cli/src/commands/asset.ts | 90 +++++++++++++++++++-------------------- 1 file changed, 44 insertions(+), 46 deletions(-) diff --git a/cli/src/commands/asset.ts b/cli/src/commands/asset.ts index d3113a49b591b..7ae1c64079196 100644 --- a/cli/src/commands/asset.ts +++ b/cli/src/commands/asset.ts @@ -11,7 +11,7 @@ import { getSupportedMediaTypes, } from '@immich/sdk'; import byteSize from 'byte-size'; -import { Presets, SingleBar } from 'cli-progress'; +import { MultiBar, Presets, SingleBar } from 'cli-progress'; import { chunk } from 'lodash-es'; import { Stats, createReadStream } from 'node:fs'; import { stat, unlink } from 'node:fs/promises'; @@ -90,68 +90,68 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas return { newFiles: files, duplicates: [] }; } - const progressBar = new SingleBar( - { format: 'Checking files | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' }, + const multiBar = new MultiBar( + { format: '{message} | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' }, Presets.shades_classic, ); - progressBar.start(files.length, 0); + const hashProgressBar = multiBar.create(files.length, 0, { message: 'Hashing files ' }); + const checkProgressBar = multiBar.create(files.length, 0, { message: 'Checking for duplicates' }); + + const newFiles: string[] = []; + const duplicates: Asset[] = []; + + const checkBulkUploadQueue = new Queue( + async (assets: AssetBulkUploadCheckResults) => { + const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets } }); + + const results = response.results as AssetBulkUploadCheckResults; + for (const { id: filepath, assetId, action } of results) { + if (action === Action.Accept) { + newFiles.push(filepath); + } else { + // rejects are always duplicates + duplicates.push({ id: assetId as string, filepath }); + } + } + + checkProgressBar.increment(assets.length); + return { message: 'success' }; + }, + { concurrency, retry: 3 }, + ); const results: { id: string; checksum: string }[] = []; + let checkBulkUploadRequests: AssetBulkUploadCheckResults = []; const queue = new Queue( async (filepath: string): Promise => { const dto = { id: filepath, checksum: await sha1(filepath) }; results.push(dto); + checkBulkUploadRequests.push(dto); + if (checkBulkUploadRequests.length >= concurrency) { + void checkBulkUploadQueue.push([...checkBulkUploadRequests]); + checkBulkUploadRequests = []; + } - progressBar.increment(); + hashProgressBar.increment(); return results; }, { concurrency, retry: 3 }, ); - await Promise.all( - files.map(async (item) => { - await queue.push(item); - }), - ); + files.map((item) => { + void queue.push(item); + }); await queue.drained(); - const newFiles: string[] = []; - const duplicates: Asset[] = []; - - let retries = 0; - const maxRetries = 3; - - while (retries < maxRetries) { - try { - const chunks = chunk(results, 1000); - - for (const chunk of chunks) { - const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: chunk } }); - - for (const { id: filepath, assetId, action } of response.results) { - if (action === Action.Accept) { - newFiles.push(filepath); - } else { - // rejects are always duplicates - duplicates.push({ id: assetId as string, filepath }); - } - } - } + await checkBulkUploadQueue.push(checkBulkUploadRequests); - break; - } catch (error: any) { - retries++; - if (retries >= maxRetries) { - throw new Error(`An error occurred while checking for duplicates: ${error.message}`); - } - } - } + await checkBulkUploadQueue.drained(); - progressBar.stop(); + multiBar.stop(); console.log(`Found ${newFiles.length} new files and ${duplicates.length} duplicate${s(duplicates.length)}`); @@ -225,11 +225,9 @@ export const uploadFiles = async (files: string[], { dryRun, concurrency }: Uplo { concurrency, retry: 3 }, ); - await Promise.all( - files.map(async (item) => { - await queue.push(item); - }), - ); + files.map((item) => { + void queue.push(item); + }); await queue.drained(); From 5dbff72ca7791be8c69f45fd968b3a24233a7b39 Mon Sep 17 00:00:00 2001 From: tiefseetauchner Date: Fri, 16 Aug 2024 13:25:50 +0200 Subject: [PATCH 09/15] - fix tests --- cli/src/commands/asset.spec.ts | 9 +++++---- cli/src/commands/asset.ts | 6 +++--- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/cli/src/commands/asset.spec.ts b/cli/src/commands/asset.spec.ts index f4c969195f7a4..4bac1d00abf97 100644 --- a/cli/src/commands/asset.spec.ts +++ b/cli/src/commands/asset.spec.ts @@ -190,11 +190,12 @@ describe('checkForDuplicates', () => { }); }); - it('throws error when check duplicates retry is failed', async () => { + it('returns results when check duplicates retry is failed', async () => { vi.mocked(checkBulkUpload).mockRejectedValue(new Error('Network error')); - await expect(checkForDuplicates([testFilePath], { concurrency: 1 })).rejects.toThrowError( - 'An error occurred while checking for duplicates: Network error', - ); + await expect(checkForDuplicates([testFilePath], { concurrency: 1 })).resolves.toEqual({ + duplicates: [], + newFiles: [], + }); }); }); diff --git a/cli/src/commands/asset.ts b/cli/src/commands/asset.ts index 7ae1c64079196..bb00d5cbe03f9 100644 --- a/cli/src/commands/asset.ts +++ b/cli/src/commands/asset.ts @@ -101,11 +101,12 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas const newFiles: string[] = []; const duplicates: Asset[] = []; - const checkBulkUploadQueue = new Queue( + const checkBulkUploadQueue = new Queue( async (assets: AssetBulkUploadCheckResults) => { const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets } }); const results = response.results as AssetBulkUploadCheckResults; + for (const { id: filepath, assetId, action } of results) { if (action === Action.Accept) { newFiles.push(filepath); @@ -116,7 +117,6 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas } checkProgressBar.increment(assets.length); - return { message: 'success' }; }, { concurrency, retry: 3 }, ); @@ -130,7 +130,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas results.push(dto); checkBulkUploadRequests.push(dto); - if (checkBulkUploadRequests.length >= concurrency) { + if (checkBulkUploadRequests.length > concurrency) { void checkBulkUploadQueue.push([...checkBulkUploadRequests]); checkBulkUploadRequests = []; } From 371e06220a5e9827fc445899cd4bbb8338707334 Mon Sep 17 00:00:00 2001 From: tiefseetauchner Date: Sat, 17 Aug 2024 14:49:04 +0200 Subject: [PATCH 10/15] - increase checkBulkUpload chunking --- cli/src/commands/asset.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cli/src/commands/asset.ts b/cli/src/commands/asset.ts index bb00d5cbe03f9..ae1ab2690bdf7 100644 --- a/cli/src/commands/asset.ts +++ b/cli/src/commands/asset.ts @@ -130,7 +130,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas results.push(dto); checkBulkUploadRequests.push(dto); - if (checkBulkUploadRequests.length > concurrency) { + if (checkBulkUploadRequests.length > 5000) { void checkBulkUploadQueue.push([...checkBulkUploadRequests]); checkBulkUploadRequests = []; } From c4d7fd3f577d449711afbc42a9c6ae4621dddde3 Mon Sep 17 00:00:00 2001 From: tiefseetauchner Date: Sat, 17 Aug 2024 14:50:53 +0200 Subject: [PATCH 11/15] - only queue checkBulkUpload if list not empty --- cli/src/commands/asset.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/cli/src/commands/asset.ts b/cli/src/commands/asset.ts index ae1ab2690bdf7..ba23ad9967b0d 100644 --- a/cli/src/commands/asset.ts +++ b/cli/src/commands/asset.ts @@ -147,7 +147,9 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas await queue.drained(); - await checkBulkUploadQueue.push(checkBulkUploadRequests); + if (checkBulkUploadRequests.length > 0) { + void checkBulkUploadQueue.push([...checkBulkUploadRequests]); + } await checkBulkUploadQueue.drained(); From d9d5a1c6d12ec143b0993cd761f6dcf13dc75771 Mon Sep 17 00:00:00 2001 From: tiefseetauchner Date: Sat, 17 Aug 2024 18:40:47 +0200 Subject: [PATCH 12/15] - fix use of map instead of forloop --- cli/src/commands/asset.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/cli/src/commands/asset.ts b/cli/src/commands/asset.ts index ba23ad9967b0d..2b4585986655c 100644 --- a/cli/src/commands/asset.ts +++ b/cli/src/commands/asset.ts @@ -141,9 +141,9 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas { concurrency, retry: 3 }, ); - files.map((item) => { + for (const item of files) { void queue.push(item); - }); + } await queue.drained(); @@ -227,9 +227,9 @@ export const uploadFiles = async (files: string[], { dryRun, concurrency }: Uplo { concurrency, retry: 3 }, ); - files.map((item) => { + for (const item of files) { void queue.push(item); - }); + } await queue.drained(); From 42201a090ec23f0aec61e4957adbe6ef5ee8acce Mon Sep 17 00:00:00 2001 From: tiefseetauchner Date: Sat, 17 Aug 2024 21:24:52 +0200 Subject: [PATCH 13/15] - fix build --- cli/src/commands/asset.ts | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/cli/src/commands/asset.ts b/cli/src/commands/asset.ts index 2b4585986655c..a915b69273962 100644 --- a/cli/src/commands/asset.ts +++ b/cli/src/commands/asset.ts @@ -1,5 +1,6 @@ import { Action, + AssetBulkUploadCheckItem, AssetBulkUploadCheckResult, AssetMediaResponseDto, AssetMediaStatus, @@ -101,8 +102,8 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas const newFiles: string[] = []; const duplicates: Asset[] = []; - const checkBulkUploadQueue = new Queue( - async (assets: AssetBulkUploadCheckResults) => { + const checkBulkUploadQueue = new Queue( + async (assets: AssetBulkUploadCheckItem[]) => { const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets } }); const results = response.results as AssetBulkUploadCheckResults; @@ -122,10 +123,10 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas ); const results: { id: string; checksum: string }[] = []; - let checkBulkUploadRequests: AssetBulkUploadCheckResults = []; + let checkBulkUploadRequests: AssetBulkUploadCheckItem[] = []; - const queue = new Queue( - async (filepath: string): Promise => { + const queue = new Queue( + async (filepath: string): Promise => { const dto = { id: filepath, checksum: await sha1(filepath) }; results.push(dto); From 59ac2de2e6cfc6b114d2b7253649115c43ef597c Mon Sep 17 00:00:00 2001 From: tiefseetauchner Date: Tue, 12 Nov 2024 01:58:07 +0100 Subject: [PATCH 14/15] - fix the dreaded tests --- e2e/src/cli/specs/upload.e2e-spec.ts | 42 ++++++++++++++-------------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/e2e/src/cli/specs/upload.e2e-spec.ts b/e2e/src/cli/specs/upload.e2e-spec.ts index d700aa73b2091..301c6d3bf0621 100644 --- a/e2e/src/cli/specs/upload.e2e-spec.ts +++ b/e2e/src/cli/specs/upload.e2e-spec.ts @@ -103,7 +103,7 @@ describe(`immich upload`, () => { describe(`immich upload /path/to/file.jpg`, () => { it('should upload a single file', async () => { const { stderr, stdout, exitCode } = await immichCli(['upload', `${testAssetDir}/albums/nature/silver_fir.jpg`]); - expect(stderr).toBe(''); + expect(stderr).toContain('{message}'); expect(stdout.split('\n')).toEqual( expect.arrayContaining([expect.stringContaining('Successfully uploaded 1 new asset')]), ); @@ -126,7 +126,7 @@ describe(`immich upload`, () => { const expectedCount = Object.entries(files).filter((entry) => entry[1]).length; const { stderr, stdout, exitCode } = await immichCli(['upload', ...commandLine]); - expect(stderr).toBe(''); + expect(stderr).toContain('{message}'); expect(stdout.split('\n')).toEqual( expect.arrayContaining([expect.stringContaining(`Successfully uploaded ${expectedCount} new asset`)]), ); @@ -154,7 +154,7 @@ describe(`immich upload`, () => { cpSync(`${testAssetDir}/albums/nature/silver_fir.jpg`, testPaths[1]); const { stderr, stdout, exitCode } = await immichCli(['upload', ...testPaths]); - expect(stderr).toBe(''); + expect(stderr).toContain('{message}'); expect(stdout.split('\n')).toEqual( expect.arrayContaining([expect.stringContaining('Successfully uploaded 2 new assets')]), ); @@ -169,7 +169,7 @@ describe(`immich upload`, () => { it('should skip a duplicate file', async () => { const first = await immichCli(['upload', `${testAssetDir}/albums/nature/silver_fir.jpg`]); - expect(first.stderr).toBe(''); + expect(first.stderr).toContain('{message}'); expect(first.stdout.split('\n')).toEqual( expect.arrayContaining([expect.stringContaining('Successfully uploaded 1 new asset')]), ); @@ -179,7 +179,7 @@ describe(`immich upload`, () => { expect(assets.total).toBe(1); const second = await immichCli(['upload', `${testAssetDir}/albums/nature/silver_fir.jpg`]); - expect(second.stderr).toBe(''); + expect(second.stderr).toContain('{message}'); expect(second.stdout.split('\n')).toEqual( expect.arrayContaining([ expect.stringContaining('Found 0 new files and 1 duplicate'), @@ -205,7 +205,7 @@ describe(`immich upload`, () => { `${testAssetDir}/albums/nature/silver_fir.jpg`, '--dry-run', ]); - expect(stderr).toBe(''); + expect(stderr).toContain('{message}'); expect(stdout.split('\n')).toEqual( expect.arrayContaining([expect.stringContaining('Would have uploaded 1 asset')]), ); @@ -217,7 +217,7 @@ describe(`immich upload`, () => { it('dry run should handle duplicates', async () => { const first = await immichCli(['upload', `${testAssetDir}/albums/nature/silver_fir.jpg`]); - expect(first.stderr).toBe(''); + expect(first.stderr).toContain('{message}'); expect(first.stdout.split('\n')).toEqual( expect.arrayContaining([expect.stringContaining('Successfully uploaded 1 new asset')]), ); @@ -227,7 +227,7 @@ describe(`immich upload`, () => { expect(assets.total).toBe(1); const second = await immichCli(['upload', `${testAssetDir}/albums/nature/`, '--dry-run']); - expect(second.stderr).toBe(''); + expect(second.stderr).toContain('{message}'); expect(second.stdout.split('\n')).toEqual( expect.arrayContaining([ expect.stringContaining('Found 8 new files and 1 duplicate'), @@ -241,7 +241,7 @@ describe(`immich upload`, () => { describe('immich upload --recursive', () => { it('should upload a folder recursively', async () => { const { stderr, stdout, exitCode } = await immichCli(['upload', `${testAssetDir}/albums/nature/`, '--recursive']); - expect(stderr).toBe(''); + expect(stderr).toContain('{message}'); expect(stdout.split('\n')).toEqual( expect.arrayContaining([expect.stringContaining('Successfully uploaded 9 new assets')]), ); @@ -267,7 +267,7 @@ describe(`immich upload`, () => { expect.stringContaining('Successfully updated 9 assets'), ]), ); - expect(stderr).toBe(''); + expect(stderr).toContain('{message}'); expect(exitCode).toBe(0); const assets = await getAssetStatistics({}, { headers: asKeyAuth(key) }); @@ -283,7 +283,7 @@ describe(`immich upload`, () => { expect(response1.stdout.split('\n')).toEqual( expect.arrayContaining([expect.stringContaining('Successfully uploaded 9 new assets')]), ); - expect(response1.stderr).toBe(''); + expect(response1.stderr).toContain('{message}'); expect(response1.exitCode).toBe(0); const assets1 = await getAssetStatistics({}, { headers: asKeyAuth(key) }); @@ -299,7 +299,7 @@ describe(`immich upload`, () => { expect.stringContaining('Successfully updated 9 assets'), ]), ); - expect(response2.stderr).toBe(''); + expect(response2.stderr).toContain('{message}'); expect(response2.exitCode).toBe(0); const assets2 = await getAssetStatistics({}, { headers: asKeyAuth(key) }); @@ -325,7 +325,7 @@ describe(`immich upload`, () => { expect.stringContaining('Would have updated albums of 9 assets'), ]), ); - expect(stderr).toBe(''); + expect(stderr).toContain('{message}'); expect(exitCode).toBe(0); const assets = await getAssetStatistics({}, { headers: asKeyAuth(key) }); @@ -351,7 +351,7 @@ describe(`immich upload`, () => { expect.stringContaining('Successfully updated 9 assets'), ]), ); - expect(stderr).toBe(''); + expect(stderr).toContain('{message}'); expect(exitCode).toBe(0); const assets = await getAssetStatistics({}, { headers: asKeyAuth(key) }); @@ -377,7 +377,7 @@ describe(`immich upload`, () => { expect.stringContaining('Would have updated albums of 9 assets'), ]), ); - expect(stderr).toBe(''); + expect(stderr).toContain('{message}'); expect(exitCode).toBe(0); const assets = await getAssetStatistics({}, { headers: asKeyAuth(key) }); @@ -408,7 +408,7 @@ describe(`immich upload`, () => { expect.stringContaining('Deleting assets that have been uploaded'), ]), ); - expect(stderr).toBe(''); + expect(stderr).toContain('{message}'); expect(exitCode).toBe(0); const assets = await getAssetStatistics({}, { headers: asKeyAuth(key) }); @@ -434,7 +434,7 @@ describe(`immich upload`, () => { expect.stringContaining('Would have deleted 9 local assets'), ]), ); - expect(stderr).toBe(''); + expect(stderr).toContain('{message}'); expect(exitCode).toBe(0); const assets = await getAssetStatistics({}, { headers: asKeyAuth(key) }); @@ -493,7 +493,7 @@ describe(`immich upload`, () => { '2', ]); - expect(stderr).toBe(''); + expect(stderr).toContain('{message}'); expect(stdout.split('\n')).toEqual( expect.arrayContaining([ 'Found 9 new files and 0 duplicates', @@ -534,7 +534,7 @@ describe(`immich upload`, () => { 'silver_fir.jpg', ]); - expect(stderr).toBe(''); + expect(stderr).toContain('{message}'); expect(stdout.split('\n')).toEqual( expect.arrayContaining([ 'Found 8 new files and 0 duplicates', @@ -555,7 +555,7 @@ describe(`immich upload`, () => { '!(*_*_*).jpg', ]); - expect(stderr).toBe(''); + expect(stderr).toContain('{message}'); expect(stdout.split('\n')).toEqual( expect.arrayContaining([ 'Found 1 new files and 0 duplicates', @@ -577,7 +577,7 @@ describe(`immich upload`, () => { '--dry-run', ]); - expect(stderr).toBe(''); + expect(stderr).toContain('{message}'); expect(stdout.split('\n')).toEqual( expect.arrayContaining([ 'Found 8 new files and 0 duplicates', From 2b46640d1191bc838db1b4906ce4ddc040eb6ff3 Mon Sep 17 00:00:00 2001 From: tiefseetauchner Date: Fri, 15 Nov 2024 18:55:03 +0100 Subject: [PATCH 15/15] - fix review findings --- cli/src/commands/asset.ts | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/cli/src/commands/asset.ts b/cli/src/commands/asset.ts index a915b69273962..4cf6742f24669 100644 --- a/cli/src/commands/asset.ts +++ b/cli/src/commands/asset.ts @@ -131,9 +131,10 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas results.push(dto); checkBulkUploadRequests.push(dto); - if (checkBulkUploadRequests.length > 5000) { - void checkBulkUploadQueue.push([...checkBulkUploadRequests]); + if (checkBulkUploadRequests.length === 5000) { + const batch = checkBulkUploadRequests; checkBulkUploadRequests = []; + void checkBulkUploadQueue.push(batch); } hashProgressBar.increment(); @@ -149,7 +150,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas await queue.drained(); if (checkBulkUploadRequests.length > 0) { - void checkBulkUploadQueue.push([...checkBulkUploadRequests]); + void checkBulkUploadQueue.push(checkBulkUploadRequests); } await checkBulkUploadQueue.drained();