diff --git a/__tests__/utils/config.spec.ts b/__tests__/utils/config.spec.ts index 577dab8e..cacff73e 100644 --- a/__tests__/utils/config.spec.ts +++ b/__tests__/utils/config.spec.ts @@ -7,6 +7,16 @@ describe('Max chunk size tests', () => { expect(getMaxChunksSize()).toBe(15 * 1024 * 1024) }) + test('Returning valid config for chunking v2 minimum size', () => { + Object.assign(window, {OC: {appConfig: {files: { max_chunk_size: 4 * 1024 * 1024 }}}}) + expect(getMaxChunksSize()).toBe(5 * 1024 * 1024) + }) + + test('Returning valid config for chunking v2 maximum chunk count', () => { + Object.assign(window, {OC: {appConfig: {files: { max_chunk_size: 5 * 1024 * 1024 }}}}) + expect(getMaxChunksSize(50 * 1024 * 1024 * 10000)).toBe(5 * 1024 * 1024 * 10) + }) + test('Returning disabled chunking config', () => { Object.assign(window, {OC: {appConfig: {files: { max_chunk_size: 0 }}}}) expect(getMaxChunksSize()).toBe(0) diff --git a/__tests__/utils/upload.spec.ts b/__tests__/utils/upload.spec.ts index 69c8063d..bf18eb6b 100644 --- a/__tests__/utils/upload.spec.ts +++ b/__tests__/utils/upload.spec.ts @@ -67,6 +67,33 @@ describe('Initialize chunks upload temporary workspace', () => { url, }) }) + + test('Init random workspace for file destination', async () => { + axiosMock.request = vi.fn((config: any) => Promise.resolve(config?.onUploadProgress?.())) + + // mock the current location for our assert on the URL + Object.defineProperty(window, 'location', { + value: new URL('https://cloud.domain.com'), + configurable: true, + }) + + // mock the current user + document.head.setAttribute('data-user', 'test') + + const url = await initChunkWorkspace('https://cloud.domain.com/remote.php/dav/files/test/image.jpg') + + expect(url.startsWith('https://cloud.domain.com/remote.php/dav/uploads/test/web-file-upload-')).toBe(true) + expect(url.length).toEqual('https://cloud.domain.com/remote.php/dav/uploads/test/web-file-upload-123456789abcdefg'.length) + + 
expect(axiosMock.request).toHaveBeenCalledTimes(1) + expect(axiosMock.request).toHaveBeenCalledWith({ + method: 'MKCOL', + url, + headers: { + Destination: 'https://cloud.domain.com/remote.php/dav/files/test/image.jpg', + }, + }) + }) }) describe('Upload data', () => { @@ -112,6 +139,29 @@ describe('Upload data', () => { }) }) + test('Upload data stream with destination', async () => { + axiosMock.request = vi.fn((config: any) => Promise.resolve(config?.onUploadProgress())) + + const url = 'https://cloud.domain.com/remote.php/dav/files/test/image.jpg' + const blob = new Blob([new ArrayBuffer(50 * 1024 * 1024)]) + const signal = new AbortController().signal + const onUploadProgress = vi.fn() + await uploadData(url, blob, signal, onUploadProgress, url) + + expect(onUploadProgress).toHaveBeenCalledTimes(1) + expect(axiosMock.request).toHaveBeenCalledTimes(1) + expect(axiosMock.request).toHaveBeenCalledWith({ + method: 'PUT', + url, + data: blob, + signal, + onUploadProgress, + headers: { + Destination: url, + }, + }) + }) + test('Upload cancellation', async () => { axiosMock.request = vi.fn((config: any) => Promise.resolve(config?.onUploadProgress())) diff --git a/lib/upload.ts b/lib/upload.ts index 8efc3bdd..cbfa7771 100644 --- a/lib/upload.ts +++ b/lib/upload.ts @@ -25,7 +25,7 @@ export class Upload { private _response: AxiosResponse|null = null constructor(source: string, chunked = false, size: number, file: File) { - const chunks = getMaxChunksSize() > 0 ? Math.ceil(size / getMaxChunksSize()) : 1 + const chunks = Math.min(getMaxChunksSize() > 0 ? Math.ceil(size / getMaxChunksSize()) : 1, 10000) this._source = source this._isChunked = chunked && getMaxChunksSize() > 0 && chunks > 1 this._chunks = this._isChunked ? 
chunks : 1 diff --git a/lib/uploader.ts b/lib/uploader.ts index 8f5f2cf8..9cfe662b 100644 --- a/lib/uploader.ts +++ b/lib/uploader.ts @@ -170,7 +170,7 @@ export class Uploader { // If manually disabled or if the file is too small // TODO: support chunk uploading in public pages - const maxChunkSize = getMaxChunksSize() + const maxChunkSize = getMaxChunksSize(file.size) const disabledChunkUpload = maxChunkSize === 0 || file.size < maxChunkSize || this._isPublic @@ -188,7 +188,7 @@ export class Uploader { logger.debug('Initializing chunked upload', { file, upload }) // Let's initialize a chunk upload - const tempUrl = await initChunkWorkspace() + const tempUrl = await initChunkWorkspace(destinationFile) const chunksQueue: Array<Promise<void>> = [] // Generate chunks array @@ -201,12 +201,12 @@ export class Uploader { // Init request queue const request = () => { - return uploadData(`${tempUrl}/${bufferEnd}`, blob, upload.signal, () => this.updateStats()) + return uploadData(`${tempUrl}/${chunk+1}`, blob, upload.signal, () => this.updateStats(), destinationFile) // Update upload progress on chunk completion .then(() => { upload.uploaded = upload.uploaded + maxChunkSize }) .catch((error) => { if (!(error instanceof CanceledError)) { - logger.error(`Chunk ${bufferStart} - ${bufferEnd} uploading failed`) + logger.error(`Chunk ${chunk+1} ${bufferStart} - ${bufferEnd} uploading failed`) upload.status = UploadStatus.FAILED } throw error diff --git a/lib/utils/config.ts b/lib/utils/config.ts index 0c4777e2..c0d5bbbb 100644 --- a/lib/utils/config.ts +++ b/lib/utils/config.ts @@ -1,4 +1,4 @@ -export const getMaxChunksSize = function(): number { +export const getMaxChunksSize = function(fileSize: number | undefined = undefined): number { const maxChunkSize = window.OC?.appConfig?.files?.max_chunk_size if (maxChunkSize <= 0) { return 0 @@ -9,5 +9,13 @@ export const getMaxChunksSize = function(): number { return 10 * 1024 * 1024 } - return Number(maxChunkSize) + // v2 of chunked upload
requires chunks to be 5 MB at minimum + const minimumChunkSize = Math.max(Number(maxChunkSize), 5 * 1024 * 1024) + + if (fileSize === undefined) { + return minimumChunkSize + } + + // Adapt chunk size to fit the file in 10000 chunks for chunked upload v2 + return Math.max(minimumChunkSize, Math.ceil(fileSize / 10000)) } diff --git a/lib/utils/upload.ts b/lib/utils/upload.ts index 664bd47f..5716af26 100644 --- a/lib/utils/upload.ts +++ b/lib/utils/upload.ts @@ -12,7 +12,7 @@ type UploadData = Blob | (() => Promise<Blob>) /** * Upload some data to a given path */ -export const uploadData = async function(url: string, uploadData: UploadData, signal: AbortSignal, onUploadProgress = () => {}): Promise<AxiosResponse> { +export const uploadData = async function(url: string, uploadData: UploadData, signal: AbortSignal, onUploadProgress = () => {}, destinationFile: string | undefined = undefined): Promise<AxiosResponse> { let data: Blob if (uploadData instanceof Blob) { @@ -21,12 +21,15 @@ export const uploadData = async function(url: string, uploadData: UploadData, si data = await uploadData() } + const headers = destinationFile ? { Destination: destinationFile } : undefined + return await axios.request({ method: 'PUT', url, data, signal, onUploadProgress, + headers, }) } @@ -57,15 +60,17 @@ export const getChunk = function(file: File, start: number, length: number): Pro /** * Create a temporary upload workspace to upload the chunks to */ -export const initChunkWorkspace = async function(): Promise<string> { +export const initChunkWorkspace = async function(destinationFile: string | undefined = undefined): Promise<string> { const chunksWorkspace = generateRemoteUrl(`dav/uploads/${getCurrentUser()?.uid}`) const hash = [...Array(16)].map(() => Math.floor(Math.random() * 16).toString(16)).join('') const tempWorkspace = `web-file-upload-${hash}` const url = `${chunksWorkspace}/${tempWorkspace}` + const headers = destinationFile ?
{ Destination: destinationFile } : undefined await axios.request({ method: 'MKCOL', url, + headers, }) return url