feat: Implement support for chunking v2 API #925

Merged 1 commit on Sep 5, 2023
10 changes: 10 additions & 0 deletions __tests__/utils/config.spec.ts
@@ -7,6 +7,16 @@ describe('Max chunk size tests', () => {
expect(getMaxChunksSize()).toBe(15 * 1024 * 1024)
})

test('Returning valid config for chunking v2 minimum size', () => {
Object.assign(window, {OC: {appConfig: {files: { max_chunk_size: 4 * 1024 * 1024 }}}})
expect(getMaxChunksSize()).toBe(5 * 1024 * 1024)
})

test('Returning valid config for chunking v2 maximum chunk count', () => {
Object.assign(window, {OC: {appConfig: {files: { max_chunk_size: 5 * 1024 * 1024 }}}})
expect(getMaxChunksSize(50 * 1024 * 1024 * 10000)).toBe(5 * 1024 * 1024 * 10)
})

test('Returning disabled chunking config', () => {
Object.assign(window, {OC: {appConfig: {files: { max_chunk_size: 0 }}}})
expect(getMaxChunksSize()).toBe(0)
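The expected values in the two new tests follow directly from the chunking v2 rules introduced in lib/utils/config.ts further down; a minimal sketch of the arithmetic, using the same numbers as the tests:

```ts
// Worked arithmetic for the two expectations above (values copied from the tests)
const MiB = 1024 * 1024

// 1) A configured max_chunk_size of 4 MiB is below the 5 MiB minimum required
//    by chunking v2, so the effective chunk size is raised to 5 MiB.
const effectiveMinimum = Math.max(4 * MiB, 5 * MiB) // 5 * 1024 * 1024

// 2) For a 50 MiB * 10000 file, 5 MiB chunks would need 100000 chunks, so the
//    chunk size grows until the file fits into the 10000-chunk limit.
const fileSize = 50 * MiB * 10000
const adaptedChunkSize = Math.max(effectiveMinimum, Math.ceil(fileSize / 10000)) // 5 * 1024 * 1024 * 10
```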
50 changes: 50 additions & 0 deletions __tests__/utils/upload.spec.ts
@@ -67,6 +67,33 @@ describe('Initialize chunks upload temporary workspace', () => {
url,
})
})

test('Init random workspace for file destination', async () => {
axiosMock.request = vi.fn((config: any) => Promise.resolve(config?.onUploadProgress?.()))

// mock the current location for our assert on the URL
Object.defineProperty(window, 'location', {
value: new URL('https://cloud.domain.com'),
configurable: true,
})

// mock the current user
document.head.setAttribute('data-user', 'test')

const url = await initChunkWorkspace('https://cloud.domain.com/remote.php/dav/files/test/image.jpg')

expect(url.startsWith('https://cloud.domain.com/remote.php/dav/uploads/test/web-file-upload-')).toBe(true)
expect(url.length).toEqual('https://cloud.domain.com/remote.php/dav/uploads/test/web-file-upload-123456789abcdefg'.length)

expect(axiosMock.request).toHaveBeenCalledTimes(1)
expect(axiosMock.request).toHaveBeenCalledWith({
method: 'MKCOL',
url,
headers: {
Destination: 'https://cloud.domain.com/remote.php/dav/files/test/image.jpg',
},
})
})
})

describe('Upload data', () => {
@@ -112,6 +139,29 @@ describe('Upload data', () => {
})
})

test('Upload data stream with destination', async () => {
axiosMock.request = vi.fn((config: any) => Promise.resolve(config?.onUploadProgress()))

const url = 'https://cloud.domain.com/remote.php/dav/files/test/image.jpg'
const blob = new Blob([new ArrayBuffer(50 * 1024 * 1024)])
const signal = new AbortController().signal
const onUploadProgress = vi.fn()
await uploadData(url, blob, signal, onUploadProgress, url)

expect(onUploadProgress).toHaveBeenCalledTimes(1)
expect(axiosMock.request).toHaveBeenCalledTimes(1)
expect(axiosMock.request).toHaveBeenCalledWith({
method: 'PUT',
url,
data: blob,
signal,
onUploadProgress,
headers: {
Destination: url,
},
})
})

test('Upload cancellation', async () => {
axiosMock.request = vi.fn((config: any) => Promise.resolve(config?.onUploadProgress()))

2 changes: 1 addition & 1 deletion lib/upload.ts
@@ -25,7 +25,7 @@ export class Upload {
private _response: AxiosResponse|null = null

constructor(source: string, chunked = false, size: number, file: File) {
const chunks = getMaxChunksSize() > 0 ? Math.ceil(size / getMaxChunksSize()) : 1
const chunks = Math.min(getMaxChunksSize() > 0 ? Math.ceil(size / getMaxChunksSize()) : 1, 10000)
this._source = source
this._isChunked = chunked && getMaxChunksSize() > 0 && chunks > 1
this._chunks = this._isChunked ? chunks : 1
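The added Math.min mirrors the 10000-chunk ceiling of chunking v2, so the reported chunk count stays within the limit even though getMaxChunksSize() is called here without a file size; a rough illustration with made-up numbers:

```ts
// Illustration with made-up numbers: a 100 GiB file split into 10 MiB chunks
// would need 10240 chunks, which exceeds the v2 limit, so the count is clamped.
const MiB = 1024 * 1024
const size = 100 * 1024 * MiB          // 100 GiB
const maxChunkSize = 10 * MiB

const unclamped = Math.ceil(size / maxChunkSize) // 10240
const chunks = Math.min(unclamped, 10000)        // 10000
```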
8 changes: 4 additions & 4 deletions lib/uploader.ts
@@ -170,7 +170,7 @@ export class Uploader {

// If manually disabled or if the file is too small
// TODO: support chunk uploading in public pages
const maxChunkSize = getMaxChunksSize()
const maxChunkSize = getMaxChunksSize(file.size)
const disabledChunkUpload = maxChunkSize === 0
|| file.size < maxChunkSize
|| this._isPublic
@@ -188,7 +188,7 @@ export class Uploader {
logger.debug('Initializing chunked upload', { file, upload })

// Let's initialize a chunk upload
const tempUrl = await initChunkWorkspace()
const tempUrl = await initChunkWorkspace(destinationFile)
const chunksQueue: Array<Promise<any>> = []

// Generate chunks array
@@ -201,12 +201,12 @@ export class Uploader {

// Init request queue
const request = () => {
return uploadData(`${tempUrl}/${bufferEnd}`, blob, upload.signal, () => this.updateStats())
return uploadData(`${tempUrl}/${chunk+1}`, blob, upload.signal, () => this.updateStats(), destinationFile)
// Update upload progress on chunk completion
.then(() => { upload.uploaded = upload.uploaded + maxChunkSize })
.catch((error) => {
if (!(error instanceof CanceledError)) {
logger.error(`Chunk ${bufferStart} - ${bufferEnd} uploading failed`)
logger.error(`Chunk ${chunk+1} ${bufferStart} - ${bufferEnd} uploading failed`)
upload.status = UploadStatus.FAILED
}
throw error
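With this hunk each chunk is uploaded to a sequential index inside the temporary workspace instead of to its byte offset, matching the sequential chunk names used by chunking v2, and the final target travels along as the Destination header; a schematic of the resulting requests (URLs and sizes below are placeholders, not values from the diff):

```ts
// Schematic of the chunk requests built by the queue above, for a hypothetical
// 25 MiB file and 10 MiB chunk size; both URLs are placeholders.
const tempUrl = 'https://cloud.example.com/remote.php/dav/uploads/user/web-file-upload-0123456789abcdef'
const destinationFile = 'https://cloud.example.com/remote.php/dav/files/user/video.mp4'

const chunkCount = Math.ceil(25 / 10) // 3
for (let chunk = 0; chunk < chunkCount; chunk++) {
    // Chunks are named 1..n inside the workspace, hence `${tempUrl}/${chunk + 1}`
    // instead of the former byte offset, and each PUT announces the final target.
    console.log('PUT', `${tempUrl}/${chunk + 1}`, { Destination: destinationFile })
}
```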
12 changes: 10 additions & 2 deletions lib/utils/config.ts
@@ -1,4 +1,4 @@
export const getMaxChunksSize = function(): number {
export const getMaxChunksSize = function(fileSize: number | undefined = undefined): number {
const maxChunkSize = window.OC?.appConfig?.files?.max_chunk_size
if (maxChunkSize <= 0) {
return 0
@@ -9,5 +9,13 @@ export const getMaxChunksSize = function(): number {
return 10 * 1024 * 1024
}

return Number(maxChunkSize)
// v2 of chunked upload requires chunks to be 5 MB at minimum
const minimumChunkSize = Math.max(Number(maxChunkSize), 5 * 1024 * 1024)

if (fileSize === undefined) {
return minimumChunkSize
}

// Adapt chunk size to fit the file in 10000 chunks for chunked upload v2
return Math.max(minimumChunkSize, Math.ceil(fileSize / 10000))
}
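Taken together, getMaxChunksSize now enforces the 5 MiB minimum chunk size of chunking v2 and, when a file size is passed in, grows the chunk size so that no upload needs more than 10000 chunks; a small usage sketch with illustrative sizes:

```ts
// Usage sketch with illustrative file sizes; assumes max_chunk_size is configured
// as 5 MiB and that getMaxChunksSize is imported from this module.
const MiB = 1024 * 1024
const GiB = 1024 * MiB

getMaxChunksSize()            // 5 MiB: no file size given, only the floor applies
getMaxChunksSize(1 * GiB)     // 5 MiB: 1 GiB still fits in 205 chunks of 5 MiB
getMaxChunksSize(100 * GiB)   // ~10.24 MiB: ceil(100 GiB / 10000) is above the 5 MiB floor
```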
9 changes: 7 additions & 2 deletions lib/utils/upload.ts
@@ -12,7 +12,7 @@ type UploadData = Blob | (() => Promise<Blob>)
/**
* Upload some data to a given path
*/
export const uploadData = async function(url: string, uploadData: UploadData, signal: AbortSignal, onUploadProgress = () => {}): Promise<AxiosResponse> {
export const uploadData = async function(url: string, uploadData: UploadData, signal: AbortSignal, onUploadProgress = () => {}, destinationFile: string | undefined = undefined): Promise<AxiosResponse> {
let data: Blob

if (uploadData instanceof Blob) {
@@ -21,12 +21,15 @@ export const uploadData = async function(url: string, uploadData: UploadData, si
data = await uploadData()
}

const headers = destinationFile ? { Destination: destinationFile } : undefined

return await axios.request({
method: 'PUT',
url,
data,
signal,
onUploadProgress,
headers,
})
}

@@ -57,15 +60,17 @@ export const getChunk = function(file: File, start: number, length: number): Pro
/**
* Create a temporary upload workspace to upload the chunks to
*/
export const initChunkWorkspace = async function(): Promise<string> {
export const initChunkWorkspace = async function(destinationFile: string | undefined = undefined): Promise<string> {
const chunksWorkspace = generateRemoteUrl(`dav/uploads/${getCurrentUser()?.uid}`)
const hash = [...Array(16)].map(() => Math.floor(Math.random() * 16).toString(16)).join('')
const tempWorkspace = `web-file-upload-${hash}`
const url = `${chunksWorkspace}/${tempWorkspace}`
const headers = destinationFile ? { Destination: destinationFile } : undefined

await axios.request({
method: 'MKCOL',
url,
headers,
})

return url
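Both helpers attach the Destination header only when a destination is actually passed, so existing callers keep their previous behaviour. Below is a minimal combined sketch of the new call pattern; the import path, URLs and chunk content are placeholders, not values from this pull request:

```ts
// Minimal combined sketch of the new call pattern; the import path, URLs and
// chunk content are placeholders, not values from this pull request.
import { initChunkWorkspace, uploadData } from './upload'

async function uploadInChunks(): Promise<void> {
    const destination = 'https://cloud.example.com/remote.php/dav/files/user/photo.jpg'

    // MKCOL dav/uploads/<uid>/web-file-upload-<16 hex chars>, announcing the target
    const workspace = await initChunkWorkspace(destination)

    // PUT one chunk as ".../1", again carrying the Destination header
    const chunk = new Blob([new ArrayBuffer(5 * 1024 * 1024)])
    const controller = new AbortController()
    await uploadData(`${workspace}/1`, chunk, controller.signal, () => {}, destination)
}
```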