chore(storage): explicitly validate for the upload data payload size
AllanZhengYP committed Nov 1, 2024
1 parent 07883e4 commit 5a9a212
Showing 7 changed files with 125 additions and 75 deletions.
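
For context on what the test changes below assert: the upload entry points now appear to resolve the payload's byte length up front and reject data whose size cannot be determined, rather than deferring that check. The following is a minimal sketch of that validation only; the helper names (byteLength, validateUploadSource) are assumptions for illustration, not the exact Amplify Storage implementation.

// Hedged sketch, assuming hypothetical helper names; the real library surfaces
// validationErrorMap[StorageValidationErrorCode.InvalidUploadSource] instead of
// the placeholder Error thrown here.
const byteLength = (input: unknown): number | undefined => {
  if (typeof input === 'string') {
    return new TextEncoder().encode(input).byteLength;
  }
  if (input instanceof ArrayBuffer || ArrayBuffer.isView(input)) {
    return input.byteLength;
  }
  if (typeof Blob !== 'undefined' && input instanceof Blob) {
    return input.size;
  }

  return undefined; // size unknown (plain objects, streams, etc.)
};

const validateUploadSource = (data: unknown): number => {
  const totalLength = byteLength(data);
  if (totalLength === undefined) {
    // Placeholder message; the tests below match the InvalidUploadSource entry
    // from validationErrorMap.
    throw new Error('InvalidUploadSource: upload data has no determinable size.');
  }

  return totalLength; // handed to the multipart handlers as an explicit argument
};

With the size resolved eagerly, the first two test changes flip from expecting no error ("should NOT throw if data size is unknown") to expecting a synchronous throw.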
@@ -55,12 +55,17 @@ describe('uploadData with key', () => {
     );
   });

-  it('should NOT throw if data size is unknown', async () => {
-    uploadData({
-      key: 'key',
-      data: {} as any,
-    });
-    expect(mockCreateUploadTask).toHaveBeenCalled();
+  it('should throw if data size is unknown', async () => {
+    expect(() =>
+      uploadData({
+        key: 'key',
+        data: {} as any,
+      }),
+    ).toThrow(
+      expect.objectContaining(
+        validationErrorMap[StorageValidationErrorCode.InvalidUploadSource],
+      ),
+    );
   });
 });

@@ -166,12 +171,17 @@ describe('uploadData with path', () => {
     );
   });

-  it('should NOT throw if data size is unknown', async () => {
-    uploadData({
-      path: testPath,
-      data: {} as any,
-    });
-    expect(mockCreateUploadTask).toHaveBeenCalled();
+  it('should throw if data size is unknown', async () => {
+    expect(() =>
+      uploadData({
+        path: testPath,
+        data: {} as any,
+      }),
+    ).toThrow(
+      expect.objectContaining(
+        validationErrorMap[StorageValidationErrorCode.InvalidUploadSource],
+      ),
+    );
   });
 });

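The multipart-upload tests in the next file reflect two related changes: byteLength is now treated as possibly returning undefined (hence the non-null assertions, byteLength(...)!), and the resolved total size is passed to getMultipartUploadHandlers as an explicit second argument. A rough signature sketch of the shape these tests exercise; the parameter names and the partial return type are assumptions, not the library's published API:

// Assumed shape only, inferred from how the tests call the function.
type GetMultipartUploadHandlers = (
  uploadDataInput: { key?: string; path?: string; data: unknown; options?: object },
  totalLength: number, // byte count resolved and validated by the caller
) => {
  multipartUploadJob: () => Promise<unknown>;
  onCancel: () => void; // pause/resume handlers omitted from this sketch
};
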
@@ -244,7 +244,7 @@ describe('getMultipartUploadHandlers with key', () => {
         data: twoPartsPayload,
         options: options as StorageOptions,
       },
-      byteLength(twoPartsPayload),
+      byteLength(twoPartsPayload)!,
     );
     const result = await multipartUploadJob();
     await expect(

@@ -293,7 +293,7 @@ describe('getMultipartUploadHandlers with key', () => {
           checksumAlgorithm: CHECKSUM_ALGORITHM_CRC32,
         },
       },
-      byteLength(twoPartsPayload),
+      byteLength(twoPartsPayload)!,
     );
     await multipartUploadJob();

@@ -346,10 +346,13 @@ describe('getMultipartUploadHandlers with key', () => {

   it('should throw if unsupported payload type is provided', async () => {
     mockMultipartUploadSuccess();
-    const { multipartUploadJob } = getMultipartUploadHandlers({
-      key: defaultKey,
-      data: 1 as any,
-    });
+    const { multipartUploadJob } = getMultipartUploadHandlers(
+      {
+        key: defaultKey,
+        data: 1 as any,
+      },
+      1,
+    );
     await expect(multipartUploadJob()).rejects.toThrow(
       expect.objectContaining(
         validationErrorMap[StorageValidationErrorCode.InvalidUploadSource],

@@ -427,10 +430,13 @@ describe('getMultipartUploadHandlers with key', () => {
     mockCreateMultipartUpload.mockReset();
     mockCreateMultipartUpload.mockRejectedValueOnce(new Error('error'));

-    const { multipartUploadJob } = getMultipartUploadHandlers({
-      key: defaultKey,
-      data: new ArrayBuffer(8 * MB),
-    });
+    const { multipartUploadJob } = getMultipartUploadHandlers(
+      {
+        key: defaultKey,
+        data: new ArrayBuffer(8 * MB),
+      },
+      8 * MB,
+    );
     await expect(multipartUploadJob()).rejects.toThrow('error');
   });

@@ -440,10 +446,13 @@ describe('getMultipartUploadHandlers with key', () => {
     mockCompleteMultipartUpload.mockReset();
     mockCompleteMultipartUpload.mockRejectedValueOnce(new Error('error'));

-    const { multipartUploadJob } = getMultipartUploadHandlers({
-      key: defaultKey,
-      data: new ArrayBuffer(8 * MB),
-    });
+    const { multipartUploadJob } = getMultipartUploadHandlers(
+      {
+        key: defaultKey,
+        data: new ArrayBuffer(8 * MB),
+      },
+      8 * MB,
+    );
     await expect(multipartUploadJob()).rejects.toThrow('error');
   });

@@ -458,10 +467,13 @@ describe('getMultipartUploadHandlers with key', () => {
     });
     mockUploadPart.mockRejectedValueOnce(new Error('error'));

-    const { multipartUploadJob } = getMultipartUploadHandlers({
-      key: defaultKey,
-      data: new ArrayBuffer(8 * MB),
-    });
+    const { multipartUploadJob } = getMultipartUploadHandlers(
+      {
+        key: defaultKey,
+        data: new ArrayBuffer(8 * MB),
+      },
+      8 * MB,
+    );
     await expect(multipartUploadJob()).rejects.toThrow('error');
     expect(mockUploadPart).toHaveBeenCalledTimes(2);
     expect(mockCompleteMultipartUpload).not.toHaveBeenCalled();

@@ -481,7 +493,7 @@ describe('getMultipartUploadHandlers with key', () => {
           bucket: { bucketName: mockBucket, region: mockRegion },
         },
       },
-      byteLength(mockData),
+      byteLength(mockData)!,
     );
     await multipartUploadJob();
     await expect(

@@ -510,7 +522,7 @@ describe('getMultipartUploadHandlers with key', () => {
           bucket: 'default-bucket',
         },
       },
-      byteLength(mockData),
+      byteLength(mockData)!,
     );
     await multipartUploadJob();
     await expect(

@@ -800,10 +812,13 @@ describe('getMultipartUploadHandlers with key', () => {

   describe('cancel()', () => {
     it('should abort in-flight uploadPart requests and throw if upload is canceled', async () => {
-      const { multipartUploadJob, onCancel } = getMultipartUploadHandlers({
-        key: defaultKey,
-        data: new ArrayBuffer(8 * MB),
-      });
+      const { multipartUploadJob, onCancel } = getMultipartUploadHandlers(
+        {
+          key: defaultKey,
+          data: new ArrayBuffer(8 * MB),
+        },
+        8 * MB,
+      );
       let partCount = 0;
       mockMultipartUploadCancellation(() => {
         partCount++;

@@ -1007,7 +1022,7 @@ describe('getMultipartUploadHandlers with path', () => {
         path: inputPath,
         data: twoPartsPayload,
       },
-      byteLength(twoPartsPayload),
+      byteLength(twoPartsPayload)!,
     );
     const result = await multipartUploadJob();
     await expect(

@@ -1056,7 +1071,7 @@ describe('getMultipartUploadHandlers with path', () => {
           checksumAlgorithm: CHECKSUM_ALGORITHM_CRC32,
         },
       },
-      byteLength(twoPartsPayload),
+      byteLength(twoPartsPayload)!,
     );
     await multipartUploadJob();

@@ -1109,10 +1124,13 @@ describe('getMultipartUploadHandlers with path', () => {

   it('should throw if unsupported payload type is provided', async () => {
     mockMultipartUploadSuccess();
-    const { multipartUploadJob } = getMultipartUploadHandlers({
-      path: testPath,
-      data: 1 as any,
-    });
+    const { multipartUploadJob } = getMultipartUploadHandlers(
+      {
+        path: testPath,
+        data: 1 as any,
+      },
+      1,
+    );
     await expect(multipartUploadJob()).rejects.toThrow(
       expect.objectContaining(
         validationErrorMap[StorageValidationErrorCode.InvalidUploadSource],

@@ -1190,10 +1208,13 @@ describe('getMultipartUploadHandlers with path', () => {
     mockCreateMultipartUpload.mockReset();
     mockCreateMultipartUpload.mockRejectedValueOnce(new Error('error'));

-    const { multipartUploadJob } = getMultipartUploadHandlers({
-      path: testPath,
-      data: new ArrayBuffer(8 * MB),
-    });
+    const { multipartUploadJob } = getMultipartUploadHandlers(
+      {
+        path: testPath,
+        data: new ArrayBuffer(8 * MB),
+      },
+      8 * MB,
+    );
     await expect(multipartUploadJob()).rejects.toThrow('error');
   });

@@ -1203,10 +1224,13 @@ describe('getMultipartUploadHandlers with path', () => {
     mockCompleteMultipartUpload.mockReset();
     mockCompleteMultipartUpload.mockRejectedValueOnce(new Error('error'));

-    const { multipartUploadJob } = getMultipartUploadHandlers({
-      path: testPath,
-      data: new ArrayBuffer(8 * MB),
-    });
+    const { multipartUploadJob } = getMultipartUploadHandlers(
+      {
+        path: testPath,
+        data: new ArrayBuffer(8 * MB),
+      },
+      8 * MB,
+    );
     await expect(multipartUploadJob()).rejects.toThrow('error');
   });

@@ -1221,10 +1245,13 @@ describe('getMultipartUploadHandlers with path', () => {
     });
     mockUploadPart.mockRejectedValueOnce(new Error('error'));

-    const { multipartUploadJob } = getMultipartUploadHandlers({
-      path: testPath,
-      data: new ArrayBuffer(8 * MB),
-    });
+    const { multipartUploadJob } = getMultipartUploadHandlers(
+      {
+        path: testPath,
+        data: new ArrayBuffer(8 * MB),
+      },
+      8 * MB,
+    );
     await expect(multipartUploadJob()).rejects.toThrow('error');
     expect(mockUploadPart).toHaveBeenCalledTimes(2);
     expect(mockCompleteMultipartUpload).not.toHaveBeenCalled();

@@ -1273,7 +1300,7 @@ describe('getMultipartUploadHandlers with path', () => {
           bucket: { bucketName: mockBucket, region: mockRegion },
         },
       },
-      byteLength(mockData),
+      byteLength(mockData)!,
     );
     await multipartUploadJob();
     await expect(

@@ -1304,7 +1331,7 @@ describe('getMultipartUploadHandlers with path', () => {
           bucket: 'default-bucket',
         },
       },
-      byteLength(mockData),
+      byteLength(mockData)!,
     );
     await multipartUploadJob();
     await expect(

@@ -1596,10 +1623,13 @@ describe('getMultipartUploadHandlers with path', () => {

   describe('cancel()', () => {
     it('should abort in-flight uploadPart requests and throw if upload is canceled', async () => {
-      const { multipartUploadJob, onCancel } = getMultipartUploadHandlers({
-        path: testPath,
-        data: new ArrayBuffer(8 * MB),
-      });
+      const { multipartUploadJob, onCancel } = getMultipartUploadHandlers(
+        {
+          path: testPath,
+          data: new ArrayBuffer(8 * MB),
+        },
+        8 * MB,
+      );
       let partCount = 0;
       mockMultipartUploadCancellation(() => {
         partCount++;

