chore: merge changes from storage-browser/integrity to storage-browser/main #13645

Merged · 4 commits from storage-browser/integrity into storage-browser/main · Jul 25, 2024
2 changes: 1 addition & 1 deletion packages/aws-amplify/package.json
@@ -497,7 +497,7 @@
"name": "[Storage] uploadData (S3)",
"path": "./dist/esm/storage/index.mjs",
"import": "{ uploadData }",
"limit": "20.18 kB"
"limit": "20.30 kB"
}
]
}
@@ -927,6 +927,84 @@ describe('getMultipartUploadHandlers with path', () => {
expect(mockCompleteMultipartUpload).not.toHaveBeenCalled();
});

describe('overwrite prevention', () => {
beforeEach(() => {
mockHeadObject.mockReset();
mockUploadPart.mockReset();
});

it('should upload if target key is not found', async () => {
expect.assertions(7);
const notFoundError = new Error('mock message');
notFoundError.name = 'NotFound';
mockHeadObject.mockRejectedValueOnce(notFoundError);
mockMultipartUploadSuccess();

const { multipartUploadJob } = getMultipartUploadHandlers({
path: testPath,
data: new ArrayBuffer(8 * MB),
options: { preventOverwrite: true },
});
await multipartUploadJob();

expect(mockCreateMultipartUpload).toHaveBeenCalledTimes(1);
expect(mockUploadPart).toHaveBeenCalledTimes(2);
expect(mockHeadObject).toHaveBeenCalledTimes(1);
await expect(mockHeadObject).toBeLastCalledWithConfigAndInput(
expect.objectContaining({
credentials,
region,
}),
expect.objectContaining({
Bucket: bucket,
Key: testPath,
}),
);
expect(mockCompleteMultipartUpload).toHaveBeenCalledTimes(1);
});

it('should not upload if target key already exists', async () => {
expect.assertions(6);
mockHeadObject.mockResolvedValueOnce({
ContentLength: 0,
$metadata: {},
});
mockMultipartUploadSuccess();

const { multipartUploadJob } = getMultipartUploadHandlers({
path: testPath,
data: new ArrayBuffer(8 * MB),
options: { preventOverwrite: true },
});

await expect(multipartUploadJob()).rejects.toThrow(
'At least one of the pre-conditions you specified did not hold',
);
expect(mockCreateMultipartUpload).toHaveBeenCalledTimes(1);
expect(mockUploadPart).toHaveBeenCalledTimes(2);
expect(mockCompleteMultipartUpload).not.toHaveBeenCalled();
});

it('should not upload if HeadObject fails with other error', async () => {
expect.assertions(6);
const accessDeniedError = new Error('mock error');
accessDeniedError.name = 'AccessDenied';
mockHeadObject.mockRejectedValueOnce(accessDeniedError);
mockMultipartUploadSuccess();

const { multipartUploadJob } = getMultipartUploadHandlers({
path: testPath,
data: new ArrayBuffer(8 * MB),
options: { preventOverwrite: true },
});

await expect(multipartUploadJob()).rejects.toThrow('mock error');
expect(mockCreateMultipartUpload).toHaveBeenCalledTimes(1);
expect(mockUploadPart).toHaveBeenCalledTimes(2);
expect(mockCompleteMultipartUpload).not.toHaveBeenCalled();
});
});

describe('bucket passed in options', () => {
const mockData = 'Ü'.repeat(4 * MB);
it('should override bucket in putObject call when bucket as object', async () => {
@@ -4,7 +4,10 @@
import { AWSCredentials } from '@aws-amplify/core/internals/utils';
import { Amplify } from '@aws-amplify/core';

-import { putObject } from '../../../../../src/providers/s3/utils/client/s3data';
+import {
+  headObject,
+  putObject,
+} from '../../../../../src/providers/s3/utils/client/s3data';
import { calculateContentMd5 } from '../../../../../src/providers/s3/utils';
import { putObjectJob } from '../../../../../src/providers/s3/apis/uploadData/putObjectJob';
import '../testUtils';
@@ -38,6 +41,7 @@ const credentials: AWSCredentials = {
const identityId = 'identityId';
const mockFetchAuthSession = jest.mocked(Amplify.Auth.fetchAuthSession);
const mockPutObject = jest.mocked(putObject);
const mockHeadObject = jest.mocked(headObject);
const bucket = 'bucket';
const region = 'region';

@@ -307,6 +311,81 @@ describe('putObjectJob with path', () => {
expect(calculateContentMd5).toHaveBeenCalledWith('data');
});

describe('overwrite prevention', () => {
beforeEach(() => {
mockHeadObject.mockClear();
});

it('should upload if target key is not found', async () => {
expect.assertions(3);
const notFoundError = new Error('mock message');
notFoundError.name = 'NotFound';
mockHeadObject.mockRejectedValueOnce(notFoundError);

const job = putObjectJob(
{
path: testPath,
data: 'data',
options: { preventOverwrite: true },
},
new AbortController().signal,
);
await job();

await expect(mockHeadObject).toBeLastCalledWithConfigAndInput(
{
credentials,
region: 'region',
},
{
Bucket: 'bucket',
Key: testPath,
},
);
expect(mockHeadObject).toHaveBeenCalledTimes(1);
expect(mockPutObject).toHaveBeenCalledTimes(1);
});

it('should not upload if target key already exists', async () => {
expect.assertions(3);
mockHeadObject.mockResolvedValueOnce({
ContentLength: 0,
$metadata: {},
});
const job = putObjectJob(
{
path: testPath,
data: 'data',
options: { preventOverwrite: true },
},
new AbortController().signal,
);
await expect(job()).rejects.toThrow(
'At least one of the pre-conditions you specified did not hold',
);
expect(mockHeadObject).toHaveBeenCalledTimes(1);
expect(mockPutObject).not.toHaveBeenCalled();
});

it('should not upload if HeadObject fails with other error', async () => {
expect.assertions(3);
const accessDeniedError = new Error('mock error');
accessDeniedError.name = 'AccessDenied';
mockHeadObject.mockRejectedValueOnce(accessDeniedError);
const job = putObjectJob(
{
path: testPath,
data: 'data',
options: { preventOverwrite: true },
},
new AbortController().signal,
);
await expect(job()).rejects.toThrow('mock error');
expect(mockHeadObject).toHaveBeenCalledTimes(1);
expect(mockPutObject).not.toHaveBeenCalled();
});
});

describe('bucket passed in options', () => {
it('should override bucket in putObject call when bucket as object', async () => {
const abortController = new AbortController();
@@ -29,6 +29,7 @@ import {
} from '../../../utils/client/s3data';
import { getStorageUserAgentValue } from '../../../utils/userAgent';
import { logger } from '../../../../../utils';
import { validateObjectNotExists } from '../validateObjectNotExists';

import { uploadPartExecutor } from './uploadPartExecutor';
import { getUploadsCacheKey, removeCachedUpload } from './uploadCache';
@@ -92,6 +93,7 @@ export const getMultipartUploadHandlers = (
contentEncoding,
contentType = 'application/octet-stream',
metadata,
preventOverwrite,
onProgress,
} = uploadDataOptions ?? {};

@@ -175,6 +177,13 @@

await Promise.all(concurrentUploadPartExecutors);

if (preventOverwrite) {
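// All parts have finished uploading at this point; confirm the target key still
// does not exist before CompleteMultipartUpload so an existing object is never replaced.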
await validateObjectNotExists(resolvedS3Config, {
Bucket: resolvedBucket,
Key: finalKey,
});
}

const { ETag: eTag } = await completeMultipartUpload(
{
...resolvedS3Config,
@@ -15,6 +15,8 @@ import { putObject } from '../../utils/client/s3data';
import { getStorageUserAgentValue } from '../../utils/userAgent';
import { STORAGE_INPUT_KEY } from '../../utils/constants';

import { validateObjectNotExists } from './validateObjectNotExists';

/**
* Get a function that returns a promise to call putObject API to S3.
*
@@ -41,10 +43,18 @@ export const putObjectJob =
contentDisposition,
contentEncoding,
contentType = 'application/octet-stream',
preventOverwrite,
metadata,
onProgress,
} = uploadDataOptions ?? {};

if (preventOverwrite) {
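// Check for an existing object before issuing PutObject, so the upload
// fails fast without sending the payload when the key is already taken.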
await validateObjectNotExists(s3Config, {
Bucket: bucket,
Key: finalKey,
});
}

const { ETag: eTag, VersionId: versionId } = await putObject(
{
...s3Config,
@@ -0,0 +1,25 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0

import { StorageError } from '../../../../errors/StorageError';
import { ResolvedS3Config } from '../../types/options';
import { HeadObjectInput, headObject } from '../../utils/client/s3data';

export const validateObjectNotExists = async (
s3Config: ResolvedS3Config,
input: HeadObjectInput,
): Promise<void> => {
try {
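// headObject resolving means an object already exists at this key;
// treat that as a failed precondition.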
await headObject(s3Config, input);

throw new StorageError({
name: 'PreconditionFailed',
message: 'At least one of the pre-conditions you specified did not hold',
});
} catch (error) {
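// A NotFound error means the key is free and the upload may proceed; any other
// error (including the PreconditionFailed thrown above) is rethrown.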
const serviceError = error as StorageError;
if (serviceError.name !== 'NotFound') {
throw error;
}
}
};
5 changes: 5 additions & 0 deletions packages/storage/src/providers/s3/types/options.ts
@@ -181,6 +181,11 @@ export type UploadDataOptions = CommonOptions &
* @see https://docs.aws.amazon.com/AmazonS3/latest/userguide/UsingMetadata.html#UserMetadata
*/
metadata?: Record<string, string>;
/**
* Enforces that the target key does not already exist in S3 before committing the upload.
* @default false
*/
preventOverwrite?: boolean;
};

/** @deprecated Use {@link UploadDataOptionsWithPath} instead. */
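
A minimal end-to-end usage sketch of the new option, assuming the public uploadData API surfaces preventOverwrite unchanged; the path, helper name, and error-name check below are illustrative rather than taken from this PR:

import { uploadData } from 'aws-amplify/storage';

async function uploadWithoutOverwriting(file: Blob) {
  try {
    // preventOverwrite makes the upload fail instead of replacing an existing object.
    const { result } = uploadData({
      path: 'public/report.csv', // hypothetical target key
      data: file,
      options: { preventOverwrite: true },
    });
    await result;
  } catch (error) {
    // validateObjectNotExists throws a StorageError named 'PreconditionFailed'
    // when the key already exists; other errors are rethrown unchanged.
    if ((error as Error).name === 'PreconditionFailed') {
      console.warn('Object already exists; nothing was uploaded.');
      return;
    }
    throw error;
  }
}

Note that, as implemented here, the check is a separate HeadObject call made before PutObject (or before CompleteMultipartUpload in the multipart flow), not a service-side write precondition, so a concurrent writer could still create the key between the check and the write; the option guards against accidental overwrites rather than providing an atomic guarantee.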