Skip to content

Commit

Permalink
feat(storage): update uploadData API to accept optional storage bucke…
Browse files Browse the repository at this point in the history
…t param (#5540)
  • Loading branch information
NikaHsn authored and Nika Hassani committed Nov 20, 2024
1 parent 26be63d commit ec5c892
Show file tree
Hide file tree
Showing 19 changed files with 466 additions and 70 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -144,6 +144,7 @@ class StorageCategory extends AmplifyCategory<StoragePluginInterface> {
required StoragePath path,
void Function(StorageTransferProgress)? onProgress,
StorageUploadDataOptions? options,
StorageBucket? bucket,
}) {
return identifyCall(
StorageCategoryMethod.uploadData,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,7 @@ abstract class StoragePluginInterface extends AmplifyPluginInterface {
required StorageDataPayload data,
void Function(StorageTransferProgress)? onProgress,
StorageUploadDataOptions? options,
StorageBucket? bucket,
}) {
throw UnimplementedError('uploadData() has not been implemented.');
}
Expand Down
10 changes: 9 additions & 1 deletion packages/amplify_core/lib/src/types/storage/bucket_info.dart
Original file line number Diff line number Diff line change
@@ -1,9 +1,17 @@
import 'package:amplify_core/amplify_core.dart';

/// {@template amplify_core.storage.bucket_info}
/// Represents storage bucket information.
/// {@endtemplate}
class BucketInfo {
class BucketInfo with AWSEquatable<BucketInfo> {
  /// {@macro amplify_core.storage.bucket_info}
  const BucketInfo({required this.bucketName, required this.region});

  /// The bucket's name.
  ///
  /// NOTE(review): presumably the unique (physical) S3 bucket name rather
  /// than the friendly name — confirm against `BucketOutputs.bucketName`.
  final String bucketName;

  /// The AWS region the bucket belongs to.
  final String region;

  // Value equality via AWSEquatable: two BucketInfo instances compare equal
  // when both bucketName and region match.
  @override
  List<Object?> get props => [
        bucketName,
        region,
      ];
}
Original file line number Diff line number Diff line change
Expand Up @@ -16,12 +16,7 @@ class StorageBucketFromOutputs implements StorageBucket {
BucketInfo resolveBucketInfo(StorageOutputs? storageOutputs) {
assert(
storageOutputs != null,
const InvalidStorageBucketException(
'Amplify Outputs file does not have storage configuration.',
recoverySuggestion:
'Make sure Amplify Storage is configured and the Amplify Outputs '
'file has storage configuration.',
),
'storageOutputs can not be null',
);
final buckets = storageOutputs!.buckets;
if (buckets == null) {
Expand Down
87 changes: 87 additions & 0 deletions packages/amplify_core/test/types/storage/storage_bucket_test.dart
Original file line number Diff line number Diff line change
@@ -0,0 +1,87 @@
import 'package:amplify_core/amplify_core.dart';
import 'package:amplify_core/src/config/amplify_outputs/storage/bucket_outputs.dart';
import 'package:amplify_core/src/config/amplify_outputs/storage/storage_outputs.dart';
import 'package:test/test.dart';

/// Unit tests for `StorageBucket.resolveBucketInfo`: resolution from a
/// [BucketInfo], from Amplify Outputs by friendly name, and the error
/// paths (null outputs, missing buckets list, unknown bucket name).
void main() {
  group('Storage bucket resolve BucketInfo', () {
    // Outputs entry for the default bucket fixture.
    const defaultOutputs = BucketOutputs(
      name: 'default-bucket-friendly-name',
      bucketName: 'default-bucket-unique-name',
      awsRegion: 'default-bucket-aws-region',
    );
    // Outputs entry for a second, non-default bucket fixture.
    const secondOutputs = BucketOutputs(
      name: 'second-bucket-friendly-name',
      bucketName: 'second-bucket-unique-name',
      awsRegion: 'second-bucket-aws-region',
    );
    // Expected BucketInfo values derived from the fixtures above.
    final defaultInfo = BucketInfo(
      bucketName: defaultOutputs.bucketName,
      region: defaultOutputs.awsRegion,
    );
    final secondInfo = BucketInfo(
      bucketName: secondOutputs.bucketName,
      region: secondOutputs.awsRegion,
    );
    // Storage outputs listing both buckets.
    final multiBucketOutputs = StorageOutputs(
      awsRegion: defaultOutputs.awsRegion,
      bucketName: defaultOutputs.bucketName,
      buckets: [defaultOutputs, secondOutputs],
    );
    // Storage outputs with no `buckets` list at all.
    final singleBucketOutputs = StorageOutputs(
      awsRegion: defaultOutputs.awsRegion,
      bucketName: defaultOutputs.bucketName,
    );

    test(
        'should return same bucket info when storage bucket is created from'
        ' a bucket info', () {
      final bucket = StorageBucket.fromBucketInfo(defaultInfo);
      // A bucket built directly from a BucketInfo ignores storage outputs.
      expect(bucket.resolveBucketInfo(null), defaultInfo);
    });

    test(
        'should return bucket info when storage bucket is created from'
        ' buckets in storage outputs', () {
      final bucket = StorageBucket.fromOutputs(secondOutputs.name);
      expect(bucket.resolveBucketInfo(multiBucketOutputs), secondInfo);
    });

    test(
        'should throw assertion error when storage bucket is created from'
        ' outputs and storage outputs is null', () {
      final bucket = StorageBucket.fromOutputs(defaultOutputs.name);
      expect(
        () => bucket.resolveBucketInfo(null),
        throwsA(isA<AssertionError>()),
      );
    });

    test(
        'should throw exception when storage bucket is created from outputs and'
        ' storage outputs does not have buckets', () {
      final bucket = StorageBucket.fromOutputs('bucket-name');
      expect(
        () => bucket.resolveBucketInfo(singleBucketOutputs),
        throwsA(isA<InvalidStorageBucketException>()),
      );
    });

    test(
        'should throw exception when storage bucket is created from outputs and'
        ' bucket name does not match any bucket in storage outputs', () {
      final bucket = StorageBucket.fromOutputs('invalid-bucket-name');
      expect(
        () => bucket.resolveBucketInfo(multiBucketOutputs),
        throwsA(isA<InvalidStorageBucketException>()),
      );
    });
  });
}
Original file line number Diff line number Diff line change
Expand Up @@ -274,6 +274,7 @@ class AmplifyStorageS3Dart extends StoragePluginInterface
required StoragePath path,
void Function(S3TransferProgress)? onProgress,
StorageUploadDataOptions? options,
StorageBucket? bucket,
}) {
final s3PluginOptions = reifyPluginOptions(
pluginOptions: options?.pluginOptions,
Expand All @@ -290,6 +291,7 @@ class AmplifyStorageS3Dart extends StoragePluginInterface
dataPayload: data,
options: s3Options,
onProgress: onProgress,
bucket: bucket,
);

return S3UploadDataOperation(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,15 @@ import 'package:smithy_aws/smithy_aws.dart';
/// It holds Amazon S3 client information.
@internal
class S3ClientInfo {
const S3ClientInfo({required this.client, required this.config});
const S3ClientInfo({
required this.client,
required this.config,
required this.bucketName,
required this.awsRegion,
});

final S3Client client;
final S3ClientConfig config;
final String bucketName;
final String awsRegion;
}
Original file line number Diff line number Diff line change
Expand Up @@ -330,15 +330,13 @@ class StorageS3Service {
FutureOr<void> Function()? onError,
StorageBucket? bucket,
}) {
// ignore: invalid_use_of_internal_member
final bucketName = bucket?.resolveBucketInfo(_storageOutputs).bucketName ??
_storageOutputs.bucketName;
final s3ClientInfo = _getS3ClientInfo(bucket);
final s3ClientInfo = getS3ClientInfo(storageBucket: bucket);
final uploadDataTask = S3UploadTask.fromDataPayload(
dataPayload,
s3Client: s3ClientInfo.client,
defaultS3ClientConfig: s3ClientInfo.config,
bucket: bucketName,
s3ClientConfig: s3ClientInfo.config,
bucket: s3ClientInfo.bucketName,
awsRegion: s3ClientInfo.awsRegion,
path: path,
options: options,
pathResolver: _pathResolver,
Expand Down Expand Up @@ -376,8 +374,9 @@ class StorageS3Service {
final uploadDataTask = S3UploadTask.fromAWSFile(
localFile,
s3Client: _defaultS3Client,
defaultS3ClientConfig: _defaultS3ClientConfig,
s3ClientConfig: _defaultS3ClientConfig,
bucket: _storageOutputs.bucketName,
awsRegion: _storageOutputs.awsRegion,
path: path,
options: uploadDataOptions,
pathResolver: _pathResolver,
Expand Down Expand Up @@ -604,29 +603,42 @@ class StorageS3Service {
Future<void> abortIncompleteMultipartUploads() async {
final records = await _transferDatabase
.getMultipartUploadRecordsCreatedBefore(_serviceStartingTime);

for (final record in records) {
final bucketInfo = BucketInfo(
bucketName: record.bucketName ?? _storageOutputs.bucketName,
region: record.awsRegion ?? _storageOutputs.awsRegion,
);
final request = s3.AbortMultipartUploadRequest.build((builder) {
builder
..bucket = _storageOutputs.bucketName
..bucket = bucketInfo.bucketName
..key = record.objectKey
..uploadId = record.uploadId;
});
final s3Client = getS3ClientInfo(
storageBucket: StorageBucket.fromBucketInfo(bucketInfo),
).client;

try {
await _defaultS3Client.abortMultipartUpload(request).result;
await s3Client.abortMultipartUpload(request).result;
await _transferDatabase.deleteTransferRecords(record.uploadId);
} on Exception catch (error) {
_logger.error('Failed to abort multipart upload due to: $error');
}
}
}

S3ClientInfo _getS3ClientInfo(StorageBucket? storageBucket) {
/// Creates and caches [S3ClientInfo] given the optional [storageBucket]
/// parameter. If the optional parameter is not provided it uses
/// StorageOutputs default bucket to create the [S3ClientInfo].
@internal
@visibleForTesting
S3ClientInfo getS3ClientInfo({StorageBucket? storageBucket}) {
if (storageBucket == null) {
return S3ClientInfo(
client: _defaultS3Client,
config: _defaultS3ClientConfig,
bucketName: _storageOutputs.bucketName,
awsRegion: _storageOutputs.awsRegion,
);
}
// ignore: invalid_use_of_internal_member
Expand Down Expand Up @@ -658,6 +670,8 @@ class StorageS3Service {
final s3ClientInfo = S3ClientInfo(
client: s3Client,
config: s3ClientConfig,
bucketName: bucketInfo.bucketName,
awsRegion: bucketInfo.region,
);
_s3ClientsInfo[bucketInfo.bucketName] = s3ClientInfo;
return s3ClientInfo;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,9 +48,10 @@ const fallbackContentType = 'application/octet-stream';
class S3UploadTask {
S3UploadTask._({
required s3.S3Client s3Client,
required smithy_aws.S3ClientConfig defaultS3ClientConfig,
required smithy_aws.S3ClientConfig s3ClientConfig,
required S3PathResolver pathResolver,
required String bucket,
required String awsRegion,
required StoragePath path,
required StorageUploadDataOptions options,
S3DataPayload? dataPayload,
Expand All @@ -59,9 +60,10 @@ class S3UploadTask {
required AWSLogger logger,
required transfer.TransferDatabase transferDatabase,
}) : _s3Client = s3Client,
_defaultS3ClientConfig = defaultS3ClientConfig,
_s3ClientConfig = s3ClientConfig,
_pathResolver = pathResolver,
_bucket = bucket,
_awsRegion = awsRegion,
_path = path,
_options = options,
_dataPayload = dataPayload,
Expand All @@ -81,19 +83,21 @@ class S3UploadTask {
S3UploadTask.fromDataPayload(
S3DataPayload dataPayload, {
required s3.S3Client s3Client,
required smithy_aws.S3ClientConfig defaultS3ClientConfig,
required smithy_aws.S3ClientConfig s3ClientConfig,
required S3PathResolver pathResolver,
required String bucket,
required String awsRegion,
required StoragePath path,
required StorageUploadDataOptions options,
void Function(S3TransferProgress)? onProgress,
required AWSLogger logger,
required transfer.TransferDatabase transferDatabase,
}) : this._(
s3Client: s3Client,
defaultS3ClientConfig: defaultS3ClientConfig,
s3ClientConfig: s3ClientConfig,
pathResolver: pathResolver,
bucket: bucket,
awsRegion: awsRegion,
path: path,
dataPayload: dataPayload,
options: options,
Expand All @@ -108,19 +112,21 @@ class S3UploadTask {
S3UploadTask.fromAWSFile(
AWSFile localFile, {
required s3.S3Client s3Client,
required smithy_aws.S3ClientConfig defaultS3ClientConfig,
required smithy_aws.S3ClientConfig s3ClientConfig,
required S3PathResolver pathResolver,
required String bucket,
required String awsRegion,
required StoragePath path,
required StorageUploadDataOptions options,
void Function(S3TransferProgress)? onProgress,
required AWSLogger logger,
required transfer.TransferDatabase transferDatabase,
}) : this._(
s3Client: s3Client,
defaultS3ClientConfig: defaultS3ClientConfig,
s3ClientConfig: s3ClientConfig,
pathResolver: pathResolver,
bucket: bucket,
awsRegion: awsRegion,
path: path,
localFile: localFile,
options: options,
Expand All @@ -135,9 +141,10 @@ class S3UploadTask {
final Completer<S3Item> _uploadCompleter = Completer();

final s3.S3Client _s3Client;
final smithy_aws.S3ClientConfig _defaultS3ClientConfig;
final smithy_aws.S3ClientConfig _s3ClientConfig;
final S3PathResolver _pathResolver;
final String _bucket;
final String _awsRegion;
final StoragePath _path;
final StorageUploadDataOptions _options;
final void Function(S3TransferProgress)? _onProgress;
Expand Down Expand Up @@ -191,7 +198,7 @@ class S3UploadTask {
/// Should be used only internally.
Future<void> start() async {
if (_s3PluginOptions.useAccelerateEndpoint &&
_defaultS3ClientConfig.usePathStyle) {
_s3ClientConfig.usePathStyle) {
_completeUploadWithError(s3_exception.accelerateEndpointUnusable);
return;
}
Expand Down Expand Up @@ -328,7 +335,7 @@ class S3UploadTask {
try {
_putObjectOperation = _s3Client.putObject(
putObjectRequest,
s3ClientConfig: _defaultS3ClientConfig.copyWith(
s3ClientConfig: _s3ClientConfig.copyWith(
useAcceleration: _s3PluginOptions.useAccelerateEndpoint,
),
);
Expand Down Expand Up @@ -497,6 +504,8 @@ class S3UploadTask {
TransferRecord(
uploadId: uploadId,
objectKey: _resolvedPath,
bucketName: _bucket,
awsRegion: _awsRegion,
createdAt: DateTime.now(),
),
);
Expand Down Expand Up @@ -651,7 +660,7 @@ class S3UploadTask {
try {
final operation = _s3Client.uploadPart(
request,
s3ClientConfig: _defaultS3ClientConfig.copyWith(
s3ClientConfig: _s3ClientConfig.copyWith(
useAcceleration: _s3PluginOptions.useAccelerateEndpoint,
),
);
Expand Down
Loading

0 comments on commit ec5c892

Please sign in to comment.