Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Functionality to create storage buckets in GCP and AWS #291

Merged
merged 14 commits into from
Apr 26, 2021
17 changes: 16 additions & 1 deletion libcloudforensics/providers/aws/internal/account.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
from typing import Optional, TYPE_CHECKING
import boto3

from libcloudforensics.providers.aws.internal import ec2, ebs, kms
from libcloudforensics.providers.aws.internal import ec2, ebs, kms, s3

if TYPE_CHECKING:
import botocore
Expand All @@ -40,6 +40,7 @@ class AWSAccount:
_ec2 (AWSEC2): An AWS EC2 client object.
_ebs (AWSEBS): An AWS EBS client object.
_kms (AWSKMS): An AWS KMS client object.
_s3 (AWSS3): An AWS S3 client object.
"""

def __init__(self,
Expand Down Expand Up @@ -83,6 +84,7 @@ def __init__(self,
self._ec2 = None # type: Optional[ec2.EC2]
self._ebs = None # type: Optional[ebs.EBS]
self._kms = None # type: Optional[kms.KMS]
self._s3 = None # type: Optional[s3.S3]

@property
def ec2(self) -> ec2.EC2:
Expand Down Expand Up @@ -123,6 +125,19 @@ def kms(self) -> kms.KMS:
self._kms = kms.KMS(self)
return self._kms

@property
def s3(self) -> s3.S3:
  """Get an AWS S3 object for the account, creating it on first access.

  The client is built lazily and cached on the instance, so repeated
  accesses return the same object.

  Returns:
    AWSS3: Object that represents AWS S3 services.
  """

  if self._s3 is None:
    self._s3 = s3.S3(self)
  return self._s3

def ClientApi(self,
service: str,
region: Optional[str] = None) -> 'botocore.client.EC2': # pylint: disable=no-member
Expand Down
1 change: 1 addition & 0 deletions libcloudforensics/providers/aws/internal/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
ACCOUNT_SERVICE = 'sts'
KMS_SERVICE = 'kms'
CLOUDTRAIL_SERVICE = 'cloudtrail'
S3_SERVICE = 's3'

# Resource types constant
INSTANCE = 'instance'
Expand Down
83 changes: 83 additions & 0 deletions libcloudforensics/providers/aws/internal/s3.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
# -*- coding: utf-8 -*-
# Copyright 2021 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bucket functionality."""

from typing import TYPE_CHECKING, Dict, Optional, Any

from libcloudforensics import errors
from libcloudforensics.providers.aws.internal import common

if TYPE_CHECKING:
# TYPE_CHECKING is always False at runtime, therefore it is safe to ignore
# the following cyclic import, as it is only used for type hints
from libcloudforensics.providers.aws.internal import account # pylint: disable=cyclic-import


class S3:
  """Class that represents AWS S3 storage services.

  Attributes:
    aws_account (AWSAccount): The account for the resource.
  """

  def __init__(self,
               aws_account: 'account.AWSAccount') -> None:
    """Initialize the AWS S3 resource.

    Args:
      aws_account (AWSAccount): The account for the resource.
    """

    self.aws_account = aws_account

  def CreateBucket(
      self,
      name: str,
      region: Optional[str] = None,
      acl: str = 'private') -> Dict[str, Any]:
    """Create an S3 storage bucket.

    Args:
      name (str): The name of the bucket.
      region (str): Optional. The region in which the bucket resides.
          Defaults to the account's default region.
      acl (str): Optional. The canned ACL with which to create the bucket.
          Default is 'private'.
          Appropriate values for the Canned ACLs are here:
          https://docs.aws.amazon.com/AmazonS3/latest/userguide/acl-overview.html#canned-acl # pylint: disable=line-too-long

    Returns:
      Dict: An API operation object for an S3 bucket.
          https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#S3.Bucket.create # pylint: disable=line-too-long

    Raises:
      ResourceCreationError: If the bucket couldn't be created.
    """

    client = self.aws_account.ClientApi(common.S3_SERVICE)
    try:
      # A variable annotation (instead of a trailing `# type:` comment on the
      # multi-line call) keeps mypy happy: type comments are not reliably
      # recognized on the closing line of a multi-line expression.
      # NOTE(review): the S3 API rejects LocationConstraint='us-east-1' — the
      # configuration must be omitted for that region. Confirm whether callers
      # ever target us-east-1 before relying on this in production.
      bucket: Dict[str, Any] = client.create_bucket(
          Bucket=name,
          ACL=acl,
          CreateBucketConfiguration={
              'LocationConstraint': region or self.aws_account.default_region
          })
      return bucket
    except client.exceptions.ClientError as exception:
      raise errors.ResourceCreationError(
          'Could not create bucket {0:s}: {1:s}'.format(
              name, str(exception)),
          __name__) from exception
37 changes: 35 additions & 2 deletions libcloudforensics/providers/gcp/internal/storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -119,8 +119,7 @@ def GetBucketACLs(self,
# Can change to removeprefix() in 3.9
bucket = bucket[5:]
gcs_bac = self.GcsApi().bucketAccessControls()
request = gcs_bac.list(
bucket=bucket, userProject=user_project)
request = gcs_bac.list(bucket=bucket, userProject=user_project)
# https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls#resource
ac_response = request.execute()
for item in ac_response.get('items', []):
Expand Down Expand Up @@ -239,3 +238,37 @@ def GetBucketSize(self,
elif val > ret[bucket]:
ret[bucket] = val
return ret

def CreateBucket(
    self,
    bucket: str,
    labels: Optional[Dict[str, str]] = None,
    predefined_acl: str = 'private',
    predefined_default_object_acl: str = 'private') -> Dict[str, Any]:
  """Creates a Google Cloud Storage bucket in the current project.

  Args:
    bucket (str): Name of the desired bucket.
    labels (Dict[str, str]): Optional. Mapping of key/value strings to be
        applied as a label to the bucket. Rules for acceptable label values
        are located at
        https://cloud.google.com/storage/docs/key-terms#bucket-labels
    predefined_acl (str): Optional. A predefined set of Access Controls
        to apply to the bucket.
    predefined_default_object_acl (str): Optional. A predefined set of
        Access Controls to apply to the objects in the bucket.
        Values listed in https://cloud.google.com/storage/docs/json_api/v1/buckets/insert#parameters # pylint: disable=line-too-long

  Returns:
    Dict[str, Any]: An API operation object for a Google Cloud Storage bucket.
        https://cloud.google.com/storage/docs/json_api/v1/buckets#resource
  """
  # The bucket resource sent to the API; labels may be None, which the API
  # treats as "no labels".
  bucket_resource = {'name': bucket, 'labels': labels}
  insert_request = self.GcsApi().buckets().insert(
      project=self.project_id,
      predefinedAcl=predefined_acl,
      predefinedDefaultObjectAcl=predefined_default_object_acl,
      body=bucket_resource)
  api_response = insert_request.execute()  # type: Dict[str, Any]
  return api_response
26 changes: 25 additions & 1 deletion tests/providers/aws/aws_mocks.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@

import mock

from libcloudforensics.providers.aws.internal import account, ebs, ec2
from libcloudforensics.providers.aws.internal import account, ebs, ec2, s3
from libcloudforensics.providers.aws.internal import log as aws_log

with mock.patch('boto3.session.Session._setup_loader') as mock_session:
Expand Down Expand Up @@ -63,6 +63,8 @@

FAKE_CLOUDTRAIL = aws_log.AWSCloudTrail(FAKE_AWS_ACCOUNT)

FAKE_STORAGE = s3.S3(FAKE_AWS_ACCOUNT)

FAKE_EVENT_LIST = [
{'EventId': '474e8265-9180-4407-a5c9-f3a86d8bb1f0',
'EventName': 'CreateUser', 'ReadOnly': 'false'},
Expand Down Expand Up @@ -184,3 +186,25 @@
MOCK_EVENT_LIST = {
'Events': FAKE_EVENT_LIST
}

# Canned response mimicking boto3's S3 create_bucket() return value, used by
# tests/providers/aws/internal/test_s3.py.
MOCK_CREATE_BUCKET = {
    'ResponseMetadata': {
        'RequestId': '123456789',
        'HTTPStatusCode': 200,
        'HTTPHeaders': {
            'date': 'Thu, 15 Apr 2021 05:56:13 GMT',
            'location': 'http://test-bucket.s3.amazonaws.com/',
            'content-length': '0',
            'server': 'AmazonS3'
        },
        'RetryAttempts': 0
    },
    'Location': 'http://test-bucket.s3.amazonaws.com/'
}
43 changes: 43 additions & 0 deletions tests/providers/aws/internal/test_s3.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
# -*- coding: utf-8 -*-
# Copyright 2021 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for AWS module - s3.py."""

import typing
import unittest
import mock

from tests.providers.aws import aws_mocks


class AWSS3Test(unittest.TestCase):
  """Test AWS S3 class."""
  # pylint: disable=line-too-long

  @typing.no_type_check
  @mock.patch('libcloudforensics.providers.aws.internal.account.AWSAccount.ClientApi')
  def testCreateBucket(self, mock_s3_api):
    """Test that the Bucket is created."""
    mock_create = mock_s3_api.return_value.create_bucket
    mock_create.return_value = aws_mocks.MOCK_CREATE_BUCKET

    result = aws_mocks.FAKE_STORAGE.CreateBucket('test-bucket')

    # The bucket must be created with the default ACL in the account's
    # default region.
    mock_create.assert_called_with(
        Bucket='test-bucket',
        ACL='private',
        CreateBucketConfiguration={
            'LocationConstraint': aws_mocks.FAKE_AWS_ACCOUNT.default_region
        })
    self.assertEqual(200, result['ResponseMetadata']['HTTPStatusCode'])
    self.assertEqual(
        'http://test-bucket.s3.amazonaws.com/', result['Location'])
Fryyyyy marked this conversation as resolved.
Show resolved Hide resolved
18 changes: 18 additions & 0 deletions tests/providers/gcp/internal/test_storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,3 +89,21 @@ def testGetBucketSize(self, mock_gcm_api):
size_results = gcp_mocks.FAKE_GCS.GetBucketSize('gs://test_bucket_1')
self.assertEqual(1, len(size_results))
self.assertEqual(60, size_results['test_bucket_1'])

@typing.no_type_check
@mock.patch('libcloudforensics.providers.gcp.internal.storage.GoogleCloudStorage.GcsApi')
def testCreateBucket(self, mock_gcs_api):
  """Test GCS bucket Create operation."""
  insert_mock = mock_gcs_api.return_value.buckets.return_value.insert
  insert_mock.return_value.execute.return_value = gcp_mocks.MOCK_GCS_BUCKETS['items'][0]

  result = gcp_mocks.FAKE_GCS.CreateBucket('fake-bucket')

  # Insert must target the mocked project with private ACLs and no labels.
  insert_mock.assert_called_with(
      project='fake-target-project',
      predefinedAcl='private',
      predefinedDefaultObjectAcl='private',
      body={'name': 'fake-bucket', 'labels': None})
  self.assertEqual('fake-bucket', result['name'])
  self.assertEqual('123456789', result['projectNumber'])
Fryyyyy marked this conversation as resolved.
Show resolved Hide resolved
13 changes: 13 additions & 0 deletions tools/aws_cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -187,3 +187,16 @@ def ListImages(args: 'argparse.Namespace') -> None:
for image in images:
logger.info('Name: {0:s}, ImageId: {1:s}, Location: {2:s}'.format(
image['Name'], image['ImageId'], image['ImageLocation']))


def CreateBucket(args: 'argparse.Namespace') -> None:
  """Create an S3 bucket.

  Args:
    args (argparse.Namespace): Arguments from ArgumentParser.
  """

  created = account.AWSAccount(args.zone).s3.CreateBucket(args.name)
  logger.info('Bucket created: {0:s}'.format(created['Location']))
13 changes: 12 additions & 1 deletion tools/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,8 @@
'listinstances': aws_cli.ListInstances,
'listdisks': aws_cli.ListVolumes,
'querylogs': aws_cli.QueryLogs,
'startvm': aws_cli.StartAnalysisVm
'startvm': aws_cli.StartAnalysisVm,
'createbucket': aws_cli.CreateBucket
},
'az': {
'copydisk': az_cli.CreateDiskCopy,
Expand All @@ -46,6 +47,7 @@
'creatediskgcs': gcp_cli.CreateDiskFromGCSImage,
'deleteinstance': gcp_cli.DeleteInstance,
'deleteobject': gcp_cli.DeleteObject,
'createbucket': gcp_cli.CreateBucket,
'listbuckets': gcp_cli.ListBuckets,
'listdisks': gcp_cli.ListDisks,
'listinstances': gcp_cli.ListInstances,
Expand Down Expand Up @@ -189,6 +191,10 @@ def Main() -> None:
args=[
('--filter', 'Filter to apply to Name of AMI image.', None),
])
AddParser('aws', aws_subparsers, 'createbucket', 'Create an S3 bucket.',
args=[
('name', 'The name of the bucket.', None),
])

# Azure parser options
az_parser.add_argument('default_resource_group_name',
Expand Down Expand Up @@ -339,6 +345,11 @@ def Main() -> None:
'Name of the disk to create. If None, name '
'will be printed at the end.',
None)])
AddParser('gcp', gcp_subparsers, 'createbucket',
'Create a GCS bucket in a project.',
args=[
('name', 'Name of bucket.', None),
])
AddParser('gcp', gcp_subparsers, 'listbuckets',
'List GCS buckets for a project.')
AddParser('gcp', gcp_subparsers, 'bucketacls', 'List ACLs of a GCS bucket.',
Expand Down
13 changes: 13 additions & 0 deletions tools/gcp_cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -261,6 +261,19 @@ def ListBuckets(args: 'argparse.Namespace') -> None:
obj.get('id', 'ID not found'), obj.get('selfLink', 'No link')))


def CreateBucket(args: 'argparse.Namespace') -> None:
  """Create a bucket in a GCP project.

  Args:
    args (argparse.Namespace): Arguments from ArgumentParser.
  """
  # Tag the bucket so resources created by this tool are identifiable.
  result = gcp_storage.GoogleCloudStorage(args.project).CreateBucket(
      args.name, labels={'created_by': 'cfu'})
  logger.info(
      '{0:s} : {1:s}'.format(
          result.get('id', 'ID not found'), result.get('selfLink', 'No link')))


def ListBucketObjects(args: 'argparse.Namespace') -> None:
"""List the objects in a GCS bucket.

Expand Down