diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 3e15854efd6c..479af4af812b 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
-current_version = 1.3.3
+current_version = 1.3.4
[bumpversion:file:setup.py]
diff --git a/.travis.yml b/.travis.yml
index f1b7ac40dde2..de22818b895d 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -8,6 +8,19 @@ python:
env:
- TEST_SERVER_MODE=false
- TEST_SERVER_MODE=true
+# Due to incomplete Python 3.7 support on Travis CI (
+# https://github.com/travis-ci/travis-ci/issues/9815),
+# using a matrix is necessary
+matrix:
+ include:
+ - python: 3.7
+ env: TEST_SERVER_MODE=false
+ dist: xenial
+ sudo: true
+ - python: 3.7
+ env: TEST_SERVER_MODE=true
+ dist: xenial
+ sudo: true
before_install:
- export BOTO_CONFIG=/dev/null
install:
diff --git a/AUTHORS.md b/AUTHORS.md
index 6b7c96291e29..0a152505a921 100644
--- a/AUTHORS.md
+++ b/AUTHORS.md
@@ -53,3 +53,4 @@ Moto is written by Steve Pulec with contributions from:
* [Jim Shields](https://github.com/jimjshields)
* [William Richard](https://github.com/william-richard)
* [Alex Casalboni](https://github.com/alexcasalboni)
+* [Jon Beilke](https://github.com/jrbeilke)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index fb3a5d8d5cb5..7f7ee44487cb 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,25 @@
Moto Changelog
===================
+1.3.6
+-----
+
+ * Fix boto3 pinning.
+
+1.3.5
+-----
+
+ * Pin down botocore version as a temporary fix for #1793.
+ * More features on Secrets Manager.
+
+1.3.4
+------
+
+ * IAM get account authorization details
+ * Adding account id to ManagedPolicy ARN
+ * APIGateway usage plans and usage plan keys
+ * ECR list images
+
1.3.3
------
diff --git a/IMPLEMENTATION_COVERAGE.md b/IMPLEMENTATION_COVERAGE.md
index d19d2473efc3..17b864dc3a0c 100644
--- a/IMPLEMENTATION_COVERAGE.md
+++ b/IMPLEMENTATION_COVERAGE.md
@@ -58,7 +58,6 @@
- [ ] get_room
- [ ] get_room_skill_parameter
- [ ] get_skill_group
-- [ ] list_device_events
- [ ] list_skills
- [ ] list_tags
- [ ] put_room_skill_parameter
@@ -82,7 +81,7 @@
- [ ] update_room
- [ ] update_skill_group
-## apigateway - 17% implemented
+## apigateway - 24% implemented
- [ ] create_api_key
- [ ] create_authorizer
- [ ] create_base_path_mapping
@@ -95,8 +94,8 @@
- [X] create_resource
- [X] create_rest_api
- [X] create_stage
-- [ ] create_usage_plan
-- [ ] create_usage_plan_key
+- [X] create_usage_plan
+- [X] create_usage_plan_key
- [ ] create_vpc_link
- [ ] delete_api_key
- [ ] delete_authorizer
@@ -116,8 +115,8 @@
- [X] delete_resource
- [X] delete_rest_api
- [ ] delete_stage
-- [ ] delete_usage_plan
-- [ ] delete_usage_plan_key
+- [X] delete_usage_plan
+- [X] delete_usage_plan_key
- [ ] delete_vpc_link
- [ ] flush_stage_authorizers_cache
- [ ] flush_stage_cache
@@ -162,10 +161,10 @@
- [X] get_stages
- [ ] get_tags
- [ ] get_usage
-- [ ] get_usage_plan
-- [ ] get_usage_plan_key
-- [ ] get_usage_plan_keys
-- [ ] get_usage_plans
+- [X] get_usage_plan
+- [X] get_usage_plan_key
+- [X] get_usage_plan_keys
+- [X] get_usage_plans
- [ ] get_vpc_link
- [ ] get_vpc_links
- [ ] import_api_keys
@@ -352,7 +351,6 @@
- [ ] delete_scaling_plan
- [ ] describe_scaling_plan_resources
- [ ] describe_scaling_plans
-- [ ] update_scaling_plan
## batch - 93% implemented
- [ ] cancel_job
@@ -767,8 +765,6 @@
- [ ] create_pipeline
- [ ] delete_custom_action_type
- [ ] delete_pipeline
-- [ ] delete_webhook
-- [ ] deregister_webhook_with_third_party
- [ ] disable_stage_transition
- [ ] enable_stage_transition
- [ ] get_job_details
@@ -779,7 +775,6 @@
- [ ] list_action_types
- [ ] list_pipeline_executions
- [ ] list_pipelines
-- [ ] list_webhooks
- [ ] poll_for_jobs
- [ ] poll_for_third_party_jobs
- [ ] put_action_revision
@@ -788,8 +783,6 @@
- [ ] put_job_success_result
- [ ] put_third_party_job_failure_result
- [ ] put_third_party_job_success_result
-- [ ] put_webhook
-- [ ] register_webhook_with_third_party
- [ ] retry_stage_execution
- [ ] start_pipeline_execution
- [ ] update_pipeline
@@ -1065,7 +1058,6 @@
- [ ] create_project
- [ ] create_remote_access_session
- [ ] create_upload
-- [ ] create_vpce_configuration
- [ ] delete_device_pool
- [ ] delete_instance_profile
- [ ] delete_network_profile
@@ -1073,7 +1065,6 @@
- [ ] delete_remote_access_session
- [ ] delete_run
- [ ] delete_upload
-- [ ] delete_vpce_configuration
- [ ] get_account_settings
- [ ] get_device
- [ ] get_device_instance
@@ -1089,7 +1080,6 @@
- [ ] get_suite
- [ ] get_test
- [ ] get_upload
-- [ ] get_vpce_configuration
- [ ] install_to_remote_access_session
- [ ] list_artifacts
- [ ] list_device_instances
@@ -1109,7 +1099,6 @@
- [ ] list_tests
- [ ] list_unique_problems
- [ ] list_uploads
-- [ ] list_vpce_configurations
- [ ] purchase_offering
- [ ] renew_offering
- [ ] schedule_run
@@ -1120,7 +1109,6 @@
- [ ] update_instance_profile
- [ ] update_network_profile
- [ ] update_project
-- [ ] update_vpce_configuration
## directconnect - 0% implemented
- [ ] allocate_connection_on_interconnect
@@ -1277,7 +1265,7 @@
- [ ] update_radius
- [ ] verify_trust
-## dynamodb - 21% implemented
+## dynamodb - 22% implemented
- [ ] batch_get_item
- [ ] batch_write_item
- [ ] create_backup
@@ -1289,7 +1277,6 @@
- [ ] describe_backup
- [ ] describe_continuous_backups
- [ ] describe_global_table
-- [ ] describe_global_table_settings
- [ ] describe_limits
- [ ] describe_table
- [ ] describe_time_to_live
@@ -1307,7 +1294,6 @@
- [ ] untag_resource
- [ ] update_continuous_backups
- [ ] update_global_table
-- [ ] update_global_table_settings
- [ ] update_item
- [ ] update_table
- [ ] update_time_to_live
@@ -1318,7 +1304,7 @@
- [ ] get_shard_iterator
- [ ] list_streams
-## ec2 - 36% implemented
+## ec2 - 37% implemented
- [ ] accept_reserved_instances_exchange_quote
- [ ] accept_vpc_endpoint_connections
- [X] accept_vpc_peering_connection
@@ -1356,7 +1342,6 @@
- [ ] create_default_vpc
- [X] create_dhcp_options
- [ ] create_egress_only_internet_gateway
-- [ ] create_fleet
- [ ] create_flow_logs
- [ ] create_fpga_image
- [X] create_image
@@ -1391,7 +1376,6 @@
- [X] delete_customer_gateway
- [ ] delete_dhcp_options
- [ ] delete_egress_only_internet_gateway
-- [ ] delete_fleets
- [ ] delete_flow_logs
- [ ] delete_fpga_image
- [X] delete_internet_gateway
@@ -1433,9 +1417,6 @@
- [ ] describe_egress_only_internet_gateways
- [ ] describe_elastic_gpus
- [ ] describe_export_tasks
-- [ ] describe_fleet_history
-- [ ] describe_fleet_instances
-- [ ] describe_fleets
- [ ] describe_flow_logs
- [ ] describe_fpga_image_attribute
- [ ] describe_fpga_images
@@ -1532,7 +1513,6 @@
- [X] import_key_pair
- [ ] import_snapshot
- [ ] import_volume
-- [ ] modify_fleet
- [ ] modify_fpga_image_attribute
- [ ] modify_hosts
- [ ] modify_id_format
@@ -1905,11 +1885,8 @@
- [ ] delete_delivery_stream
- [ ] describe_delivery_stream
- [ ] list_delivery_streams
-- [ ] list_tags_for_delivery_stream
- [ ] put_record
- [ ] put_record_batch
-- [ ] tag_delivery_stream
-- [ ] untag_delivery_stream
- [ ] update_destination
## fms - 0% implemented
@@ -2231,7 +2208,7 @@
- [ ] describe_event_types
- [ ] describe_events
-## iam - 47% implemented
+## iam - 48% implemented
- [ ] add_client_id_to_open_id_connect_provider
- [X] add_role_to_instance_profile
- [X] add_user_to_group
@@ -2281,7 +2258,7 @@
- [X] enable_mfa_device
- [ ] generate_credential_report
- [ ] get_access_key_last_used
-- [ ] get_account_authorization_details
+- [X] get_account_authorization_details
- [ ] get_account_password_policy
- [ ] get_account_summary
- [ ] get_context_keys_for_custom_policy
@@ -2536,38 +2513,6 @@
- [ ] start_next_pending_job_execution
- [ ] update_job_execution
-## iotanalytics - 0% implemented
-- [ ] batch_put_message
-- [ ] cancel_pipeline_reprocessing
-- [ ] create_channel
-- [ ] create_dataset
-- [ ] create_dataset_content
-- [ ] create_datastore
-- [ ] create_pipeline
-- [ ] delete_channel
-- [ ] delete_dataset
-- [ ] delete_dataset_content
-- [ ] delete_datastore
-- [ ] delete_pipeline
-- [ ] describe_channel
-- [ ] describe_dataset
-- [ ] describe_datastore
-- [ ] describe_logging_options
-- [ ] describe_pipeline
-- [ ] get_dataset_content
-- [ ] list_channels
-- [ ] list_datasets
-- [ ] list_datastores
-- [ ] list_pipelines
-- [ ] put_logging_options
-- [ ] run_pipeline_activity
-- [ ] sample_channel_data
-- [ ] start_pipeline_reprocessing
-- [ ] update_channel
-- [ ] update_dataset
-- [ ] update_datastore
-- [ ] update_pipeline
-
## kinesis - 56% implemented
- [X] add_tags_to_stream
- [X] create_stream
@@ -2815,7 +2760,7 @@
- [ ] update_domain_entry
- [ ] update_load_balancer_attribute
-## logs - 24% implemented
+## logs - 27% implemented
- [ ] associate_kms_key
- [ ] cancel_export_task
- [ ] create_export_task
@@ -2830,7 +2775,7 @@
- [ ] delete_subscription_filter
- [ ] describe_destinations
- [ ] describe_export_tasks
-- [ ] describe_log_groups
+- [X] describe_log_groups
- [X] describe_log_streams
- [ ] describe_metric_filters
- [ ] describe_resource_policies
@@ -3569,9 +3514,6 @@
- [ ] update_tags_for_domain
- [ ] view_billing
-## runtime.sagemaker - 0% implemented
-- [ ] invoke_endpoint
-
## s3 - 15% implemented
- [ ] abort_multipart_upload
- [ ] complete_multipart_upload
@@ -3703,18 +3645,18 @@
- [ ] put_attributes
- [ ] select
-## secretsmanager - 0% implemented
+## secretsmanager - 33% implemented
- [ ] cancel_rotate_secret
-- [ ] create_secret
+- [X] create_secret
- [ ] delete_secret
-- [ ] describe_secret
-- [ ] get_random_password
-- [ ] get_secret_value
+- [X] describe_secret
+- [X] get_random_password
+- [X] get_secret_value
- [ ] list_secret_version_ids
- [ ] list_secrets
- [ ] put_secret_value
- [ ] restore_secret
-- [ ] rotate_secret
+- [X] rotate_secret
- [ ] tag_resource
- [ ] untag_resource
- [ ] update_secret
@@ -3984,7 +3926,7 @@
- [X] tag_queue
- [X] untag_queue
-## ssm - 10% implemented
+## ssm - 11% implemented
- [X] add_tags_to_resource
- [ ] cancel_command
- [ ] create_activation
@@ -3997,7 +3939,6 @@
- [ ] delete_activation
- [ ] delete_association
- [ ] delete_document
-- [ ] delete_inventory
- [ ] delete_maintenance_window
- [X] delete_parameter
- [X] delete_parameters
@@ -4021,7 +3962,6 @@
- [ ] describe_instance_patch_states
- [ ] describe_instance_patch_states_for_patch_group
- [ ] describe_instance_patches
-- [ ] describe_inventory_deletions
- [ ] describe_maintenance_window_execution_task_invocations
- [ ] describe_maintenance_window_execution_tasks
- [ ] describe_maintenance_window_executions
@@ -4053,7 +3993,7 @@
- [ ] list_association_versions
- [ ] list_associations
- [ ] list_command_invocations
-- [ ] list_commands
+- [X] list_commands
- [ ] list_compliance_items
- [ ] list_compliance_summaries
- [ ] list_document_versions
@@ -4464,36 +4404,25 @@
- [ ] update_resource
## workspaces - 0% implemented
-- [ ] associate_ip_groups
-- [ ] authorize_ip_rules
-- [ ] create_ip_group
- [ ] create_tags
- [ ] create_workspaces
-- [ ] delete_ip_group
- [ ] delete_tags
-- [ ] describe_ip_groups
- [ ] describe_tags
- [ ] describe_workspace_bundles
- [ ] describe_workspace_directories
- [ ] describe_workspaces
- [ ] describe_workspaces_connection_status
-- [ ] disassociate_ip_groups
- [ ] modify_workspace_properties
-- [ ] modify_workspace_state
- [ ] reboot_workspaces
- [ ] rebuild_workspaces
-- [ ] revoke_ip_rules
- [ ] start_workspaces
- [ ] stop_workspaces
- [ ] terminate_workspaces
-- [ ] update_rules_of_ip_group
## xray - 0% implemented
- [ ] batch_get_traces
-- [ ] get_encryption_config
- [ ] get_service_graph
- [ ] get_trace_graph
- [ ] get_trace_summaries
-- [ ] put_encryption_config
- [ ] put_telemetry_records
- [ ] put_trace_segments
diff --git a/README.md b/README.md
index 189bf2c4f366..791226d6b50b 100644
--- a/README.md
+++ b/README.md
@@ -177,7 +177,7 @@ def test_add_servers():
```
#### Using moto 1.0.X with boto2
-moto 1.0.X mock docorators are defined for boto3 and do not work with boto2. Use the @mock_AWSSVC_deprecated to work with boto2.
+moto 1.0.X mock decorators are defined for boto3 and do not work with boto2. Use the @mock_AWSSVC_deprecated to work with boto2.
Using moto with boto2
```python
diff --git a/moto/__init__.py b/moto/__init__.py
index 2301b7ca13ca..6992c535e1da 100644
--- a/moto/__init__.py
+++ b/moto/__init__.py
@@ -3,7 +3,7 @@
# logging.getLogger('boto').setLevel(logging.CRITICAL)
__title__ = 'moto'
-__version__ = '1.3.3'
+__version__ = '1.3.6'
from .acm import mock_acm # flake8: noqa
from .apigateway import mock_apigateway, mock_apigateway_deprecated # flake8: noqa
@@ -24,6 +24,7 @@
from .emr import mock_emr, mock_emr_deprecated # flake8: noqa
from .events import mock_events # flake8: noqa
from .glacier import mock_glacier, mock_glacier_deprecated # flake8: noqa
+from .glue import mock_glue # flake8: noqa
from .iam import mock_iam, mock_iam_deprecated # flake8: noqa
from .kinesis import mock_kinesis, mock_kinesis_deprecated # flake8: noqa
from .kms import mock_kms, mock_kms_deprecated # flake8: noqa
diff --git a/moto/backends.py b/moto/backends.py
index 25fcec09a3b2..d95424385d27 100644
--- a/moto/backends.py
+++ b/moto/backends.py
@@ -20,6 +20,7 @@
from moto.emr import emr_backends
from moto.events import events_backends
from moto.glacier import glacier_backends
+from moto.glue import glue_backends
from moto.iam import iam_backends
from moto.instance_metadata import instance_metadata_backends
from moto.kinesis import kinesis_backends
@@ -66,6 +67,7 @@
'events': events_backends,
'emr': emr_backends,
'glacier': glacier_backends,
+ 'glue': glue_backends,
'iam': iam_backends,
'moto_api': moto_api_backends,
'instance_metadata': instance_metadata_backends,
diff --git a/moto/cloudformation/parsing.py b/moto/cloudformation/parsing.py
index c4059a06bc0a..35b05d1013bc 100644
--- a/moto/cloudformation/parsing.py
+++ b/moto/cloudformation/parsing.py
@@ -387,6 +387,7 @@ def __init__(self, stack_id, stack_name, parameters, tags, region_name, template
"AWS::StackName": stack_name,
"AWS::URLSuffix": "amazonaws.com",
"AWS::NoValue": None,
+ "AWS::Partition": "aws",
}
def __getitem__(self, key):
diff --git a/moto/core/models.py b/moto/core/models.py
index 92dc2a98096a..adc06a9c0701 100644
--- a/moto/core/models.py
+++ b/moto/core/models.py
@@ -89,6 +89,17 @@ def decorate_class(self, klass):
if inspect.ismethod(attr_value) and attr_value.__self__ is klass:
continue
+ # Check if this is a staticmethod. If so, skip patching
+ for cls in inspect.getmro(klass):
+ if attr_value.__name__ not in cls.__dict__:
+ continue
+ bound_attr_value = cls.__dict__[attr_value.__name__]
+ if not isinstance(bound_attr_value, staticmethod):
+ break
+ else:
+ # It is a staticmethod, skip patching
+ continue
+
try:
setattr(klass, attr, self(attr_value, reset=False))
except TypeError:
diff --git a/moto/dynamodb2/responses.py b/moto/dynamodb2/responses.py
index 3c7e7ffc2dc3..493e17833b08 100644
--- a/moto/dynamodb2/responses.py
+++ b/moto/dynamodb2/responses.py
@@ -20,6 +20,17 @@ def has_empty_keys_or_values(_dict):
)
+def get_empty_str_error():
+ er = 'com.amazonaws.dynamodb.v20111205#ValidationException'
+ return (400,
+ {'server': 'amazon.com'},
+ dynamo_json_dump({'__type': er,
+ 'message': ('One or more parameter values were '
+ 'invalid: An AttributeValue may not '
+ 'contain an empty string')}
+ ))
+
+
class DynamoHandler(BaseResponse):
def get_endpoint_name(self, headers):
@@ -174,14 +185,7 @@ def put_item(self):
item = self.body['Item']
if has_empty_keys_or_values(item):
- er = 'com.amazonaws.dynamodb.v20111205#ValidationException'
- return (400,
- {'server': 'amazon.com'},
- dynamo_json_dump({'__type': er,
- 'message': ('One or more parameter values were '
- 'invalid: An AttributeValue may not '
- 'contain an empty string')}
- ))
+ return get_empty_str_error()
overwrite = 'Expected' not in self.body
if not overwrite:
@@ -523,6 +527,7 @@ def delete_item(self):
return dynamo_json_dump(item_dict)
def update_item(self):
+
name = self.body['TableName']
key = self.body['Key']
update_expression = self.body.get('UpdateExpression')
@@ -533,6 +538,9 @@ def update_item(self):
'ExpressionAttributeValues', {})
existing_item = self.dynamodb_backend.get_item(name, key)
+ if has_empty_keys_or_values(expression_attribute_values):
+ return get_empty_str_error()
+
if 'Expected' in self.body:
expected = self.body['Expected']
else:
diff --git a/moto/ec2/models.py b/moto/ec2/models.py
index 4e26f0f6562c..b94cac4794b1 100755
--- a/moto/ec2/models.py
+++ b/moto/ec2/models.py
@@ -13,6 +13,7 @@
import boto.ec2
from collections import defaultdict
+import weakref
from datetime import datetime
from boto.ec2.instance import Instance as BotoInstance, Reservation
from boto.ec2.blockdevicemapping import BlockDeviceMapping, BlockDeviceType
@@ -2115,10 +2116,20 @@ def get_cidr_block_association_set(self, ipv6=False):
class VPCBackend(object):
+ __refs__ = defaultdict(list)
+
def __init__(self):
self.vpcs = {}
+ self.__refs__[self.__class__].append(weakref.ref(self))
super(VPCBackend, self).__init__()
+ @classmethod
+ def get_instances(cls):
+ for inst_ref in cls.__refs__[cls]:
+ inst = inst_ref()
+ if inst is not None:
+ yield inst
+
def create_vpc(self, cidr_block, instance_tenancy='default', amazon_provided_ipv6_cidr_block=False):
vpc_id = random_vpc_id()
vpc = VPC(self, vpc_id, cidr_block, len(self.vpcs) == 0, instance_tenancy, amazon_provided_ipv6_cidr_block)
@@ -2142,6 +2153,13 @@ def get_vpc(self, vpc_id):
raise InvalidVPCIdError(vpc_id)
return self.vpcs.get(vpc_id)
+ # get vpc by vpc id and aws region
+ def get_cross_vpc(self, vpc_id, peer_region):
+ for vpcs in self.get_instances():
+ if vpcs.region_name == peer_region:
+ match_vpc = vpcs.get_vpc(vpc_id)
+ return match_vpc
+
def get_all_vpcs(self, vpc_ids=None, filters=None):
matches = self.vpcs.values()
if vpc_ids:
diff --git a/moto/ec2/responses/vpc_peering_connections.py b/moto/ec2/responses/vpc_peering_connections.py
index 1bccce4f6cae..49d752893319 100644
--- a/moto/ec2/responses/vpc_peering_connections.py
+++ b/moto/ec2/responses/vpc_peering_connections.py
@@ -5,8 +5,12 @@
class VPCPeeringConnections(BaseResponse):
def create_vpc_peering_connection(self):
+ peer_region = self._get_param('PeerRegion')
+ if peer_region == self.region or peer_region is None:
+ peer_vpc = self.ec2_backend.get_vpc(self._get_param('PeerVpcId'))
+ else:
+ peer_vpc = self.ec2_backend.get_cross_vpc(self._get_param('PeerVpcId'), peer_region)
vpc = self.ec2_backend.get_vpc(self._get_param('VpcId'))
- peer_vpc = self.ec2_backend.get_vpc(self._get_param('PeerVpcId'))
vpc_pcx = self.ec2_backend.create_vpc_peering_connection(vpc, peer_vpc)
template = self.response_template(
CREATE_VPC_PEERING_CONNECTION_RESPONSE)
@@ -41,26 +45,31 @@ def reject_vpc_peering_connection(self):
CREATE_VPC_PEERING_CONNECTION_RESPONSE = """
-
- 7a62c49f-347e-4fc4-9331-6e8eEXAMPLE
-
- {{ vpc_pcx.id }}
+
+ 7a62c49f-347e-4fc4-9331-6e8eEXAMPLE
+
+ {{ vpc_pcx.id }}
- 777788889999
- {{ vpc_pcx.vpc.id }}
- {{ vpc_pcx.vpc.cidr_block }}
+ 777788889999
+ {{ vpc_pcx.vpc.id }}
+ {{ vpc_pcx.vpc.cidr_block }}
+
+ false
+ false
+ false
+
123456789012
{{ vpc_pcx.peer_vpc.id }}
- initiating-request
- Initiating request to {accepter ID}.
+ initiating-request
+ Initiating Request to {accepter ID}
2014-02-18T14:37:25.000Z
-
+
"""
diff --git a/moto/ecr/models.py b/moto/ecr/models.py
index 79ef9cf52aa6..4849ffbfad8f 100644
--- a/moto/ecr/models.py
+++ b/moto/ecr/models.py
@@ -4,12 +4,12 @@
from copy import copy
from random import random
+from botocore.exceptions import ParamValidationError
+
from moto.core import BaseBackend, BaseModel
from moto.ec2 import ec2_backends
from moto.ecr.exceptions import ImageNotFoundException, RepositoryNotFoundException
-from botocore.exceptions import ParamValidationError
-
DEFAULT_REGISTRY_ID = '012345678910'
@@ -97,13 +97,14 @@ def update_from_cloudformation_json(cls, original_resource, new_resource_name, c
class Image(BaseObject):
- def __init__(self, tag, manifest, repository, registry_id=DEFAULT_REGISTRY_ID):
+ def __init__(self, tag, manifest, repository, digest=None, registry_id=DEFAULT_REGISTRY_ID):
self.image_tag = tag
+ self.image_tags = [tag] if tag is not None else []
self.image_manifest = manifest
self.image_size_in_bytes = 50 * 1024 * 1024
self.repository = repository
self.registry_id = registry_id
- self.image_digest = None
+ self.image_digest = digest
self.image_pushed_at = None
def _create_digest(self):
@@ -115,6 +116,14 @@ def get_image_digest(self):
self._create_digest()
return self.image_digest
+ def get_image_manifest(self):
+ return self.image_manifest
+
+ def update_tag(self, tag):
+ self.image_tag = tag
+ if tag not in self.image_tags and tag is not None:
+ self.image_tags.append(tag)
+
@property
def response_object(self):
response_object = self.gen_response_object()
@@ -124,26 +133,26 @@ def response_object(self):
response_object['imageManifest'] = self.image_manifest
response_object['repositoryName'] = self.repository
response_object['registryId'] = self.registry_id
- return response_object
+ return {k: v for k, v in response_object.items() if v is not None and v != [None]}
@property
def response_list_object(self):
response_object = self.gen_response_object()
response_object['imageTag'] = self.image_tag
response_object['imageDigest'] = "i don't know"
- return response_object
+ return {k: v for k, v in response_object.items() if v is not None and v != [None]}
@property
def response_describe_object(self):
response_object = self.gen_response_object()
- response_object['imageTags'] = [self.image_tag]
+ response_object['imageTags'] = self.image_tags
response_object['imageDigest'] = self.get_image_digest()
response_object['imageManifest'] = self.image_manifest
response_object['repositoryName'] = self.repository
response_object['registryId'] = self.registry_id
response_object['imageSizeInBytes'] = self.image_size_in_bytes
response_object['imagePushedAt'] = '2017-05-09'
- return response_object
+ return {k: v for k, v in response_object.items() if v is not None and v != []}
@property
def response_batch_get_image(self):
@@ -154,7 +163,7 @@ def response_batch_get_image(self):
response_object['imageManifest'] = self.image_manifest
response_object['repositoryName'] = self.repository
response_object['registryId'] = self.registry_id
- return response_object
+ return {k: v for k, v in response_object.items() if v is not None and v != [None]}
class ECRBackend(BaseBackend):
@@ -231,7 +240,7 @@ def describe_images(self, repository_name, registry_id=None, image_ids=None):
found = False
for image in repository.images:
if (('imageDigest' in image_id and image.get_image_digest() == image_id['imageDigest']) or
- ('imageTag' in image_id and image.image_tag == image_id['imageTag'])):
+ ('imageTag' in image_id and image_id['imageTag'] in image.image_tags)):
found = True
response.add(image)
if not found:
@@ -257,9 +266,16 @@ def put_image(self, repository_name, image_manifest, image_tag):
else:
raise Exception("{0} is not a repository".format(repository_name))
- image = Image(image_tag, image_manifest, repository_name)
- repository.images.append(image)
- return image
+ existing_images = list(filter(lambda x: x.response_object['imageManifest'] == image_manifest, repository.images))
+ if not existing_images:
+ # this image is not in ECR yet
+ image = Image(image_tag, image_manifest, repository_name)
+ repository.images.append(image)
+ return image
+ else:
+ # update existing image
+ existing_images[0].update_tag(image_tag)
+ return existing_images[0]
def batch_get_image(self, repository_name, registry_id=None, image_ids=None, accepted_media_types=None):
if repository_name in self.repositories:
diff --git a/moto/ecs/models.py b/moto/ecs/models.py
index 55fb4d4d931b..d00853843c0f 100644
--- a/moto/ecs/models.py
+++ b/moto/ecs/models.py
@@ -179,7 +179,7 @@ def response_object(self):
class Service(BaseObject):
- def __init__(self, cluster, service_name, task_definition, desired_count, load_balancers=None):
+ def __init__(self, cluster, service_name, task_definition, desired_count, load_balancers=None, scheduling_strategy=None):
self.cluster_arn = cluster.arn
self.arn = 'arn:aws:ecs:us-east-1:012345678910:service/{0}'.format(
service_name)
@@ -202,6 +202,7 @@ def __init__(self, cluster, service_name, task_definition, desired_count, load_b
}
]
self.load_balancers = load_balancers if load_balancers is not None else []
+ self.scheduling_strategy = scheduling_strategy if scheduling_strategy is not None else 'REPLICA'
self.pending_count = 0
@property
@@ -214,6 +215,7 @@ def response_object(self):
del response_object['name'], response_object['arn']
response_object['serviceName'] = self.name
response_object['serviceArn'] = self.arn
+ response_object['schedulingStrategy'] = self.scheduling_strategy
for deployment in response_object['deployments']:
if isinstance(deployment['createdAt'], datetime):
@@ -655,7 +657,7 @@ def stop_task(self, cluster_str, task_str, reason):
raise Exception("Could not find task {} on cluster {}".format(
task_str, cluster_name))
- def create_service(self, cluster_str, service_name, task_definition_str, desired_count, load_balancers=None):
+ def create_service(self, cluster_str, service_name, task_definition_str, desired_count, load_balancers=None, scheduling_strategy=None):
cluster_name = cluster_str.split('/')[-1]
if cluster_name in self.clusters:
cluster = self.clusters[cluster_name]
@@ -665,7 +667,7 @@ def create_service(self, cluster_str, service_name, task_definition_str, desired
desired_count = desired_count if desired_count is not None else 0
service = Service(cluster, service_name,
- task_definition, desired_count, load_balancers)
+ task_definition, desired_count, load_balancers, scheduling_strategy)
cluster_service_pair = '{0}:{1}'.format(cluster_name, service_name)
self.services[cluster_service_pair] = service
diff --git a/moto/ecs/responses.py b/moto/ecs/responses.py
index 9455d7a2820f..e0bfefc02c56 100644
--- a/moto/ecs/responses.py
+++ b/moto/ecs/responses.py
@@ -154,8 +154,9 @@ def create_service(self):
task_definition_str = self._get_param('taskDefinition')
desired_count = self._get_int_param('desiredCount')
load_balancers = self._get_param('loadBalancers')
+ scheduling_strategy = self._get_param('schedulingStrategy')
service = self.ecs_backend.create_service(
- cluster_str, service_name, task_definition_str, desired_count, load_balancers)
+ cluster_str, service_name, task_definition_str, desired_count, load_balancers, scheduling_strategy)
return json.dumps({
'service': service.response_object
})
diff --git a/moto/elb/responses.py b/moto/elb/responses.py
index 40d6ec2f9e92..b512f56e9dfc 100644
--- a/moto/elb/responses.py
+++ b/moto/elb/responses.py
@@ -259,12 +259,22 @@ def set_load_balancer_policies_for_backend_server(self):
def describe_instance_health(self):
load_balancer_name = self._get_param('LoadBalancerName')
- instance_ids = [list(param.values())[0] for param in self._get_list_prefix('Instances.member')]
- if len(instance_ids) == 0:
- instance_ids = self.elb_backend.get_load_balancer(
- load_balancer_name).instance_ids
+ provided_instance_ids = [
+ list(param.values())[0]
+ for param in self._get_list_prefix('Instances.member')
+ ]
+ registered_instances_id = self.elb_backend.get_load_balancer(
+ load_balancer_name).instance_ids
+ if len(provided_instance_ids) == 0:
+ provided_instance_ids = registered_instances_id
template = self.response_template(DESCRIBE_INSTANCE_HEALTH_TEMPLATE)
- return template.render(instance_ids=instance_ids)
+ instances = []
+ for instance_id in provided_instance_ids:
+ state = "InService" \
+ if instance_id in registered_instances_id\
+ else "Unknown"
+ instances.append({"InstanceId": instance_id, "State": state})
+ return template.render(instances=instances)
def add_tags(self):
@@ -689,11 +699,11 @@ def _add_tags(self, elb):
DESCRIBE_INSTANCE_HEALTH_TEMPLATE = """
- {% for instance_id in instance_ids %}
+ {% for instance in instances %}
N/A
- {{ instance_id }}
- InService
+ {{ instance['InstanceId'] }}
+ {{ instance['State'] }}
N/A
{% endfor %}
diff --git a/moto/glue/__init__.py b/moto/glue/__init__.py
new file mode 100644
index 000000000000..6b1f13326e97
--- /dev/null
+++ b/moto/glue/__init__.py
@@ -0,0 +1,5 @@
+from __future__ import unicode_literals
+from .models import glue_backend
+
+glue_backends = {"global": glue_backend}
+mock_glue = glue_backend.decorator
diff --git a/moto/glue/exceptions.py b/moto/glue/exceptions.py
new file mode 100644
index 000000000000..62ea1525c8b6
--- /dev/null
+++ b/moto/glue/exceptions.py
@@ -0,0 +1,24 @@
+from __future__ import unicode_literals
+from moto.core.exceptions import JsonRESTError
+
+
+class GlueClientError(JsonRESTError):
+ code = 400
+
+
+class DatabaseAlreadyExistsException(GlueClientError):
+ def __init__(self):
+ self.code = 400
+ super(DatabaseAlreadyExistsException, self).__init__(
+ 'DatabaseAlreadyExistsException',
+ 'Database already exists.'
+ )
+
+
+class TableAlreadyExistsException(GlueClientError):
+ def __init__(self):
+ self.code = 400
+ super(TableAlreadyExistsException, self).__init__(
+ 'TableAlreadyExistsException',
+ 'Table already exists.'
+ )
diff --git a/moto/glue/models.py b/moto/glue/models.py
new file mode 100644
index 000000000000..09b7d60ed9b1
--- /dev/null
+++ b/moto/glue/models.py
@@ -0,0 +1,60 @@
+from __future__ import unicode_literals
+
+from moto.core import BaseBackend, BaseModel
+from moto.compat import OrderedDict
+from .exceptions import DatabaseAlreadyExistsException, TableAlreadyExistsException
+
+
+class GlueBackend(BaseBackend):
+
+ def __init__(self):
+ self.databases = OrderedDict()
+
+ def create_database(self, database_name):
+ if database_name in self.databases:
+ raise DatabaseAlreadyExistsException()
+
+ database = FakeDatabase(database_name)
+ self.databases[database_name] = database
+ return database
+
+ def get_database(self, database_name):
+ return self.databases[database_name]
+
+ def create_table(self, database_name, table_name, table_input):
+ database = self.get_database(database_name)
+
+ if table_name in database.tables:
+ raise TableAlreadyExistsException()
+
+ table = FakeTable(database_name, table_name, table_input)
+ database.tables[table_name] = table
+ return table
+
+ def get_table(self, database_name, table_name):
+ database = self.get_database(database_name)
+ return database.tables[table_name]
+
+ def get_tables(self, database_name):
+ database = self.get_database(database_name)
+ return [table for table_name, table in database.tables.items()]
+
+
+class FakeDatabase(BaseModel):
+
+ def __init__(self, database_name):
+ self.name = database_name
+ self.tables = OrderedDict()
+
+
+class FakeTable(BaseModel):
+
+ def __init__(self, database_name, table_name, table_input):
+ self.database_name = database_name
+ self.name = table_name
+ self.table_input = table_input
+ self.storage_descriptor = self.table_input.get('StorageDescriptor', {})
+ self.partition_keys = self.table_input.get('PartitionKeys', [])
+
+
+glue_backend = GlueBackend()
diff --git a/moto/glue/responses.py b/moto/glue/responses.py
new file mode 100644
index 000000000000..bb64c40d4cde
--- /dev/null
+++ b/moto/glue/responses.py
@@ -0,0 +1,63 @@
+from __future__ import unicode_literals
+
+import json
+
+from moto.core.responses import BaseResponse
+from .models import glue_backend
+
+
+class GlueResponse(BaseResponse):
+
+ @property
+ def glue_backend(self):
+ return glue_backend
+
+ @property
+ def parameters(self):
+ return json.loads(self.body)
+
+ def create_database(self):
+ database_name = self.parameters['DatabaseInput']['Name']
+ self.glue_backend.create_database(database_name)
+ return ""
+
+ def get_database(self):
+ database_name = self.parameters.get('Name')
+ database = self.glue_backend.get_database(database_name)
+ return json.dumps({'Database': {'Name': database.name}})
+
+ def create_table(self):
+ database_name = self.parameters.get('DatabaseName')
+ table_input = self.parameters.get('TableInput')
+ table_name = table_input.get('Name')
+ self.glue_backend.create_table(database_name, table_name, table_input)
+ return ""
+
+ def get_table(self):
+ database_name = self.parameters.get('DatabaseName')
+ table_name = self.parameters.get('Name')
+ table = self.glue_backend.get_table(database_name, table_name)
+ return json.dumps({
+ 'Table': {
+ 'DatabaseName': table.database_name,
+ 'Name': table.name,
+ 'PartitionKeys': table.partition_keys,
+ 'StorageDescriptor': table.storage_descriptor
+ }
+ })
+
+ def get_tables(self):
+ database_name = self.parameters.get('DatabaseName')
+ tables = self.glue_backend.get_tables(database_name)
+ return json.dumps(
+ {
+ 'TableList': [
+ {
+ 'DatabaseName': table.database_name,
+ 'Name': table.name,
+ 'PartitionKeys': table.partition_keys,
+ 'StorageDescriptor': table.storage_descriptor
+ } for table in tables
+ ]
+ }
+ )
diff --git a/moto/glue/urls.py b/moto/glue/urls.py
new file mode 100644
index 000000000000..f3eaa9cad2dd
--- /dev/null
+++ b/moto/glue/urls.py
@@ -0,0 +1,11 @@
+from __future__ import unicode_literals
+
+from .responses import GlueResponse
+
+url_bases = [
+ "https?://glue(.*).amazonaws.com"
+]
+
+url_paths = {
+ '{0}/$': GlueResponse.dispatch
+}
diff --git a/moto/glue/utils.py b/moto/glue/utils.py
new file mode 100644
index 000000000000..baffc4882521
--- /dev/null
+++ b/moto/glue/utils.py
@@ -0,0 +1 @@
+from __future__ import unicode_literals
diff --git a/moto/iam/models.py b/moto/iam/models.py
index 8b632e555092..4d884fa2f18a 100644
--- a/moto/iam/models.py
+++ b/moto/iam/models.py
@@ -37,7 +37,6 @@ def __init__(self,
description=None,
document=None,
path=None):
- self.document = document or {}
self.name = name
self.attachment_count = 0
@@ -45,7 +44,7 @@ def __init__(self,
self.id = random_policy_id()
self.path = path or '/'
self.default_version_id = default_version_id or 'v1'
- self.versions = []
+ self.versions = [PolicyVersion(self.arn, document, True)]
self.create_datetime = datetime.now(pytz.utc)
self.update_datetime = datetime.now(pytz.utc)
@@ -72,11 +71,11 @@ class ManagedPolicy(Policy):
def attach_to(self, obj):
self.attachment_count += 1
- obj.managed_policies[self.name] = self
+ obj.managed_policies[self.arn] = self
def detach_from(self, obj):
self.attachment_count -= 1
- del obj.managed_policies[self.name]
+ del obj.managed_policies[self.arn]
@property
def arn(self):
@@ -477,11 +476,13 @@ def create_policy(self, description, path, policy_document, policy_name):
document=policy_document,
path=path,
)
- self.managed_policies[policy.name] = policy
+ self.managed_policies[policy.arn] = policy
return policy
- def get_policy(self, policy_name):
- return self.managed_policies.get(policy_name)
+ def get_policy(self, policy_arn):
+ if policy_arn not in self.managed_policies:
+ raise IAMNotFoundException("Policy {0} not found".format(policy_arn))
+ return self.managed_policies.get(policy_arn)
def list_attached_role_policies(self, role_name, marker=None, max_items=100, path_prefix='/'):
policies = self.get_role(role_name).managed_policies.values()
@@ -575,21 +576,18 @@ def list_role_policies(self, role_name):
return role.policies.keys()
def create_policy_version(self, policy_arn, policy_document, set_as_default):
- policy_name = policy_arn.split(':')[-1]
- policy_name = policy_name.split('/')[1]
- policy = self.get_policy(policy_name)
+ policy = self.get_policy(policy_arn)
if not policy:
raise IAMNotFoundException("Policy not found")
version = PolicyVersion(policy_arn, policy_document, set_as_default)
policy.versions.append(version)
+ version.version_id = 'v{0}'.format(len(policy.versions))
if set_as_default:
policy.default_version_id = version.version_id
return version
def get_policy_version(self, policy_arn, version_id):
- policy_name = policy_arn.split(':')[-1]
- policy_name = policy_name.split('/')[1]
- policy = self.get_policy(policy_name)
+ policy = self.get_policy(policy_arn)
if not policy:
raise IAMNotFoundException("Policy not found")
for version in policy.versions:
@@ -598,19 +596,18 @@ def get_policy_version(self, policy_arn, version_id):
raise IAMNotFoundException("Policy version not found")
def list_policy_versions(self, policy_arn):
- policy_name = policy_arn.split(':')[-1]
- policy_name = policy_name.split('/')[1]
- policy = self.get_policy(policy_name)
+ policy = self.get_policy(policy_arn)
if not policy:
raise IAMNotFoundException("Policy not found")
return policy.versions
def delete_policy_version(self, policy_arn, version_id):
- policy_name = policy_arn.split(':')[-1]
- policy_name = policy_name.split('/')[1]
- policy = self.get_policy(policy_name)
+ policy = self.get_policy(policy_arn)
if not policy:
raise IAMNotFoundException("Policy not found")
+ if version_id == policy.default_version_id:
+ raise IAMConflictException(
+ "Cannot delete the default version of a policy")
for i, v in enumerate(policy.versions):
if v.version_id == version_id:
del policy.versions[i]
@@ -905,5 +902,32 @@ def create_account_alias(self, alias):
def delete_account_alias(self, alias):
self.account_aliases = []
+ def get_account_authorization_details(self, filter):
+ policies = self.managed_policies.values()
+ local_policies = set(policies) - set(aws_managed_policies)
+ returned_policies = []
+
+ if len(filter) == 0:
+ return {
+ 'instance_profiles': self.instance_profiles.values(),
+ 'roles': self.roles.values(),
+ 'groups': self.groups.values(),
+ 'users': self.users.values(),
+ 'managed_policies': self.managed_policies.values()
+ }
+
+ if 'AWSManagedPolicy' in filter:
+ returned_policies = aws_managed_policies
+ if 'LocalManagedPolicy' in filter:
+ returned_policies = returned_policies + list(local_policies)
+
+ return {
+ 'instance_profiles': self.instance_profiles.values(),
+ 'roles': self.roles.values() if 'Role' in filter else [],
+ 'groups': self.groups.values() if 'Group' in filter else [],
+ 'users': self.users.values() if 'User' in filter else [],
+ 'managed_policies': returned_policies
+ }
+
iam_backend = IAMBackend()
diff --git a/moto/iam/responses.py b/moto/iam/responses.py
index 786afab08253..9e8d21396f8d 100644
--- a/moto/iam/responses.py
+++ b/moto/iam/responses.py
@@ -58,6 +58,12 @@ def create_policy(self):
template = self.response_template(CREATE_POLICY_TEMPLATE)
return template.render(policy=policy)
+ def get_policy(self):
+ policy_arn = self._get_param('PolicyArn')
+ policy = iam_backend.get_policy(policy_arn)
+ template = self.response_template(GET_POLICY_TEMPLATE)
+ return template.render(policy=policy)
+
def list_attached_role_policies(self):
marker = self._get_param('Marker')
max_items = self._get_int_param('MaxItems', 100)
@@ -534,6 +540,18 @@ def delete_account_alias(self):
template = self.response_template(DELETE_ACCOUNT_ALIAS_TEMPLATE)
return template.render()
+ def get_account_authorization_details(self):
+ filter_param = self._get_multi_param('Filter.member')
+ account_details = iam_backend.get_account_authorization_details(filter_param)
+ template = self.response_template(GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE)
+ return template.render(
+ instance_profiles=account_details['instance_profiles'],
+ policies=account_details['managed_policies'],
+ users=account_details['users'],
+ groups=account_details['groups'],
+ roles=account_details['roles']
+ )
+
ATTACH_ROLE_POLICY_TEMPLATE = """
@@ -589,6 +607,25 @@ def delete_account_alias(self):
"""
+GET_POLICY_TEMPLATE = """
+
+
+ {{ policy.name }}
+ {{ policy.description }}
+ {{ policy.default_version_id }}
+ {{ policy.id }}
+ {{ policy.path }}
+ {{ policy.arn }}
+ {{ policy.attachment_count }}
+ {{ policy.create_datetime.isoformat() }}
+ {{ policy.update_datetime.isoformat() }}
+
+
+
+ 684f0917-3d22-11e4-a4a0-cffb9EXAMPLE
+
+"""
+
LIST_ATTACHED_ROLE_POLICIES_TEMPLATE = """
{% if marker is none %}
@@ -1309,3 +1346,144 @@ def delete_account_alias(self):
7a62c49f-347e-4fc4-9331-6e8eEXAMPLE
"""
+
+
+LIST_GROUPS_FOR_USER_TEMPLATE = """
+
+
+ {% for group in groups %}
+
+ {{ group.path }}
+ {{ group.name }}
+ {{ group.id }}
+ {{ group.arn }}
+
+ {% endfor %}
+
+ false
+
+
+ 7a62c49f-347e-4fc4-9331-6e8eEXAMPLE
+
+"""
+
+
+GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """
+
+ false
+
+ {% for user in users %}
+
+
+
+ {{ user.id }}
+ {{ user.path }}
+ {{ user.name }}
+ {{ user.arn }}
+ 2012-05-09T15:45:35Z
+
+ {% endfor %}
+
+
+ EXAMPLEkakv9BCuUNFDtxWSyfzetYwEx2ADc8dnzfvERF5S6YMvXKx41t6gCl/eeaCX3Jo94/
+ bKqezEAg8TEVS99EKFLxm3jtbpl25FDWEXAMPLE
+
+
+ {% for group in groups %}
+
+ {{ group.id }}
+
+ {% for policy in group.managed_policies %}
+
+ {{ policy.name }}
+ {{ policy.arn }}
+
+ {% endfor %}
+
+ {{ group.name }}
+ {{ group.path }}
+ {{ group.arn }}
+ 2012-05-09T16:27:11Z
+
+
+ {% endfor %}
+
+
+ {% for role in roles %}
+
+
+
+ {% for policy in role.managed_policies %}
+
+ {{ policy.name }}
+ {{ policy.arn }}
+
+ {% endfor %}
+
+
+ {% for profile in instance_profiles %}
+
+ {{ profile.id }}
+
+ {% for role in profile.roles %}
+
+ {{ role.path }}
+ {{ role.arn }}
+ {{ role.name }}
+ {{ role.assume_role_policy_document }}
+ 2012-05-09T15:45:35Z
+ {{ role.id }}
+
+ {% endfor %}
+
+ {{ profile.name }}
+ {{ profile.path }}
+ {{ profile.arn }}
+ 2012-05-09T16:27:11Z
+
+ {% endfor %}
+
+ {{ role.path }}
+ {{ role.arn }}
+ {{ role.name }}
+ {{ role.assume_role_policy_document }}
+ 2014-07-30T17:09:20Z
+ {{ role.id }}
+
+ {% endfor %}
+
+
+ {% for policy in policies %}
+
+ {{ policy.name }}
+ {{ policy.default_version_id }}
+ {{ policy.id }}
+ {{ policy.path }}
+
+
+
+ {"Version":"2012-10-17","Statement":{"Effect":"Allow",
+ "Action":["iam:CreatePolicy","iam:CreatePolicyVersion",
+ "iam:DeletePolicy","iam:DeletePolicyVersion","iam:GetPolicy",
+ "iam:GetPolicyVersion","iam:ListPolicies",
+ "iam:ListPolicyVersions","iam:SetDefaultPolicyVersion"],
+ "Resource":"*"}}
+
+ true
+ v1
+ 2012-05-09T16:27:11Z
+
+
+ {{ policy.arn }}
+ 1
+ 2012-05-09T16:27:11Z
+ true
+ 2012-05-09T16:27:11Z
+
+ {% endfor %}
+
+
+
+ 92e79ae7-7399-11e4-8c85-4b53eEXAMPLE
+
+"""
diff --git a/moto/iot/models.py b/moto/iot/models.py
index ce7a4cf57eb1..c36bb985f50d 100644
--- a/moto/iot/models.py
+++ b/moto/iot/models.py
@@ -1,14 +1,17 @@
from __future__ import unicode_literals
-import time
-import boto3
-import string
-import random
+
import hashlib
-import uuid
+import random
import re
+import string
+import time
+import uuid
+from collections import OrderedDict
from datetime import datetime
+
+import boto3
+
from moto.core import BaseBackend, BaseModel
-from collections import OrderedDict
from .exceptions import (
ResourceNotFoundException,
InvalidRequestException,
@@ -271,15 +274,37 @@ def create_thing_type(self, thing_type_name, thing_type_properties):
def list_thing_types(self, thing_type_name=None):
if thing_type_name:
- # It's wierd but thing_type_name is filterd by forward match, not complete match
+ # It's weird but thing_type_name is filtered by forward match, not complete match
return [_ for _ in self.thing_types.values() if _.thing_type_name.startswith(thing_type_name)]
- thing_types = self.thing_types.values()
- return thing_types
+ return self.thing_types.values()
+
+ def list_things(self, attribute_name, attribute_value, thing_type_name, max_results, token):
+ all_things = [_.to_dict() for _ in self.things.values()]
+ if attribute_name is not None and thing_type_name is not None:
+ filtered_things = list(filter(lambda elem:
+ attribute_name in elem["attributes"] and
+ elem["attributes"][attribute_name] == attribute_value and
+ "thingTypeName" in elem and
+ elem["thingTypeName"] == thing_type_name, all_things))
+ elif attribute_name is not None and thing_type_name is None:
+ filtered_things = list(filter(lambda elem:
+ attribute_name in elem["attributes"] and
+ elem["attributes"][attribute_name] == attribute_value, all_things))
+ elif attribute_name is None and thing_type_name is not None:
+ filtered_things = list(
+ filter(lambda elem: "thingTypeName" in elem and elem["thingTypeName"] == thing_type_name, all_things))
+ else:
+ filtered_things = all_things
+
+ if token is None:
+ things = filtered_things[0:max_results]
+ next_token = str(max_results) if len(filtered_things) > max_results else None
+ else:
+ token = int(token)
+ things = filtered_things[token:token + max_results]
+ next_token = str(token + max_results) if len(filtered_things) > token + max_results else None
- def list_things(self, attribute_name, attribute_value, thing_type_name):
- # TODO: filter by attributess or thing_type
- things = self.things.values()
- return things
+ return things, next_token
def describe_thing(self, thing_name):
things = [_ for _ in self.things.values() if _.thing_name == thing_name]
diff --git a/moto/iot/responses.py b/moto/iot/responses.py
index fcdf12f7875e..006c4c4cc741 100644
--- a/moto/iot/responses.py
+++ b/moto/iot/responses.py
@@ -1,7 +1,9 @@
from __future__ import unicode_literals
+
+import json
+
from moto.core.responses import BaseResponse
from .models import iot_backends
-import json
class IoTResponse(BaseResponse):
@@ -32,30 +34,39 @@ def create_thing_type(self):
return json.dumps(dict(thingTypeName=thing_type_name, thingTypeArn=thing_type_arn))
def list_thing_types(self):
- # previous_next_token = self._get_param("nextToken")
- # max_results = self._get_int_param("maxResults")
+ previous_next_token = self._get_param("nextToken")
+ max_results = self._get_int_param("maxResults", 50) # not the default, but makes testing easier
thing_type_name = self._get_param("thingTypeName")
thing_types = self.iot_backend.list_thing_types(
thing_type_name=thing_type_name
)
- # TODO: implement pagination in the future
- next_token = None
- return json.dumps(dict(thingTypes=[_.to_dict() for _ in thing_types], nextToken=next_token))
+
+ thing_types = [_.to_dict() for _ in thing_types]
+ if previous_next_token is None:
+ result = thing_types[0:max_results]
+ next_token = str(max_results) if len(thing_types) > max_results else None
+ else:
+ token = int(previous_next_token)
+ result = thing_types[token:token + max_results]
+ next_token = str(token + max_results) if len(thing_types) > token + max_results else None
+
+ return json.dumps(dict(thingTypes=result, nextToken=next_token))
def list_things(self):
- # previous_next_token = self._get_param("nextToken")
- # max_results = self._get_int_param("maxResults")
+ previous_next_token = self._get_param("nextToken")
+ max_results = self._get_int_param("maxResults", 50) # not the default, but makes testing easier
attribute_name = self._get_param("attributeName")
attribute_value = self._get_param("attributeValue")
thing_type_name = self._get_param("thingTypeName")
- things = self.iot_backend.list_things(
+ things, next_token = self.iot_backend.list_things(
attribute_name=attribute_name,
attribute_value=attribute_value,
thing_type_name=thing_type_name,
+ max_results=max_results,
+ token=previous_next_token
)
- # TODO: implement pagination in the future
- next_token = None
- return json.dumps(dict(things=[_.to_dict() for _ in things], nextToken=next_token))
+
+ return json.dumps(dict(things=things, nextToken=next_token))
def describe_thing(self):
thing_name = self._get_param("thingName")
diff --git a/moto/logs/exceptions.py b/moto/logs/exceptions.py
index cc83452ea292..bb02eced3337 100644
--- a/moto/logs/exceptions.py
+++ b/moto/logs/exceptions.py
@@ -29,5 +29,5 @@ def __init__(self):
self.code = 400
super(ResourceAlreadyExistsException, self).__init__(
'ResourceAlreadyExistsException',
- 'The specified resource already exists.'
+ 'The specified log group already exists'
)
diff --git a/moto/logs/models.py b/moto/logs/models.py
index 3e1c7b955b55..a4ff9db46114 100644
--- a/moto/logs/models.py
+++ b/moto/logs/models.py
@@ -86,7 +86,7 @@ def put_log_events(self, log_group_name, log_stream_name, log_events, sequence_t
self.events += [LogEvent(self.lastIngestionTime, log_event) for log_event in log_events]
self.uploadSequenceToken += 1
- return self.uploadSequenceToken
+ return '{:056d}'.format(self.uploadSequenceToken)
def get_log_events(self, log_group_name, log_stream_name, start_time, end_time, limit, next_token, start_from_head):
def filter_func(event):
diff --git a/moto/packages/httpretty/core.py b/moto/packages/httpretty/core.py
index e0f3a7e696f9..8ad9168a5a66 100644
--- a/moto/packages/httpretty/core.py
+++ b/moto/packages/httpretty/core.py
@@ -85,6 +85,7 @@
old_ssl_wrap_socket = None
old_sslwrap_simple = None
old_sslsocket = None
+old_sslcontext_wrap_socket = None
if PY3: # pragma: no cover
basestring = (bytes, str)
@@ -100,6 +101,10 @@
if not PY3:
old_sslwrap_simple = ssl.sslwrap_simple
old_sslsocket = ssl.SSLSocket
+ try:
+ old_sslcontext_wrap_socket = ssl.SSLContext.wrap_socket
+ except AttributeError:
+ pass
except ImportError: # pragma: no cover
ssl = None
@@ -281,7 +286,7 @@ def getpeercert(self, *a, **kw):
return {
'notAfter': shift.strftime('%b %d %H:%M:%S GMT'),
'subjectAltName': (
- ('DNS', '*%s' % self._host),
+ ('DNS', '*.%s' % self._host),
('DNS', self._host),
('DNS', '*'),
),
@@ -772,7 +777,7 @@ class URIMatcher(object):
def __init__(self, uri, entries, match_querystring=False):
self._match_querystring = match_querystring
- if type(uri).__name__ == 'SRE_Pattern':
+ if type(uri).__name__ in ('SRE_Pattern', 'Pattern'):
self.regex = uri
result = urlsplit(uri.pattern)
if result.scheme == 'https':
@@ -1012,6 +1017,10 @@ def disable(cls):
if ssl:
ssl.wrap_socket = old_ssl_wrap_socket
ssl.SSLSocket = old_sslsocket
+ try:
+ ssl.SSLContext.wrap_socket = old_sslcontext_wrap_socket
+ except AttributeError:
+ pass
ssl.__dict__['wrap_socket'] = old_ssl_wrap_socket
ssl.__dict__['SSLSocket'] = old_sslsocket
@@ -1058,6 +1067,14 @@ def enable(cls):
ssl.wrap_socket = fake_wrap_socket
ssl.SSLSocket = FakeSSLSocket
+ try:
+ def fake_sslcontext_wrap_socket(cls, *args, **kwargs):
+ return fake_wrap_socket(*args, **kwargs)
+
+ ssl.SSLContext.wrap_socket = fake_sslcontext_wrap_socket
+ except AttributeError:
+ pass
+
ssl.__dict__['wrap_socket'] = fake_wrap_socket
ssl.__dict__['SSLSocket'] = FakeSSLSocket
diff --git a/moto/rds/models.py b/moto/rds/models.py
index 77deff09d9ea..feecefe0ce79 100644
--- a/moto/rds/models.py
+++ b/moto/rds/models.py
@@ -48,6 +48,10 @@ def __init__(self, **kwargs):
if self.publicly_accessible is None:
self.publicly_accessible = True
+ self.copy_tags_to_snapshot = kwargs.get("copy_tags_to_snapshot")
+ if self.copy_tags_to_snapshot is None:
+ self.copy_tags_to_snapshot = False
+
self.backup_retention_period = kwargs.get("backup_retention_period")
if self.backup_retention_period is None:
self.backup_retention_period = 1
@@ -137,6 +141,7 @@ def create_from_cloudformation_json(cls, resource_name, cloudformation_json, reg
"multi_az": properties.get("MultiAZ"),
"port": properties.get('Port', 3306),
"publicly_accessible": properties.get("PubliclyAccessible"),
+ "copy_tags_to_snapshot": properties.get("CopyTagsToSnapshot"),
"region": region_name,
"security_groups": security_groups,
"storage_encrypted": properties.get("StorageEncrypted"),
@@ -217,6 +222,7 @@ def to_xml(self):
{% endif %}
{{ database.publicly_accessible }}
+ {{ database.copy_tags_to_snapshot }}
{{ database.auto_minor_version_upgrade }}
{{ database.allocated_storage }}
{{ database.storage_encrypted }}
diff --git a/moto/rds2/models.py b/moto/rds2/models.py
index 3fc4b6d659d8..fee004f7698c 100644
--- a/moto/rds2/models.py
+++ b/moto/rds2/models.py
@@ -73,6 +73,9 @@ def __init__(self, **kwargs):
self.publicly_accessible = kwargs.get("publicly_accessible")
if self.publicly_accessible is None:
self.publicly_accessible = True
+ self.copy_tags_to_snapshot = kwargs.get("copy_tags_to_snapshot")
+ if self.copy_tags_to_snapshot is None:
+ self.copy_tags_to_snapshot = False
self.backup_retention_period = kwargs.get("backup_retention_period")
if self.backup_retention_period is None:
self.backup_retention_period = 1
@@ -208,6 +211,7 @@ def to_xml(self):
{% endif %}
{{ database.publicly_accessible }}
+ {{ database.copy_tags_to_snapshot }}
{{ database.auto_minor_version_upgrade }}
{{ database.allocated_storage }}
{{ database.storage_encrypted }}
@@ -304,6 +308,7 @@ def create_from_cloudformation_json(cls, resource_name, cloudformation_json, reg
"db_parameter_group_name": properties.get('DBParameterGroupName'),
"port": properties.get('Port', 3306),
"publicly_accessible": properties.get("PubliclyAccessible"),
+ "copy_tags_to_snapshot": properties.get("CopyTagsToSnapshot"),
"region": region_name,
"security_groups": security_groups,
"storage_encrypted": properties.get("StorageEncrypted"),
@@ -362,6 +367,7 @@ def to_json(self):
"PreferredBackupWindow": "{{ database.preferred_backup_window }}",
"PreferredMaintenanceWindow": "{{ database.preferred_maintenance_window }}",
"PubliclyAccessible": "{{ database.publicly_accessible }}",
+ "CopyTagsToSnapshot": "{{ database.copy_tags_to_snapshot }}",
"AllocatedStorage": "{{ database.allocated_storage }}",
"Endpoint": {
"Address": "{{ database.address }}",
@@ -411,10 +417,10 @@ def delete(self, region_name):
class Snapshot(BaseModel):
- def __init__(self, database, snapshot_id, tags=None):
+ def __init__(self, database, snapshot_id, tags):
self.database = database
self.snapshot_id = snapshot_id
- self.tags = tags or []
+ self.tags = tags
self.created_at = iso_8601_datetime_with_milliseconds(datetime.datetime.now())
@property
@@ -456,6 +462,20 @@ def to_xml(self):
""")
return template.render(snapshot=self, database=self.database)
+ def get_tags(self):
+ return self.tags
+
+ def add_tags(self, tags):
+ new_keys = [tag_set['Key'] for tag_set in tags]
+ self.tags = [tag_set for tag_set in self.tags if tag_set[
+ 'Key'] not in new_keys]
+ self.tags.extend(tags)
+ return self.tags
+
+ def remove_tags(self, tag_keys):
+ self.tags = [tag_set for tag_set in self.tags if tag_set[
+ 'Key'] not in tag_keys]
+
class SecurityGroup(BaseModel):
@@ -691,6 +711,10 @@ def create_snapshot(self, db_instance_identifier, db_snapshot_identifier, tags=N
raise DBSnapshotAlreadyExistsError(db_snapshot_identifier)
if len(self.snapshots) >= int(os.environ.get('MOTO_RDS_SNAPSHOT_LIMIT', '100')):
raise SnapshotQuotaExceededError()
+ if tags is None:
+ tags = list()
+ if database.copy_tags_to_snapshot and not tags:
+ tags = database.get_tags()
snapshot = Snapshot(database, db_snapshot_identifier, tags)
self.snapshots[db_snapshot_identifier] = snapshot
return snapshot
@@ -787,13 +811,13 @@ def find_db_from_id(self, db_id):
def delete_database(self, db_instance_identifier, db_snapshot_name=None):
if db_instance_identifier in self.databases:
+ if db_snapshot_name:
+ self.create_snapshot(db_instance_identifier, db_snapshot_name)
database = self.databases.pop(db_instance_identifier)
if database.is_replica:
primary = self.find_db_from_id(database.source_db_identifier)
primary.remove_replica(database)
database.status = 'deleting'
- if db_snapshot_name:
- self.snapshots[db_snapshot_name] = Snapshot(database, db_snapshot_name)
return database
else:
raise DBInstanceNotFoundError(db_instance_identifier)
@@ -1028,8 +1052,8 @@ def list_tags_for_resource(self, arn):
if resource_name in self.security_groups:
return self.security_groups[resource_name].get_tags()
elif resource_type == 'snapshot': # DB Snapshot
- # TODO: Complete call to tags on resource type DB Snapshot
- return []
+ if resource_name in self.snapshots:
+ return self.snapshots[resource_name].get_tags()
elif resource_type == 'subgrp': # DB subnet group
if resource_name in self.subnet_groups:
return self.subnet_groups[resource_name].get_tags()
@@ -1059,7 +1083,8 @@ def remove_tags_from_resource(self, arn, tag_keys):
if resource_name in self.security_groups:
return self.security_groups[resource_name].remove_tags(tag_keys)
elif resource_type == 'snapshot': # DB Snapshot
- return None
+ if resource_name in self.snapshots:
+ return self.snapshots[resource_name].remove_tags(tag_keys)
elif resource_type == 'subgrp': # DB subnet group
if resource_name in self.subnet_groups:
return self.subnet_groups[resource_name].remove_tags(tag_keys)
@@ -1088,7 +1113,8 @@ def add_tags_to_resource(self, arn, tags):
if resource_name in self.security_groups:
return self.security_groups[resource_name].add_tags(tags)
elif resource_type == 'snapshot': # DB Snapshot
- return []
+ if resource_name in self.snapshots:
+ return self.snapshots[resource_name].add_tags(tags)
elif resource_type == 'subgrp': # DB subnet group
if resource_name in self.subnet_groups:
return self.subnet_groups[resource_name].add_tags(tags)
diff --git a/moto/rds2/responses.py b/moto/rds2/responses.py
index eddb0042b825..66d4e0c52286 100644
--- a/moto/rds2/responses.py
+++ b/moto/rds2/responses.py
@@ -19,6 +19,7 @@ def _get_db_kwargs(self):
"allocated_storage": self._get_int_param('AllocatedStorage'),
"availability_zone": self._get_param("AvailabilityZone"),
"backup_retention_period": self._get_param("BackupRetentionPeriod"),
+ "copy_tags_to_snapshot": self._get_param("CopyTagsToSnapshot"),
"db_instance_class": self._get_param('DBInstanceClass'),
"db_instance_identifier": self._get_param('DBInstanceIdentifier'),
"db_name": self._get_param("DBName"),
@@ -159,7 +160,7 @@ def reboot_db_instance(self):
def create_db_snapshot(self):
db_instance_identifier = self._get_param('DBInstanceIdentifier')
db_snapshot_identifier = self._get_param('DBSnapshotIdentifier')
- tags = self._get_param('Tags', [])
+ tags = self.unpack_complex_list_params('Tags.Tag', ('Key', 'Value'))
snapshot = self.backend.create_snapshot(db_instance_identifier, db_snapshot_identifier, tags)
template = self.response_template(CREATE_SNAPSHOT_TEMPLATE)
return template.render(snapshot=snapshot)
diff --git a/moto/redshift/models.py b/moto/redshift/models.py
index 4eafcfc7992d..70cbb95cb7cd 100644
--- a/moto/redshift/models.py
+++ b/moto/redshift/models.py
@@ -78,6 +78,7 @@ def __init__(self, redshift_backend, cluster_identifier, node_type, master_usern
super(Cluster, self).__init__(region_name, tags)
self.redshift_backend = redshift_backend
self.cluster_identifier = cluster_identifier
+ self.create_time = iso_8601_datetime_with_milliseconds(datetime.datetime.now())
self.status = 'available'
self.node_type = node_type
self.master_username = master_username
@@ -237,6 +238,7 @@ def to_json(self):
"Address": self.endpoint,
"Port": self.port
},
+ 'ClusterCreateTime': self.create_time,
"PendingModifiedValues": [],
"Tags": self.tags,
"IamRoles": [{
diff --git a/moto/s3/models.py b/moto/s3/models.py
index cf5628141350..f3994b5d85a0 100644
--- a/moto/s3/models.py
+++ b/moto/s3/models.py
@@ -27,8 +27,14 @@ class FakeDeleteMarker(BaseModel):
def __init__(self, key):
self.key = key
+ self.name = key.name
+ self.last_modified = datetime.datetime.utcnow()
self._version_id = key.version_id + 1
+ @property
+ def last_modified_ISO8601(self):
+ return iso_8601_datetime_with_milliseconds(self.last_modified)
+
@property
def version_id(self):
return self._version_id
@@ -630,10 +636,7 @@ def get_bucket_latest_versions(self, bucket_name):
latest_versions = {}
for version in versions:
- if isinstance(version, FakeDeleteMarker):
- name = version.key.name
- else:
- name = version.name
+ name = version.name
version_id = version.version_id
maximum_version_per_key[name] = max(
version_id,
diff --git a/moto/s3/responses.py b/moto/s3/responses.py
index 5e7cf0fe5af9..f8dc7e42b990 100755
--- a/moto/s3/responses.py
+++ b/moto/s3/responses.py
@@ -1273,10 +1273,10 @@ def _key_response_post(self, request, body, bucket_name, query, key_name, header
{% endfor %}
{% for marker in delete_marker_list %}
- {{ marker.key.name }}
+ {{ marker.name }}
{{ marker.version_id }}
- {% if latest_versions[marker.key.name] == marker.version_id %}true{% else %}false{% endif %}
- {{ marker.key.last_modified_ISO8601 }}
+ {% if latest_versions[marker.name] == marker.version_id %}true{% else %}false{% endif %}
+ {{ marker.last_modified_ISO8601 }}
75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a
webfile
diff --git a/moto/secretsmanager/models.py b/moto/secretsmanager/models.py
index 3923f90b0c78..1404a0ec81c2 100644
--- a/moto/secretsmanager/models.py
+++ b/moto/secretsmanager/models.py
@@ -33,20 +33,27 @@ def __init__(self, region_name=None, **kwargs):
self.name = kwargs.get('name', '')
self.createdate = int(time.time())
self.secret_string = ''
+ self.rotation_enabled = False
+ self.rotation_lambda_arn = ''
+ self.auto_rotate_after_days = 0
+ self.version_id = ''
def reset(self):
region_name = self.region
self.__dict__ = {}
self.__init__(region_name)
+ def _is_valid_identifier(self, identifier):
+ return identifier in (self.name, self.secret_id)
+
def get_secret_value(self, secret_id, version_id, version_stage):
- if self.secret_id == '':
+ if not self._is_valid_identifier(secret_id):
raise ResourceNotFoundException()
response = json.dumps({
"ARN": secret_arn(self.region, self.secret_id),
- "Name": self.secret_id,
+ "Name": self.name,
"VersionId": "A435958A-D821-4193-B719-B7769357AER4",
"SecretString": self.secret_string,
"VersionStages": [
@@ -61,15 +68,94 @@ def create_secret(self, name, secret_string, **kwargs):
self.secret_string = secret_string
self.secret_id = name
+ self.name = name
response = json.dumps({
"ARN": secret_arn(self.region, name),
- "Name": self.secret_id,
+ "Name": self.name,
"VersionId": "A435958A-D821-4193-B719-B7769357AER4",
})
return response
+ def describe_secret(self, secret_id):
+ if not self._is_valid_identifier(secret_id):
+ raise ResourceNotFoundException
+
+ response = json.dumps({
+ "ARN": secret_arn(self.region, self.secret_id),
+ "Name": self.name,
+ "Description": "",
+ "KmsKeyId": "",
+ "RotationEnabled": self.rotation_enabled,
+ "RotationLambdaARN": self.rotation_lambda_arn,
+ "RotationRules": {
+ "AutomaticallyAfterDays": self.auto_rotate_after_days
+ },
+ "LastRotatedDate": None,
+ "LastChangedDate": None,
+ "LastAccessedDate": None,
+ "DeletedDate": None,
+ "Tags": [
+ {
+ "Key": "",
+ "Value": ""
+ },
+ ]
+ })
+
+ return response
+
+ def rotate_secret(self, secret_id, client_request_token=None,
+ rotation_lambda_arn=None, rotation_rules=None):
+
+ rotation_days = 'AutomaticallyAfterDays'
+
+ if not self._is_valid_identifier(secret_id):
+ raise ResourceNotFoundException
+
+ if client_request_token:
+ token_length = len(client_request_token)
+ if token_length < 32 or token_length > 64:
+ msg = (
+ 'ClientRequestToken '
+ 'must be 32-64 characters long.'
+ )
+ raise InvalidParameterException(msg)
+
+ if rotation_lambda_arn:
+ if len(rotation_lambda_arn) > 2048:
+ msg = (
+ 'RotationLambdaARN '
+ 'must <= 2048 characters long.'
+ )
+ raise InvalidParameterException(msg)
+
+ if rotation_rules:
+ if rotation_days in rotation_rules:
+ rotation_period = rotation_rules[rotation_days]
+ if rotation_period < 1 or rotation_period > 1000:
+ msg = (
+ 'RotationRules.AutomaticallyAfterDays '
+ 'must be within 1-1000.'
+ )
+ raise InvalidParameterException(msg)
+
+ self.version_id = client_request_token or ''
+ self.rotation_lambda_arn = rotation_lambda_arn or ''
+ if rotation_rules:
+ self.auto_rotate_after_days = rotation_rules.get(rotation_days, 0)
+ if self.auto_rotate_after_days > 0:
+ self.rotation_enabled = True
+
+ response = json.dumps({
+ "ARN": secret_arn(self.region, self.secret_id),
+ "Name": self.name,
+ "VersionId": self.version_id
+ })
+
+ return response
+
def get_random_password(self, password_length,
exclude_characters, exclude_numbers,
exclude_punctuation, exclude_uppercase,
diff --git a/moto/secretsmanager/responses.py b/moto/secretsmanager/responses.py
index 06387560a2c9..b8b6872a8fbc 100644
--- a/moto/secretsmanager/responses.py
+++ b/moto/secretsmanager/responses.py
@@ -44,3 +44,21 @@ def get_random_password(self):
include_space=include_space,
require_each_included_type=require_each_included_type
)
+
+ def describe_secret(self):
+ secret_id = self._get_param('SecretId')
+ return secretsmanager_backends[self.region].describe_secret(
+ secret_id=secret_id
+ )
+
+ def rotate_secret(self):
+ client_request_token = self._get_param('ClientRequestToken')
+ rotation_lambda_arn = self._get_param('RotationLambdaARN')
+ rotation_rules = self._get_param('RotationRules')
+ secret_id = self._get_param('SecretId')
+ return secretsmanager_backends[self.region].rotate_secret(
+ secret_id=secret_id,
+ client_request_token=client_request_token,
+ rotation_lambda_arn=rotation_lambda_arn,
+ rotation_rules=rotation_rules
+ )
diff --git a/moto/server.py b/moto/server.py
index aad47757afac..ba247047845a 100644
--- a/moto/server.py
+++ b/moto/server.py
@@ -34,6 +34,9 @@ def __init__(self, create_app, service=None):
self.service = service
def get_backend_for_host(self, host):
+ if host == 'moto_api':
+ return host
+
if self.service:
return self.service
diff --git a/moto/ses/models.py b/moto/ses/models.py
index 3dced60f2fa8..71fe9d9a14e9 100644
--- a/moto/ses/models.py
+++ b/moto/ses/models.py
@@ -49,7 +49,8 @@ def __init__(self):
self.sent_messages = []
self.sent_message_count = 0
- def _is_verified_address(self, address):
+ def _is_verified_address(self, source):
+ _, address = parseaddr(source)
if address in self.addresses:
return True
user, host = address.split('@', 1)
diff --git a/moto/sqs/models.py b/moto/sqs/models.py
index b8db356e9f01..f3262a988054 100644
--- a/moto/sqs/models.py
+++ b/moto/sqs/models.py
@@ -385,10 +385,22 @@ def reset(self):
def create_queue(self, name, **kwargs):
queue = self.queues.get(name)
if queue:
- # Queue already exist. If attributes don't match, throw error
- for key, value in kwargs.items():
- if getattr(queue, camelcase_to_underscores(key)) != value:
- raise QueueAlreadyExists("The specified queue already exists.")
+ try:
+ kwargs.pop('region')
+ except KeyError:
+ pass
+
+ new_queue = Queue(name, region=self.region_name, **kwargs)
+
+ queue_attributes = queue.attributes
+ new_queue_attributes = new_queue.attributes
+
+ for key in ['CreatedTimestamp', 'LastModifiedTimestamp']:
+ queue_attributes.pop(key)
+ new_queue_attributes.pop(key)
+
+ if queue_attributes != new_queue_attributes:
+ raise QueueAlreadyExists("The specified queue already exists.")
else:
try:
kwargs.pop('region')
diff --git a/moto/sqs/responses.py b/moto/sqs/responses.py
index c489d7118270..b4f64b14e439 100644
--- a/moto/sqs/responses.py
+++ b/moto/sqs/responses.py
@@ -336,7 +336,7 @@ def receive_message(self):
try:
wait_time = int(self.querystring.get("WaitTimeSeconds")[0])
except TypeError:
- wait_time = queue.receive_message_wait_time_seconds
+ wait_time = int(queue.receive_message_wait_time_seconds)
if wait_time < 0 or wait_time > 20:
return self._error(
diff --git a/moto/ssm/models.py b/moto/ssm/models.py
index 656a14839829..f16a7d981fab 100644
--- a/moto/ssm/models.py
+++ b/moto/ssm/models.py
@@ -5,10 +5,12 @@
from moto.core import BaseBackend, BaseModel
from moto.core.exceptions import RESTError
from moto.ec2 import ec2_backends
+from moto.cloudformation import cloudformation_backends
import datetime
import time
import uuid
+import itertools
class Parameter(BaseModel):
@@ -67,7 +69,7 @@ def __init__(self, comment='', document_name='', timeout_seconds=MAX_TIMEOUT_SEC
instance_ids=None, max_concurrency='', max_errors='',
notification_config=None, output_s3_bucket_name='',
output_s3_key_prefix='', output_s3_region='', parameters=None,
- service_role_arn='', targets=None):
+ service_role_arn='', targets=None, backend_region='us-east-1'):
if instance_ids is None:
instance_ids = []
@@ -88,9 +90,9 @@ def __init__(self, comment='', document_name='', timeout_seconds=MAX_TIMEOUT_SEC
self.status = 'Success'
self.status_details = 'Details placeholder'
- now = datetime.datetime.now()
- self.requested_date_time = now.isoformat()
- expires_after = now + datetime.timedelta(0, timeout_seconds)
+ self.requested_date_time = datetime.datetime.now()
+ self.requested_date_time_iso = self.requested_date_time.isoformat()
+ expires_after = self.requested_date_time + datetime.timedelta(0, timeout_seconds)
self.expires_after = expires_after.isoformat()
self.comment = comment
@@ -105,6 +107,32 @@ def __init__(self, comment='', document_name='', timeout_seconds=MAX_TIMEOUT_SEC
self.parameters = parameters
self.service_role_arn = service_role_arn
self.targets = targets
+ self.backend_region = backend_region
+
+ # Get instance ids from any CloudFormation stack-name targets.
+ stack_instance_ids = [self.get_instance_ids_by_stack_ids(target['Values']) for
+ target in self.targets if
+ target['Key'] == 'tag:aws:cloudformation:stack-name']
+
+ self.instance_ids += list(itertools.chain.from_iterable(stack_instance_ids))
+
+ # Create invocations with a single run command plugin.
+ self.invocations = []
+ for instance_id in self.instance_ids:
+ self.invocations.append(
+ self.invocation_response(instance_id, "aws:runShellScript"))
+
+ def get_instance_ids_by_stack_ids(self, stack_ids):
+ instance_ids = []
+ cloudformation_backend = cloudformation_backends[self.backend_region]
+ for stack_id in stack_ids:
+ stack_resources = cloudformation_backend.list_stack_resources(stack_id)
+ instance_resources = [
+ instance.id for instance in stack_resources
+ if instance.type == "AWS::EC2::Instance"]
+ instance_ids.extend(instance_resources)
+
+ return instance_ids
def response_object(self):
r = {
@@ -122,7 +150,7 @@ def response_object(self):
'OutputS3BucketName': self.output_s3_bucket_name,
'OutputS3KeyPrefix': self.output_s3_key_prefix,
'Parameters': self.parameters,
- 'RequestedDateTime': self.requested_date_time,
+ 'RequestedDateTime': self.requested_date_time_iso,
'ServiceRole': self.service_role_arn,
'Status': self.status,
'StatusDetails': self.status_details,
@@ -132,6 +160,50 @@ def response_object(self):
return r
+ def invocation_response(self, instance_id, plugin_name):
+ # Elapsed time should be the delta between the request time and now,
+ # but a hardcoded value is used instead since there is no easy way
+ # to convert a timedelta to an ISO 8601 duration string.
+ elapsed_time_iso = "PT5M"
+ elapsed_time_delta = datetime.timedelta(minutes=5)
+ end_time = self.requested_date_time + elapsed_time_delta
+
+ r = {
+ 'CommandId': self.command_id,
+ 'InstanceId': instance_id,
+ 'Comment': self.comment,
+ 'DocumentName': self.document_name,
+ 'PluginName': plugin_name,
+ 'ResponseCode': 0,
+ 'ExecutionStartDateTime': self.requested_date_time_iso,
+ 'ExecutionElapsedTime': elapsed_time_iso,
+ 'ExecutionEndDateTime': end_time.isoformat(),
+ 'Status': 'Success',
+ 'StatusDetails': 'Success',
+ 'StandardOutputContent': '',
+ 'StandardOutputUrl': '',
+ 'StandardErrorContent': '',
+ }
+
+ return r
+
+ def get_invocation(self, instance_id, plugin_name):
+ invocation = next(
+ (invocation for invocation in self.invocations
+ if invocation['InstanceId'] == instance_id), None)
+
+ if invocation is None:
+ raise RESTError(
+ 'InvocationDoesNotExist',
+ 'An error occurred (InvocationDoesNotExist) when calling the GetCommandInvocation operation')
+
+ if plugin_name is not None and invocation['PluginName'] != plugin_name:
+ raise RESTError(
+ 'InvocationDoesNotExist',
+ 'An error occurred (InvocationDoesNotExist) when calling the GetCommandInvocation operation')
+
+ return invocation
+
class SimpleSystemManagerBackend(BaseBackend):
@@ -140,6 +212,11 @@ def __init__(self):
self._resource_tags = defaultdict(lambda: defaultdict(dict))
self._commands = []
+ # figure out what region we're in
+ for region, backend in ssm_backends.items():
+ if backend == self:
+ self._region = region
+
def delete_parameter(self, name):
try:
del self._parameters[name]
@@ -260,7 +337,8 @@ def send_command(self, **kwargs):
output_s3_region=kwargs.get('OutputS3Region', ''),
parameters=kwargs.get('Parameters', {}),
service_role_arn=kwargs.get('ServiceRoleArn', ''),
- targets=kwargs.get('Targets', []))
+ targets=kwargs.get('Targets', []),
+ backend_region=self._region)
self._commands.append(command)
return {
@@ -298,6 +376,18 @@ def get_commands_by_instance_id(self, instance_id):
command for command in self._commands
if instance_id in command.instance_ids]
+ def get_command_invocation(self, **kwargs):
+ """
+ https://docs.aws.amazon.com/systems-manager/latest/APIReference/API_GetCommandInvocation.html
+ """
+
+ command_id = kwargs.get('CommandId')
+ instance_id = kwargs.get('InstanceId')
+ plugin_name = kwargs.get('PluginName', None)
+
+ command = self.get_command_by_id(command_id)
+ return command.get_invocation(instance_id, plugin_name)
+
ssm_backends = {}
for region, ec2_backend in ec2_backends.items():
diff --git a/moto/ssm/responses.py b/moto/ssm/responses.py
index fd0d8b630448..eb05e51b6374 100644
--- a/moto/ssm/responses.py
+++ b/moto/ssm/responses.py
@@ -210,3 +210,8 @@ def list_commands(self):
return json.dumps(
self.ssm_backend.list_commands(**self.request_params)
)
+
+ def get_command_invocation(self):
+ return json.dumps(
+ self.ssm_backend.get_command_invocation(**self.request_params)
+ )
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 655be0616d1d..111cd5f3ff84 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,7 +1,7 @@
-r requirements.txt
mock
nose
-sure==1.2.24
+sure==1.4.11
coverage
flake8==3.5.0
freezegun
@@ -13,5 +13,5 @@ six>=1.9
prompt-toolkit==1.0.14
click==6.7
inflection==0.3.1
-lxml==4.0.0
+lxml==4.2.3
beautifulsoup4==4.6.0
diff --git a/scripts/implementation_coverage.py b/scripts/implementation_coverage.py
index 74ce9590dd95..4e385e1d6301 100755
--- a/scripts/implementation_coverage.py
+++ b/scripts/implementation_coverage.py
@@ -6,13 +6,17 @@
import boto3
+script_dir = os.path.dirname(os.path.abspath(__file__))
+
+
def get_moto_implementation(service_name):
- if not hasattr(moto, service_name):
+ service_name_standardized = service_name.replace("-", "") if "-" in service_name else service_name
+ if not hasattr(moto, service_name_standardized):
return None
- module = getattr(moto, service_name)
+ module = getattr(moto, service_name_standardized)
if module is None:
return None
- mock = getattr(module, "mock_{}".format(service_name))
+ mock = getattr(module, "mock_{}".format(service_name_standardized))
if mock is None:
return None
backends = list(mock().backends.values())
@@ -72,20 +76,22 @@ def write_implementation_coverage_to_file(coverage):
except OSError:
pass
- for service_name in sorted(coverage):
- implemented = coverage.get(service_name)['implemented']
- not_implemented = coverage.get(service_name)['not_implemented']
- operations = sorted(implemented + not_implemented)
-
- if implemented and not_implemented:
- percentage_implemented = int(100.0 * len(implemented) / (len(implemented) + len(not_implemented)))
- elif implemented:
- percentage_implemented = 100
- else:
- percentage_implemented = 0
+ implementation_coverage_file = "{}/../IMPLEMENTATION_COVERAGE.md".format(script_dir)
+ # rewrite the implementation coverage file with updated values
+ print("Writing to {}".format(implementation_coverage_file))
+ with open(implementation_coverage_file, "a+") as file:
+ for service_name in sorted(coverage):
+ implemented = coverage.get(service_name)['implemented']
+ not_implemented = coverage.get(service_name)['not_implemented']
+ operations = sorted(implemented + not_implemented)
+
+ if implemented and not_implemented:
+ percentage_implemented = int(100.0 * len(implemented) / (len(implemented) + len(not_implemented)))
+ elif implemented:
+ percentage_implemented = 100
+ else:
+ percentage_implemented = 0
- # rewrite the implementation coverage file with updated values
- with open("../IMPLEMENTATION_COVERAGE.md", "a+") as file:
file.write("\n")
file.write("## {} - {}% implemented\n".format(service_name, percentage_implemented))
for op in operations:
diff --git a/setup.py b/setup.py
index 62f9026d7111..98780dd5a2e2 100755
--- a/setup.py
+++ b/setup.py
@@ -8,10 +8,9 @@
install_requires = [
"Jinja2>=2.7.3",
"boto>=2.36.0",
- "boto3>=1.6.16",
- "botocore>=1.9.16",
- "cookies",
- "cryptography>=2.0.0",
+ "boto3>=1.6.16,<1.8",
+ "botocore>=1.9.16,<1.11",
+ "cryptography>=2.3.0",
"requests>=2.5",
"xmltodict",
"six>1.9",
@@ -41,7 +40,7 @@
setup(
name='moto',
- version='1.3.3',
+ version='1.3.6',
description='A library that allows your python tests to easily'
' mock out the boto library',
author='Steve Pulec',
diff --git a/tests/test_core/test_decorator_calls.py b/tests/test_core/test_decorator_calls.py
index 9e3638cc25a8..5d2f6a4ef007 100644
--- a/tests/test_core/test_decorator_calls.py
+++ b/tests/test_core/test_decorator_calls.py
@@ -85,3 +85,14 @@ def setUp(self):
def test_still_the_same(self):
bucket = self.conn.get_bucket('mybucket')
bucket.name.should.equal("mybucket")
+
+
+@mock_s3_deprecated
+class TesterWithStaticmethod(object):
+
+ @staticmethod
+ def static(*args):
+ assert not args or not isinstance(args[0], TesterWithStaticmethod)
+
+ def test_no_instance_sent_to_staticmethod(self):
+ self.static()
diff --git a/tests/test_dynamodb2/test_dynamodb.py b/tests/test_dynamodb2/test_dynamodb.py
index ab8f258566c6..243de2701a8c 100644
--- a/tests/test_dynamodb2/test_dynamodb.py
+++ b/tests/test_dynamodb2/test_dynamodb.py
@@ -201,6 +201,48 @@ def test_item_add_empty_string_exception():
)
+@requires_boto_gte("2.9")
+@mock_dynamodb2
+def test_update_item_with_empty_string_exception():
+ name = 'TestTable'
+ conn = boto3.client('dynamodb',
+ region_name='us-west-2',
+ aws_access_key_id="ak",
+ aws_secret_access_key="sk")
+ conn.create_table(TableName=name,
+ KeySchema=[{'AttributeName':'forum_name','KeyType':'HASH'}],
+ AttributeDefinitions=[{'AttributeName':'forum_name','AttributeType':'S'}],
+ ProvisionedThroughput={'ReadCapacityUnits':5,'WriteCapacityUnits':5})
+
+ conn.put_item(
+ TableName=name,
+ Item={
+ 'forum_name': { 'S': 'LOLCat Forum' },
+ 'subject': { 'S': 'Check this out!' },
+ 'Body': { 'S': 'http://url_to_lolcat.gif'},
+ 'SentBy': { 'S': "test" },
+ 'ReceivedTime': { 'S': '12/9/2011 11:36:03 PM'},
+ }
+ )
+
+ with assert_raises(ClientError) as ex:
+ conn.update_item(
+ TableName=name,
+ Key={
+ 'forum_name': { 'S': 'LOLCat Forum'},
+ },
+ UpdateExpression='set Body=:Body',
+ ExpressionAttributeValues={
+ ':Body': {'S': ''}
+ })
+
+ ex.exception.response['Error']['Code'].should.equal('ValidationException')
+ ex.exception.response['ResponseMetadata']['HTTPStatusCode'].should.equal(400)
+ ex.exception.response['Error']['Message'].should.equal(
+ 'One or more parameter values were invalid: An AttributeValue may not contain an empty string'
+ )
+
+
@requires_boto_gte("2.9")
@mock_dynamodb2
def test_query_invalid_table():
diff --git a/tests/test_ec2/test_internet_gateways.py b/tests/test_ec2/test_internet_gateways.py
index 5842621cdb98..3a1d0fda9cef 100644
--- a/tests/test_ec2/test_internet_gateways.py
+++ b/tests/test_ec2/test_internet_gateways.py
@@ -199,7 +199,7 @@ def test_igw_desribe():
@mock_ec2_deprecated
-def test_igw_desribe_bad_id():
+def test_igw_describe_bad_id():
""" internet gateway fail to fetch by bad id """
conn = boto.connect_vpc('the_key', 'the_secret')
with assert_raises(EC2ResponseError) as cm:
diff --git a/tests/test_ec2/test_vpc_peering.py b/tests/test_ec2/test_vpc_peering.py
index 6722eed60316..1f98791b333e 100644
--- a/tests/test_ec2/test_vpc_peering.py
+++ b/tests/test_ec2/test_vpc_peering.py
@@ -2,12 +2,15 @@
# Ensure 'assert_raises' context manager support for Python 2.6
import tests.backport_assert_raises
from nose.tools import assert_raises
+from moto.ec2.exceptions import EC2ClientError
+from botocore.exceptions import ClientError
+import boto3
import boto
from boto.exception import EC2ResponseError
import sure # noqa
-from moto import mock_ec2_deprecated
+from moto import mock_ec2, mock_ec2_deprecated
from tests.helpers import requires_boto_gte
@@ -93,3 +96,37 @@ def test_vpc_peering_connections_delete():
cm.exception.code.should.equal('InvalidVpcPeeringConnectionId.NotFound')
cm.exception.status.should.equal(400)
cm.exception.request_id.should_not.be.none
+
+
+@mock_ec2
+def test_vpc_peering_connections_cross_region():
+ # create vpc in us-west-1 and ap-northeast-1
+ ec2_usw1 = boto3.resource('ec2', region_name='us-west-1')
+ vpc_usw1 = ec2_usw1.create_vpc(CidrBlock='10.90.0.0/16')
+ ec2_apn1 = boto3.resource('ec2', region_name='ap-northeast-1')
+ vpc_apn1 = ec2_apn1.create_vpc(CidrBlock='10.20.0.0/16')
+ # create peering
+ vpc_pcx = ec2_usw1.create_vpc_peering_connection(
+ VpcId=vpc_usw1.id,
+ PeerVpcId=vpc_apn1.id,
+ PeerRegion='ap-northeast-1',
+ )
+ vpc_pcx.status['Code'].should.equal('initiating-request')
+ vpc_pcx.requester_vpc.id.should.equal(vpc_usw1.id)
+ vpc_pcx.accepter_vpc.id.should.equal(vpc_apn1.id)
+
+
+@mock_ec2
+def test_vpc_peering_connections_cross_region_fail():
+ # create vpc in us-west-1 and ap-northeast-1
+ ec2_usw1 = boto3.resource('ec2', region_name='us-west-1')
+ vpc_usw1 = ec2_usw1.create_vpc(CidrBlock='10.90.0.0/16')
+ ec2_apn1 = boto3.resource('ec2', region_name='ap-northeast-1')
+ vpc_apn1 = ec2_apn1.create_vpc(CidrBlock='10.20.0.0/16')
+ # attempt to create a peering connection with a wrong peer region (no such VPC there)
+ with assert_raises(ClientError) as cm:
+ ec2_usw1.create_vpc_peering_connection(
+ VpcId=vpc_usw1.id,
+ PeerVpcId=vpc_apn1.id,
+ PeerRegion='ap-northeast-2')
+ cm.exception.response['Error']['Code'].should.equal('InvalidVpcID.NotFound')
diff --git a/tests/test_ecr/test_ecr_boto3.py b/tests/test_ecr/test_ecr_boto3.py
index d689e4e181ee..c0cef81a9dc7 100644
--- a/tests/test_ecr/test_ecr_boto3.py
+++ b/tests/test_ecr/test_ecr_boto3.py
@@ -45,7 +45,8 @@ def _create_image_manifest():
{
"mediaType": "application/vnd.docker.image.rootfs.diff.tar.gzip",
"size": 73109,
- "digest": _create_image_digest("layer3")
+ # randomize image digest
+ "digest": _create_image_digest()
}
]
}
@@ -197,6 +198,47 @@ def test_put_image():
response['image']['repositoryName'].should.equal('test_repository')
response['image']['registryId'].should.equal('012345678910')
+@mock_ecr
+def test_put_image_with_multiple_tags():
+ client = boto3.client('ecr', region_name='us-east-1')
+ _ = client.create_repository(
+ repositoryName='test_repository'
+ )
+ manifest = _create_image_manifest()
+ response = client.put_image(
+ repositoryName='test_repository',
+ imageManifest=json.dumps(manifest),
+ imageTag='v1'
+ )
+
+ response['image']['imageId']['imageTag'].should.equal('v1')
+ response['image']['imageId']['imageDigest'].should.contain("sha")
+ response['image']['repositoryName'].should.equal('test_repository')
+ response['image']['registryId'].should.equal('012345678910')
+
+ response1 = client.put_image(
+ repositoryName='test_repository',
+ imageManifest=json.dumps(manifest),
+ imageTag='latest'
+ )
+
+ response1['image']['imageId']['imageTag'].should.equal('latest')
+ response1['image']['imageId']['imageDigest'].should.contain("sha")
+ response1['image']['repositoryName'].should.equal('test_repository')
+ response1['image']['registryId'].should.equal('012345678910')
+
+ response2 = client.describe_images(repositoryName='test_repository')
+ type(response2['imageDetails']).should.be(list)
+ len(response2['imageDetails']).should.be(1)
+
+ response2['imageDetails'][0]['imageDigest'].should.contain("sha")
+
+ response2['imageDetails'][0]['registryId'].should.equal("012345678910")
+
+ response2['imageDetails'][0]['repositoryName'].should.equal("test_repository")
+
+ len(response2['imageDetails'][0]['imageTags']).should.be(2)
+ response2['imageDetails'][0]['imageTags'].should.be.equal(['v1', 'latest'])
@mock_ecr
def test_list_images():
@@ -281,6 +323,11 @@ def test_describe_images():
repositoryName='test_repository'
)
+ _ = client.put_image(
+ repositoryName='test_repository',
+ imageManifest=json.dumps(_create_image_manifest())
+ )
+
_ = client.put_image(
repositoryName='test_repository',
imageManifest=json.dumps(_create_image_manifest()),
@@ -301,32 +348,37 @@ def test_describe_images():
response = client.describe_images(repositoryName='test_repository')
type(response['imageDetails']).should.be(list)
- len(response['imageDetails']).should.be(3)
+ len(response['imageDetails']).should.be(4)
response['imageDetails'][0]['imageDigest'].should.contain("sha")
response['imageDetails'][1]['imageDigest'].should.contain("sha")
response['imageDetails'][2]['imageDigest'].should.contain("sha")
+ response['imageDetails'][3]['imageDigest'].should.contain("sha")
response['imageDetails'][0]['registryId'].should.equal("012345678910")
response['imageDetails'][1]['registryId'].should.equal("012345678910")
response['imageDetails'][2]['registryId'].should.equal("012345678910")
+ response['imageDetails'][3]['registryId'].should.equal("012345678910")
response['imageDetails'][0]['repositoryName'].should.equal("test_repository")
response['imageDetails'][1]['repositoryName'].should.equal("test_repository")
response['imageDetails'][2]['repositoryName'].should.equal("test_repository")
+ response['imageDetails'][3]['repositoryName'].should.equal("test_repository")
- len(response['imageDetails'][0]['imageTags']).should.be(1)
+ response['imageDetails'][0].should_not.have.key('imageTags')
len(response['imageDetails'][1]['imageTags']).should.be(1)
len(response['imageDetails'][2]['imageTags']).should.be(1)
+ len(response['imageDetails'][3]['imageTags']).should.be(1)
image_tags = ['latest', 'v1', 'v2']
- set([response['imageDetails'][0]['imageTags'][0],
- response['imageDetails'][1]['imageTags'][0],
- response['imageDetails'][2]['imageTags'][0]]).should.equal(set(image_tags))
+ set([response['imageDetails'][1]['imageTags'][0],
+ response['imageDetails'][2]['imageTags'][0],
+ response['imageDetails'][3]['imageTags'][0]]).should.equal(set(image_tags))
response['imageDetails'][0]['imageSizeInBytes'].should.equal(52428800)
response['imageDetails'][1]['imageSizeInBytes'].should.equal(52428800)
response['imageDetails'][2]['imageSizeInBytes'].should.equal(52428800)
+ response['imageDetails'][3]['imageSizeInBytes'].should.equal(52428800)
@mock_ecr
@@ -355,6 +407,68 @@ def test_describe_images_by_tag():
image_detail['imageDigest'].should.equal(put_response['imageId']['imageDigest'])
+@mock_ecr
+def test_describe_images_tags_should_not_contain_empty_tag1():
+ client = boto3.client('ecr', region_name='us-east-1')
+ _ = client.create_repository(
+ repositoryName='test_repository'
+ )
+
+ manifest = _create_image_manifest()
+ client.put_image(
+ repositoryName='test_repository',
+ imageManifest=json.dumps(manifest)
+ )
+
+ tags = ['v1', 'v2', 'latest']
+ for tag in tags:
+ client.put_image(
+ repositoryName='test_repository',
+ imageManifest=json.dumps(manifest),
+ imageTag=tag
+ )
+
+ response = client.describe_images(repositoryName='test_repository', imageIds=[{'imageTag': tag}])
+ len(response['imageDetails']).should.be(1)
+ image_detail = response['imageDetails'][0]
+ len(image_detail['imageTags']).should.equal(3)
+ image_detail['imageTags'].should.be.equal(tags)
+
+
+@mock_ecr
+def test_describe_images_tags_should_not_contain_empty_tag2():
+ client = boto3.client('ecr', region_name='us-east-1')
+ _ = client.create_repository(
+ repositoryName='test_repository'
+ )
+
+ manifest = _create_image_manifest()
+ tags = ['v1', 'v2']
+ for tag in tags:
+ client.put_image(
+ repositoryName='test_repository',
+ imageManifest=json.dumps(manifest),
+ imageTag=tag
+ )
+
+ client.put_image(
+ repositoryName='test_repository',
+ imageManifest=json.dumps(manifest)
+ )
+
+ client.put_image(
+ repositoryName='test_repository',
+ imageManifest=json.dumps(manifest),
+ imageTag='latest'
+ )
+
+ response = client.describe_images(repositoryName='test_repository', imageIds=[{'imageTag': tag}])
+ len(response['imageDetails']).should.be(1)
+ image_detail = response['imageDetails'][0]
+ len(image_detail['imageTags']).should.equal(3)
+ image_detail['imageTags'].should.be.equal(['v1', 'v2', 'latest'])
+
+
@mock_ecr
def test_describe_repository_that_doesnt_exist():
client = boto3.client('ecr', region_name='us-east-1')
diff --git a/tests/test_ecs/test_ecs_boto3.py b/tests/test_ecs/test_ecs_boto3.py
index bf72dc230fa6..70c1463ee215 100644
--- a/tests/test_ecs/test_ecs_boto3.py
+++ b/tests/test_ecs/test_ecs_boto3.py
@@ -304,6 +304,52 @@ def test_create_service():
response['service']['status'].should.equal('ACTIVE')
response['service']['taskDefinition'].should.equal(
'arn:aws:ecs:us-east-1:012345678910:task-definition/test_ecs_task:1')
+ response['service']['schedulingStrategy'].should.equal('REPLICA')
+
+@mock_ecs
+def test_create_service_scheduling_strategy():
+ client = boto3.client('ecs', region_name='us-east-1')
+ _ = client.create_cluster(
+ clusterName='test_ecs_cluster'
+ )
+ _ = client.register_task_definition(
+ family='test_ecs_task',
+ containerDefinitions=[
+ {
+ 'name': 'hello_world',
+ 'image': 'docker/hello-world:latest',
+ 'cpu': 1024,
+ 'memory': 400,
+ 'essential': True,
+ 'environment': [{
+ 'name': 'AWS_ACCESS_KEY_ID',
+ 'value': 'SOME_ACCESS_KEY'
+ }],
+ 'logConfiguration': {'logDriver': 'json-file'}
+ }
+ ]
+ )
+ response = client.create_service(
+ cluster='test_ecs_cluster',
+ serviceName='test_ecs_service',
+ taskDefinition='test_ecs_task',
+ desiredCount=2,
+ schedulingStrategy='DAEMON',
+ )
+ response['service']['clusterArn'].should.equal(
+ 'arn:aws:ecs:us-east-1:012345678910:cluster/test_ecs_cluster')
+ response['service']['desiredCount'].should.equal(2)
+ len(response['service']['events']).should.equal(0)
+ len(response['service']['loadBalancers']).should.equal(0)
+ response['service']['pendingCount'].should.equal(0)
+ response['service']['runningCount'].should.equal(0)
+ response['service']['serviceArn'].should.equal(
+ 'arn:aws:ecs:us-east-1:012345678910:service/test_ecs_service')
+ response['service']['serviceName'].should.equal('test_ecs_service')
+ response['service']['status'].should.equal('ACTIVE')
+ response['service']['taskDefinition'].should.equal(
+ 'arn:aws:ecs:us-east-1:012345678910:task-definition/test_ecs_task:1')
+ response['service']['schedulingStrategy'].should.equal('DAEMON')
@mock_ecs
@@ -411,6 +457,72 @@ def test_describe_services():
response['services'][0]['deployments'][0]['status'].should.equal('PRIMARY')
+@mock_ecs
+def test_describe_services_scheduling_strategy():
+ client = boto3.client('ecs', region_name='us-east-1')
+ _ = client.create_cluster(
+ clusterName='test_ecs_cluster'
+ )
+ _ = client.register_task_definition(
+ family='test_ecs_task',
+ containerDefinitions=[
+ {
+ 'name': 'hello_world',
+ 'image': 'docker/hello-world:latest',
+ 'cpu': 1024,
+ 'memory': 400,
+ 'essential': True,
+ 'environment': [{
+ 'name': 'AWS_ACCESS_KEY_ID',
+ 'value': 'SOME_ACCESS_KEY'
+ }],
+ 'logConfiguration': {'logDriver': 'json-file'}
+ }
+ ]
+ )
+ _ = client.create_service(
+ cluster='test_ecs_cluster',
+ serviceName='test_ecs_service1',
+ taskDefinition='test_ecs_task',
+ desiredCount=2
+ )
+ _ = client.create_service(
+ cluster='test_ecs_cluster',
+ serviceName='test_ecs_service2',
+ taskDefinition='test_ecs_task',
+ desiredCount=2,
+ schedulingStrategy='DAEMON'
+ )
+ _ = client.create_service(
+ cluster='test_ecs_cluster',
+ serviceName='test_ecs_service3',
+ taskDefinition='test_ecs_task',
+ desiredCount=2
+ )
+ response = client.describe_services(
+ cluster='test_ecs_cluster',
+ services=['test_ecs_service1',
+ 'arn:aws:ecs:us-east-1:012345678910:service/test_ecs_service2',
+ 'test_ecs_service3']
+ )
+ len(response['services']).should.equal(3)
+ response['services'][0]['serviceArn'].should.equal(
+ 'arn:aws:ecs:us-east-1:012345678910:service/test_ecs_service1')
+ response['services'][0]['serviceName'].should.equal('test_ecs_service1')
+ response['services'][1]['serviceArn'].should.equal(
+ 'arn:aws:ecs:us-east-1:012345678910:service/test_ecs_service2')
+ response['services'][1]['serviceName'].should.equal('test_ecs_service2')
+
+ response['services'][0]['deployments'][0]['desiredCount'].should.equal(2)
+ response['services'][0]['deployments'][0]['pendingCount'].should.equal(2)
+ response['services'][0]['deployments'][0]['runningCount'].should.equal(0)
+ response['services'][0]['deployments'][0]['status'].should.equal('PRIMARY')
+
+ response['services'][0]['schedulingStrategy'].should.equal('REPLICA')
+ response['services'][1]['schedulingStrategy'].should.equal('DAEMON')
+ response['services'][2]['schedulingStrategy'].should.equal('REPLICA')
+
+
@mock_ecs
def test_update_service():
client = boto3.client('ecs', region_name='us-east-1')
@@ -449,6 +561,7 @@ def test_update_service():
desiredCount=0
)
response['service']['desiredCount'].should.equal(0)
+ response['service']['schedulingStrategy'].should.equal('REPLICA')
@mock_ecs
@@ -515,8 +628,10 @@ def test_delete_service():
'arn:aws:ecs:us-east-1:012345678910:service/test_ecs_service')
response['service']['serviceName'].should.equal('test_ecs_service')
response['service']['status'].should.equal('ACTIVE')
+ response['service']['schedulingStrategy'].should.equal('REPLICA')
response['service']['taskDefinition'].should.equal(
'arn:aws:ecs:us-east-1:012345678910:task-definition/test_ecs_task:1')
+
@mock_ec2
diff --git a/tests/test_elb/test_elb.py b/tests/test_elb/test_elb.py
index 5827e70c79ab..a67508430d7a 100644
--- a/tests/test_elb/test_elb.py
+++ b/tests/test_elb/test_elb.py
@@ -723,6 +723,40 @@ def test_describe_instance_health():
instances_health[0].state.should.equal('InService')
+@mock_ec2
+@mock_elb
+def test_describe_instance_health_boto3():
+ elb = boto3.client('elb', region_name="us-east-1")
+ ec2 = boto3.client('ec2', region_name="us-east-1")
+ instances = ec2.run_instances(MinCount=2, MaxCount=2)['Instances']
+ lb_name = "my_load_balancer"
+ elb.create_load_balancer(
+ Listeners=[{
+ 'InstancePort': 80,
+ 'LoadBalancerPort': 8080,
+ 'Protocol': 'HTTP'
+ }],
+ LoadBalancerName=lb_name,
+ )
+ elb.register_instances_with_load_balancer(
+ LoadBalancerName=lb_name,
+ Instances=[{'InstanceId': instances[0]['InstanceId']}]
+ )
+ instances_health = elb.describe_instance_health(
+ LoadBalancerName=lb_name,
+ Instances=[{'InstanceId': instance['InstanceId']} for instance in instances]
+ )
+ instances_health['InstanceStates'].should.have.length_of(2)
+ instances_health['InstanceStates'][0]['InstanceId'].\
+ should.equal(instances[0]['InstanceId'])
+ instances_health['InstanceStates'][0]['State'].\
+ should.equal('InService')
+ instances_health['InstanceStates'][1]['InstanceId'].\
+ should.equal(instances[1]['InstanceId'])
+ instances_health['InstanceStates'][1]['State'].\
+ should.equal('Unknown')
+
+
@mock_elb
def test_add_remove_tags():
client = boto3.client('elb', region_name='us-east-1')
diff --git a/tests/test_glue/__init__.py b/tests/test_glue/__init__.py
new file mode 100644
index 000000000000..baffc4882521
--- /dev/null
+++ b/tests/test_glue/__init__.py
@@ -0,0 +1 @@
+from __future__ import unicode_literals
diff --git a/tests/test_glue/fixtures/__init__.py b/tests/test_glue/fixtures/__init__.py
new file mode 100644
index 000000000000..baffc4882521
--- /dev/null
+++ b/tests/test_glue/fixtures/__init__.py
@@ -0,0 +1 @@
+from __future__ import unicode_literals
diff --git a/tests/test_glue/fixtures/datacatalog.py b/tests/test_glue/fixtures/datacatalog.py
new file mode 100644
index 000000000000..b2efe4154a95
--- /dev/null
+++ b/tests/test_glue/fixtures/datacatalog.py
@@ -0,0 +1,31 @@
+from __future__ import unicode_literals
+
+TABLE_INPUT = {
+ 'Owner': 'a_fake_owner',
+ 'Parameters': {
+ 'EXTERNAL': 'TRUE',
+ },
+ 'Retention': 0,
+ 'StorageDescriptor': {
+ 'BucketColumns': [],
+ 'Compressed': False,
+ 'InputFormat': 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat',
+ 'NumberOfBuckets': -1,
+ 'OutputFormat': 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat',
+ 'Parameters': {},
+ 'SerdeInfo': {
+ 'Parameters': {
+ 'serialization.format': '1'
+ },
+ 'SerializationLibrary': 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
+ },
+ 'SkewedInfo': {
+ 'SkewedColumnNames': [],
+ 'SkewedColumnValueLocationMaps': {},
+ 'SkewedColumnValues': []
+ },
+ 'SortColumns': [],
+ 'StoredAsSubDirectories': False
+ },
+ 'TableType': 'EXTERNAL_TABLE',
+}
diff --git a/tests/test_glue/helpers.py b/tests/test_glue/helpers.py
new file mode 100644
index 000000000000..4a51f9117ee4
--- /dev/null
+++ b/tests/test_glue/helpers.py
@@ -0,0 +1,46 @@
+from __future__ import unicode_literals
+
+import copy
+
+from .fixtures.datacatalog import TABLE_INPUT
+
+
+def create_database(client, database_name):
+ return client.create_database(
+ DatabaseInput={
+ 'Name': database_name
+ }
+ )
+
+
+def get_database(client, database_name):
+ return client.get_database(Name=database_name)
+
+
+def create_table_input(table_name, s3_location, columns=None, partition_keys=None):
+ table_input = copy.deepcopy(TABLE_INPUT)
+ table_input['Name'] = table_name
+ table_input['PartitionKeys'] = partition_keys or []
+ table_input['StorageDescriptor']['Columns'] = columns or []
+ table_input['StorageDescriptor']['Location'] = s3_location
+ return table_input
+
+
+def create_table(client, database_name, table_name, table_input):
+ return client.create_table(
+ DatabaseName=database_name,
+ TableInput=table_input
+ )
+
+
+def get_table(client, database_name, table_name):
+ return client.get_table(
+ DatabaseName=database_name,
+ Name=table_name
+ )
+
+
+def get_tables(client, database_name):
+ return client.get_tables(
+ DatabaseName=database_name
+ )
diff --git a/tests/test_glue/test_datacatalog.py b/tests/test_glue/test_datacatalog.py
new file mode 100644
index 000000000000..7dabeb1f313e
--- /dev/null
+++ b/tests/test_glue/test_datacatalog.py
@@ -0,0 +1,108 @@
+from __future__ import unicode_literals
+
+import sure # noqa
+from nose.tools import assert_raises
+import boto3
+from botocore.client import ClientError
+
+from moto import mock_glue
+from . import helpers
+
+
+@mock_glue
+def test_create_database():
+ client = boto3.client('glue', region_name='us-east-1')
+ database_name = 'myspecialdatabase'
+ helpers.create_database(client, database_name)
+
+ response = helpers.get_database(client, database_name)
+ database = response['Database']
+
+ database.should.equal({'Name': database_name})
+
+
+@mock_glue
+def test_create_database_already_exists():
+ client = boto3.client('glue', region_name='us-east-1')
+ database_name = 'cantcreatethisdatabasetwice'
+ helpers.create_database(client, database_name)
+
+ with assert_raises(ClientError) as exc:
+ helpers.create_database(client, database_name)
+
+ exc.exception.response['Error']['Code'].should.equal('DatabaseAlreadyExistsException')
+
+
+@mock_glue
+def test_create_table():
+ client = boto3.client('glue', region_name='us-east-1')
+ database_name = 'myspecialdatabase'
+ helpers.create_database(client, database_name)
+
+ table_name = 'myspecialtable'
+ s3_location = 's3://my-bucket/{database_name}/{table_name}'.format(
+ database_name=database_name,
+ table_name=table_name
+ )
+
+ table_input = helpers.create_table_input(table_name, s3_location)
+ helpers.create_table(client, database_name, table_name, table_input)
+
+ response = helpers.get_table(client, database_name, table_name)
+ table = response['Table']
+
+ table['Name'].should.equal(table_input['Name'])
+ table['StorageDescriptor'].should.equal(table_input['StorageDescriptor'])
+ table['PartitionKeys'].should.equal(table_input['PartitionKeys'])
+
+
+@mock_glue
+def test_create_table_already_exists():
+ client = boto3.client('glue', region_name='us-east-1')
+ database_name = 'myspecialdatabase'
+ helpers.create_database(client, database_name)
+
+ table_name = 'cantcreatethistabletwice'
+ s3_location = 's3://my-bucket/{database_name}/{table_name}'.format(
+ database_name=database_name,
+ table_name=table_name
+ )
+
+ table_input = helpers.create_table_input(table_name, s3_location)
+ helpers.create_table(client, database_name, table_name, table_input)
+
+ with assert_raises(ClientError) as exc:
+ helpers.create_table(client, database_name, table_name, table_input)
+
+ exc.exception.response['Error']['Code'].should.equal('TableAlreadyExistsException')
+
+
+@mock_glue
+def test_get_tables():
+ client = boto3.client('glue', region_name='us-east-1')
+ database_name = 'myspecialdatabase'
+ helpers.create_database(client, database_name)
+
+ table_names = ['myfirsttable', 'mysecondtable', 'mythirdtable']
+ table_inputs = {}
+
+ for table_name in table_names:
+ s3_location = 's3://my-bucket/{database_name}/{table_name}'.format(
+ database_name=database_name,
+ table_name=table_name
+ )
+ table_input = helpers.create_table_input(table_name, s3_location)
+ table_inputs[table_name] = table_input
+ helpers.create_table(client, database_name, table_name, table_input)
+
+ response = helpers.get_tables(client, database_name)
+
+ tables = response['TableList']
+
+ tables.should.have.length_of(len(table_names))
+
+ for table in tables:
+ table_name = table['Name']
+ table_name.should.equal(table_inputs[table_name]['Name'])
+ table['StorageDescriptor'].should.equal(table_inputs[table_name]['StorageDescriptor'])
+ table['PartitionKeys'].should.equal(table_inputs[table_name]['PartitionKeys'])
diff --git a/tests/test_iam/test_iam.py b/tests/test_iam/test_iam.py
index 182a606613a6..bc23ff7126dd 100644
--- a/tests/test_iam/test_iam.py
+++ b/tests/test_iam/test_iam.py
@@ -286,6 +286,16 @@ def test_create_policy_versions():
PolicyDocument='{"some":"policy"}')
version.get('PolicyVersion').get('Document').should.equal({'some': 'policy'})
+@mock_iam
+def test_get_policy():
+ conn = boto3.client('iam', region_name='us-east-1')
+ conn.create_policy(
+ PolicyName="TestGetPolicy",
+ PolicyDocument='{"some":"policy"}')
+ policy = conn.get_policy(
+ PolicyArn="arn:aws:iam::123456789012:policy/TestGetPolicy")
+ policy['Policy']['Arn'].should.equal("arn:aws:iam::123456789012:policy/TestGetPolicy")
+
@mock_iam
def test_get_policy_version():
@@ -314,17 +324,22 @@ def test_list_policy_versions():
PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions")
conn.create_policy(
PolicyName="TestListPolicyVersions",
- PolicyDocument='{"some":"policy"}')
- conn.create_policy_version(
- PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions",
PolicyDocument='{"first":"policy"}')
+ versions = conn.list_policy_versions(
+ PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions")
+ versions.get('Versions')[0].get('VersionId').should.equal('v1')
+
conn.create_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions",
PolicyDocument='{"second":"policy"}')
+ conn.create_policy_version(
+ PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions",
+ PolicyDocument='{"third":"policy"}')
versions = conn.list_policy_versions(
PolicyArn="arn:aws:iam::123456789012:policy/TestListPolicyVersions")
- versions.get('Versions')[0].get('Document').should.equal({'first': 'policy'})
+ versions.get('Versions')[0].get('Document').should.equal({'first': 'policy'})
versions.get('Versions')[1].get('Document').should.equal({'second': 'policy'})
+ versions.get('Versions')[2].get('Document').should.equal({'third': 'policy'})
@mock_iam
@@ -332,20 +347,20 @@ def test_delete_policy_version():
conn = boto3.client('iam', region_name='us-east-1')
conn.create_policy(
PolicyName="TestDeletePolicyVersion",
- PolicyDocument='{"some":"policy"}')
+ PolicyDocument='{"first":"policy"}')
conn.create_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion",
- PolicyDocument='{"first":"policy"}')
+ PolicyDocument='{"second":"policy"}')
with assert_raises(ClientError):
conn.delete_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion",
VersionId='v2-nope-this-does-not-exist')
conn.delete_policy_version(
PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion",
- VersionId='v1')
+ VersionId='v2')
versions = conn.list_policy_versions(
PolicyArn="arn:aws:iam::123456789012:policy/TestDeletePolicyVersion")
- len(versions.get('Versions')).should.equal(0)
+ len(versions.get('Versions')).should.equal(1)
@mock_iam_deprecated()
@@ -678,3 +693,68 @@ def test_update_access_key():
Status='Inactive')
resp = client.list_access_keys(UserName=username)
resp['AccessKeyMetadata'][0]['Status'].should.equal('Inactive')
+
+
+@mock_iam
+def test_get_account_authorization_details():
+ import json
+ conn = boto3.client('iam', region_name='us-east-1')
+ conn.create_role(RoleName="my-role", AssumeRolePolicyDocument="some policy", Path="/my-path/")
+ conn.create_user(Path='/', UserName='testCloudAuxUser')
+ conn.create_group(Path='/', GroupName='testCloudAuxGroup')
+ conn.create_policy(
+ PolicyName='testCloudAuxPolicy',
+ Path='/',
+ PolicyDocument=json.dumps({
+ "Version": "2012-10-17",
+ "Statement": [
+ {
+ "Action": "s3:ListBucket",
+ "Resource": "*",
+ "Effect": "Allow",
+ }
+ ]
+ }),
+ Description='Test CloudAux Policy'
+ )
+
+ result = conn.get_account_authorization_details(Filter=['Role'])
+ len(result['RoleDetailList']).should.equal(1)
+ len(result['UserDetailList']).should.equal(0)
+ len(result['GroupDetailList']).should.equal(0)
+ len(result['Policies']).should.equal(0)
+
+ result = conn.get_account_authorization_details(Filter=['User'])
+ len(result['RoleDetailList']).should.equal(0)
+ len(result['UserDetailList']).should.equal(1)
+ len(result['GroupDetailList']).should.equal(0)
+ len(result['Policies']).should.equal(0)
+
+ result = conn.get_account_authorization_details(Filter=['Group'])
+ len(result['RoleDetailList']).should.equal(0)
+ len(result['UserDetailList']).should.equal(0)
+ len(result['GroupDetailList']).should.equal(1)
+ len(result['Policies']).should.equal(0)
+
+ result = conn.get_account_authorization_details(Filter=['LocalManagedPolicy'])
+ len(result['RoleDetailList']).should.equal(0)
+ len(result['UserDetailList']).should.equal(0)
+ len(result['GroupDetailList']).should.equal(0)
+ len(result['Policies']).should.equal(1)
+
+ # AWS managed policies are loaded from iam/aws_managed_policies.py;
+ # there should always be more than one, though the exact count may change.
+ result = conn.get_account_authorization_details(Filter=['AWSManagedPolicy'])
+ len(result['RoleDetailList']).should.equal(0)
+ len(result['UserDetailList']).should.equal(0)
+ len(result['GroupDetailList']).should.equal(0)
+ len(result['Policies']).should.be.greater_than(1)
+
+ result = conn.get_account_authorization_details()
+ len(result['RoleDetailList']).should.equal(1)
+ len(result['UserDetailList']).should.equal(1)
+ len(result['GroupDetailList']).should.equal(1)
+ len(result['Policies']).should.be.greater_than(1)
+
+
+
diff --git a/tests/test_iot/test_iot.py b/tests/test_iot/test_iot.py
index 2136157906c0..5c6effd7a2de 100644
--- a/tests/test_iot/test_iot.py
+++ b/tests/test_iot/test_iot.py
@@ -1,8 +1,9 @@
from __future__ import unicode_literals
-import boto3
-import sure # noqa
import json
+import sure # noqa
+import boto3
+
from moto import mock_iot
@@ -63,6 +64,166 @@ def test_things():
res.should.have.key('thingTypes').which.should.have.length_of(0)
+@mock_iot
+def test_list_thing_types():
+ client = boto3.client('iot', region_name='ap-northeast-1')
+
+ for i in range(0, 100):
+ client.create_thing_type(thingTypeName=str(i + 1))
+
+ thing_types = client.list_thing_types()
+ thing_types.should.have.key('nextToken')
+ thing_types.should.have.key('thingTypes').which.should.have.length_of(50)
+ thing_types['thingTypes'][0]['thingTypeName'].should.equal('1')
+ thing_types['thingTypes'][-1]['thingTypeName'].should.equal('50')
+
+ thing_types = client.list_thing_types(nextToken=thing_types['nextToken'])
+ thing_types.should.have.key('thingTypes').which.should.have.length_of(50)
+ thing_types.should_not.have.key('nextToken')
+ thing_types['thingTypes'][0]['thingTypeName'].should.equal('51')
+ thing_types['thingTypes'][-1]['thingTypeName'].should.equal('100')
+
+
+@mock_iot
+def test_list_thing_types_with_typename_filter():
+ client = boto3.client('iot', region_name='ap-northeast-1')
+
+ client.create_thing_type(thingTypeName='thing')
+ client.create_thing_type(thingTypeName='thingType')
+ client.create_thing_type(thingTypeName='thingTypeName')
+ client.create_thing_type(thingTypeName='thingTypeNameGroup')
+ client.create_thing_type(thingTypeName='shouldNotFind')
+ client.create_thing_type(thingTypeName='find me it shall not')
+
+ thing_types = client.list_thing_types(thingTypeName='thing')
+ thing_types.should_not.have.key('nextToken')
+ thing_types.should.have.key('thingTypes').which.should.have.length_of(4)
+ thing_types['thingTypes'][0]['thingTypeName'].should.equal('thing')
+ thing_types['thingTypes'][-1]['thingTypeName'].should.equal('thingTypeNameGroup')
+
+ thing_types = client.list_thing_types(thingTypeName='thingTypeName')
+ thing_types.should_not.have.key('nextToken')
+ thing_types.should.have.key('thingTypes').which.should.have.length_of(2)
+ thing_types['thingTypes'][0]['thingTypeName'].should.equal('thingTypeName')
+ thing_types['thingTypes'][-1]['thingTypeName'].should.equal('thingTypeNameGroup')
+
+
+@mock_iot
+def test_list_things_with_next_token():
+ client = boto3.client('iot', region_name='ap-northeast-1')
+
+ for i in range(0, 200):
+ client.create_thing(thingName=str(i + 1))
+
+ things = client.list_things()
+ things.should.have.key('nextToken')
+ things.should.have.key('things').which.should.have.length_of(50)
+ things['things'][0]['thingName'].should.equal('1')
+ things['things'][0]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/1')
+ things['things'][-1]['thingName'].should.equal('50')
+ things['things'][-1]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/50')
+
+ things = client.list_things(nextToken=things['nextToken'])
+ things.should.have.key('nextToken')
+ things.should.have.key('things').which.should.have.length_of(50)
+ things['things'][0]['thingName'].should.equal('51')
+ things['things'][0]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/51')
+ things['things'][-1]['thingName'].should.equal('100')
+ things['things'][-1]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/100')
+
+ things = client.list_things(nextToken=things['nextToken'])
+ things.should.have.key('nextToken')
+ things.should.have.key('things').which.should.have.length_of(50)
+ things['things'][0]['thingName'].should.equal('101')
+ things['things'][0]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/101')
+ things['things'][-1]['thingName'].should.equal('150')
+ things['things'][-1]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/150')
+
+ things = client.list_things(nextToken=things['nextToken'])
+ things.should_not.have.key('nextToken')
+ things.should.have.key('things').which.should.have.length_of(50)
+ things['things'][0]['thingName'].should.equal('151')
+ things['things'][0]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/151')
+ things['things'][-1]['thingName'].should.equal('200')
+ things['things'][-1]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/200')
+
+
+@mock_iot
+def test_list_things_with_attribute_and_thing_type_filter_and_next_token():
+ client = boto3.client('iot', region_name='ap-northeast-1')
+ client.create_thing_type(thingTypeName='my-thing-type')
+
+ for i in range(0, 200):
+ if not (i + 1) % 3:
+ attribute_payload = {
+ 'attributes': {
+ 'foo': 'bar'
+ }
+ }
+ elif not (i + 1) % 5:
+ attribute_payload = {
+ 'attributes': {
+ 'bar': 'foo'
+ }
+ }
+ else:
+ attribute_payload = {}
+
+ if not (i + 1) % 2:
+ thing_type_name = 'my-thing-type'
+ client.create_thing(thingName=str(i + 1), thingTypeName=thing_type_name, attributePayload=attribute_payload)
+ else:
+ client.create_thing(thingName=str(i + 1), attributePayload=attribute_payload)
+
+ # Test filter for thingTypeName
+ things = client.list_things(thingTypeName=thing_type_name)
+ things.should.have.key('nextToken')
+ things.should.have.key('things').which.should.have.length_of(50)
+ things['things'][0]['thingName'].should.equal('2')
+ things['things'][0]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/2')
+ things['things'][-1]['thingName'].should.equal('100')
+ things['things'][-1]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/100')
+ all(item['thingTypeName'] == thing_type_name for item in things['things'])
+
+ things = client.list_things(nextToken=things['nextToken'], thingTypeName=thing_type_name)
+ things.should_not.have.key('nextToken')
+ things.should.have.key('things').which.should.have.length_of(50)
+ things['things'][0]['thingName'].should.equal('102')
+ things['things'][0]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/102')
+ things['things'][-1]['thingName'].should.equal('200')
+ things['things'][-1]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/200')
+ all(item['thingTypeName'] == thing_type_name for item in things['things'])
+
+ # Test filter for attributes
+ things = client.list_things(attributeName='foo', attributeValue='bar')
+ things.should.have.key('nextToken')
+ things.should.have.key('things').which.should.have.length_of(50)
+ things['things'][0]['thingName'].should.equal('3')
+ things['things'][0]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/3')
+ things['things'][-1]['thingName'].should.equal('150')
+ things['things'][-1]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/150')
+ all(item['attributes'] == {'foo': 'bar'} for item in things['things'])
+
+ things = client.list_things(nextToken=things['nextToken'], attributeName='foo', attributeValue='bar')
+ things.should_not.have.key('nextToken')
+ things.should.have.key('things').which.should.have.length_of(16)
+ things['things'][0]['thingName'].should.equal('153')
+ things['things'][0]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/153')
+ things['things'][-1]['thingName'].should.equal('198')
+ things['things'][-1]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/198')
+ all(item['attributes'] == {'foo': 'bar'} for item in things['things'])
+
+ # Test filter for attributes and thingTypeName
+ things = client.list_things(thingTypeName=thing_type_name, attributeName='foo', attributeValue='bar')
+ things.should_not.have.key('nextToken')
+ things.should.have.key('things').which.should.have.length_of(33)
+ things['things'][0]['thingName'].should.equal('6')
+ things['things'][0]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/6')
+ things['things'][-1]['thingName'].should.equal('198')
+ things['things'][-1]['thingArn'].should.equal('arn:aws:iot:ap-northeast-1:1:thing/198')
+ all(item['attributes'] == {'foo': 'bar'} and item['thingTypeName'] == thing_type_name for item in things['things'])
+
+
@mock_iot
def test_certs():
client = boto3.client('iot', region_name='ap-northeast-1')
@@ -204,7 +365,6 @@ def test_principal_thing():
@mock_iot
def test_thing_groups():
client = boto3.client('iot', region_name='ap-northeast-1')
- name = 'my-thing'
group_name = 'my-group-name'
# thing group
@@ -424,6 +584,7 @@ def test_create_job():
job.should.have.key('jobArn')
job.should.have.key('description')
+
@mock_iot
def test_describe_job():
client = boto3.client('iot', region_name='eu-west-1')
diff --git a/tests/test_logs/test_logs.py b/tests/test_logs/test_logs.py
index 3f924cc5503f..05bd3c823844 100644
--- a/tests/test_logs/test_logs.py
+++ b/tests/test_logs/test_logs.py
@@ -1,5 +1,6 @@
import boto3
import sure # noqa
+import six
from botocore.exceptions import ClientError
from moto import mock_logs, settings
@@ -47,7 +48,7 @@ def test_exceptions():
logEvents=[
{
'timestamp': 0,
- 'message': 'line'
+ 'message': 'line'
},
],
)
@@ -79,7 +80,7 @@ def test_put_logs():
{'timestamp': 0, 'message': 'hello'},
{'timestamp': 0, 'message': 'world'}
]
- conn.put_log_events(
+ putRes = conn.put_log_events(
logGroupName=log_group_name,
logStreamName=log_stream_name,
logEvents=messages
@@ -89,6 +90,9 @@ def test_put_logs():
logStreamName=log_stream_name
)
events = res['events']
+ nextSequenceToken = putRes['nextSequenceToken']
+ assert isinstance(nextSequenceToken, six.string_types)
+ assert len(nextSequenceToken) == 56
events.should.have.length_of(2)
diff --git a/tests/test_rds2/test_rds2.py b/tests/test_rds2/test_rds2.py
index 80dcd4f53e3b..cf9805444825 100644
--- a/tests/test_rds2/test_rds2.py
+++ b/tests/test_rds2/test_rds2.py
@@ -33,6 +33,7 @@ def test_create_database():
db_instance['DBInstanceIdentifier'].should.equal("db-master-1")
db_instance['IAMDatabaseAuthenticationEnabled'].should.equal(False)
db_instance['DbiResourceId'].should.contain("db-")
+ db_instance['CopyTagsToSnapshot'].should.equal(False)
@mock_rds2
@@ -339,6 +340,49 @@ def test_create_db_snapshots():
snapshot.get('Engine').should.equal('postgres')
snapshot.get('DBInstanceIdentifier').should.equal('db-primary-1')
snapshot.get('DBSnapshotIdentifier').should.equal('g-1')
+ result = conn.list_tags_for_resource(ResourceName=snapshot['DBSnapshotArn'])
+ result['TagList'].should.equal([])
+
+
+@mock_rds2
+def test_create_db_snapshots_copy_tags():
+ conn = boto3.client('rds', region_name='us-west-2')
+ conn.create_db_snapshot.when.called_with(
+ DBInstanceIdentifier='db-primary-1',
+ DBSnapshotIdentifier='snapshot-1').should.throw(ClientError)
+
+ conn.create_db_instance(DBInstanceIdentifier='db-primary-1',
+ AllocatedStorage=10,
+ Engine='postgres',
+ DBName='staging-postgres',
+ DBInstanceClass='db.m1.small',
+ MasterUsername='root',
+ MasterUserPassword='hunter2',
+ Port=1234,
+ DBSecurityGroups=["my_sg"],
+ CopyTagsToSnapshot=True,
+ Tags=[
+ {
+ 'Key': 'foo',
+ 'Value': 'bar',
+ },
+ {
+ 'Key': 'foo1',
+ 'Value': 'bar1',
+ },
+ ])
+
+ snapshot = conn.create_db_snapshot(DBInstanceIdentifier='db-primary-1',
+ DBSnapshotIdentifier='g-1').get('DBSnapshot')
+
+ snapshot.get('Engine').should.equal('postgres')
+ snapshot.get('DBInstanceIdentifier').should.equal('db-primary-1')
+ snapshot.get('DBSnapshotIdentifier').should.equal('g-1')
+ result = conn.list_tags_for_resource(ResourceName=snapshot['DBSnapshotArn'])
+ result['TagList'].should.equal([{'Value': 'bar',
+ 'Key': 'foo'},
+ {'Value': 'bar1',
+ 'Key': 'foo1'}])
@mock_rds2
@@ -656,6 +700,117 @@ def test_remove_tags_db():
len(result['TagList']).should.equal(1)
+@mock_rds2
+def test_list_tags_snapshot():
+ conn = boto3.client('rds', region_name='us-west-2')
+ result = conn.list_tags_for_resource(
+ ResourceName='arn:aws:rds:us-west-2:1234567890:snapshot:foo')
+ result['TagList'].should.equal([])
+ conn.create_db_instance(DBInstanceIdentifier='db-primary-1',
+ AllocatedStorage=10,
+ Engine='postgres',
+ DBName='staging-postgres',
+ DBInstanceClass='db.m1.small',
+ MasterUsername='root',
+ MasterUserPassword='hunter2',
+ Port=1234,
+ DBSecurityGroups=["my_sg"])
+ snapshot = conn.create_db_snapshot(DBInstanceIdentifier='db-primary-1',
+ DBSnapshotIdentifier='snapshot-with-tags',
+ Tags=[
+ {
+ 'Key': 'foo',
+ 'Value': 'bar',
+ },
+ {
+ 'Key': 'foo1',
+ 'Value': 'bar1',
+ },
+ ])
+ result = conn.list_tags_for_resource(ResourceName=snapshot['DBSnapshot']['DBSnapshotArn'])
+ result['TagList'].should.equal([{'Value': 'bar',
+ 'Key': 'foo'},
+ {'Value': 'bar1',
+ 'Key': 'foo1'}])
+
+
+@mock_rds2
+def test_add_tags_snapshot():
+ conn = boto3.client('rds', region_name='us-west-2')
+ conn.create_db_instance(DBInstanceIdentifier='db-primary-1',
+ AllocatedStorage=10,
+ Engine='postgres',
+ DBName='staging-postgres',
+ DBInstanceClass='db.m1.small',
+ MasterUsername='root',
+ MasterUserPassword='hunter2',
+ Port=1234,
+ DBSecurityGroups=["my_sg"])
+ snapshot = conn.create_db_snapshot(DBInstanceIdentifier='db-primary-1',
+ DBSnapshotIdentifier='snapshot-without-tags',
+ Tags=[
+ {
+ 'Key': 'foo',
+ 'Value': 'bar',
+ },
+ {
+ 'Key': 'foo1',
+ 'Value': 'bar1',
+ },
+ ])
+ result = conn.list_tags_for_resource(
+ ResourceName='arn:aws:rds:us-west-2:1234567890:snapshot:snapshot-without-tags')
+ list(result['TagList']).should.have.length_of(2)
+ conn.add_tags_to_resource(ResourceName='arn:aws:rds:us-west-2:1234567890:snapshot:snapshot-without-tags',
+ Tags=[
+ {
+ 'Key': 'foo',
+ 'Value': 'fish',
+ },
+ {
+ 'Key': 'foo2',
+ 'Value': 'bar2',
+ },
+ ])
+ result = conn.list_tags_for_resource(
+ ResourceName='arn:aws:rds:us-west-2:1234567890:snapshot:snapshot-without-tags')
+ list(result['TagList']).should.have.length_of(3)
+
+
+@mock_rds2
+def test_remove_tags_snapshot():
+ conn = boto3.client('rds', region_name='us-west-2')
+ conn.create_db_instance(DBInstanceIdentifier='db-primary-1',
+ AllocatedStorage=10,
+ Engine='postgres',
+ DBName='staging-postgres',
+ DBInstanceClass='db.m1.small',
+ MasterUsername='root',
+ MasterUserPassword='hunter2',
+ Port=1234,
+ DBSecurityGroups=["my_sg"])
+ snapshot = conn.create_db_snapshot(DBInstanceIdentifier='db-primary-1',
+ DBSnapshotIdentifier='snapshot-with-tags',
+ Tags=[
+ {
+ 'Key': 'foo',
+ 'Value': 'bar',
+ },
+ {
+ 'Key': 'foo1',
+ 'Value': 'bar1',
+ },
+ ])
+ result = conn.list_tags_for_resource(
+ ResourceName='arn:aws:rds:us-west-2:1234567890:snapshot:snapshot-with-tags')
+ list(result['TagList']).should.have.length_of(2)
+ conn.remove_tags_from_resource(
+ ResourceName='arn:aws:rds:us-west-2:1234567890:snapshot:snapshot-with-tags', TagKeys=['foo'])
+ result = conn.list_tags_for_resource(
+ ResourceName='arn:aws:rds:us-west-2:1234567890:snapshot:snapshot-with-tags')
+ len(result['TagList']).should.equal(1)
+
+
@mock_rds2
def test_add_tags_option_group():
conn = boto3.client('rds', region_name='us-west-2')
diff --git a/tests/test_redshift/test_redshift.py b/tests/test_redshift/test_redshift.py
index 6e027b86cde5..9208c92dd7d1 100644
--- a/tests/test_redshift/test_redshift.py
+++ b/tests/test_redshift/test_redshift.py
@@ -1,5 +1,7 @@
from __future__ import unicode_literals
+import datetime
+
import boto
import boto3
from boto.redshift.exceptions import (
@@ -32,6 +34,8 @@ def test_create_cluster_boto3():
MasterUserPassword='password',
)
response['Cluster']['NodeType'].should.equal('ds2.xlarge')
+ create_time = response['Cluster']['ClusterCreateTime']
+ create_time.should.be.lower_than(datetime.datetime.now(create_time.tzinfo))
@mock_redshift
diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py
index 9a68d1bbb3e8..6e339abb6699 100644
--- a/tests/test_s3/test_s3.py
+++ b/tests/test_s3/test_s3.py
@@ -2471,6 +2471,72 @@ def test_boto3_delete_markers():
oldest['Key'].should.equal('key-with-versions-and-unicode-ó')
+@mock_s3
+def test_boto3_multiple_delete_markers():
+ s3 = boto3.client('s3', region_name='us-east-1')
+ bucket_name = 'mybucket'
+ key = u'key-with-versions-and-unicode-ó'
+ s3.create_bucket(Bucket=bucket_name)
+ s3.put_bucket_versioning(
+ Bucket=bucket_name,
+ VersioningConfiguration={
+ 'Status': 'Enabled'
+ }
+ )
+ items = (six.b('v1'), six.b('v2'))
+ for body in items:
+ s3.put_object(
+ Bucket=bucket_name,
+ Key=key,
+ Body=body
+ )
+
+ # Delete the object twice to add multiple delete markers
+ s3.delete_object(Bucket=bucket_name, Key=key)
+ s3.delete_object(Bucket=bucket_name, Key=key)
+
+ response = s3.list_object_versions(Bucket=bucket_name)
+ response['DeleteMarkers'].should.have.length_of(2)
+
+ with assert_raises(ClientError) as e:
+ s3.get_object(
+ Bucket=bucket_name,
+ Key=key
+ )
+ e.response['Error']['Code'].should.equal('404')
+
+ # Remove both delete markers to restore the object
+ s3.delete_object(
+ Bucket=bucket_name,
+ Key=key,
+ VersionId='2'
+ )
+ s3.delete_object(
+ Bucket=bucket_name,
+ Key=key,
+ VersionId='3'
+ )
+
+ response = s3.get_object(
+ Bucket=bucket_name,
+ Key=key
+ )
+ response['Body'].read().should.equal(items[-1])
+ response = s3.list_object_versions(Bucket=bucket_name)
+ response['Versions'].should.have.length_of(2)
+
+ # We've asserted there is only 2 records so one is newest, one is oldest
+ latest = list(filter(lambda item: item['IsLatest'], response['Versions']))[0]
+ oldest = list(filter(lambda item: not item['IsLatest'], response['Versions']))[0]
+
+ # Double check ordering of version ID's
+ latest['VersionId'].should.equal('1')
+ oldest['VersionId'].should.equal('0')
+
+ # Double check the name is still unicode
+ latest['Key'].should.equal('key-with-versions-and-unicode-ó')
+ oldest['Key'].should.equal('key-with-versions-and-unicode-ó')
+
@mock_s3
def test_get_stream_gzipped():
payload = b"this is some stuff here"
diff --git a/tests/test_s3/test_s3_storageclass.py b/tests/test_s3/test_s3_storageclass.py
index c4c83a2853d4..99908c50139d 100644
--- a/tests/test_s3/test_s3_storageclass.py
+++ b/tests/test_s3/test_s3_storageclass.py
@@ -101,6 +101,6 @@ def test_s3_default_storage_class():
# tests that the default storage class is still STANDARD
list_of_objects["Contents"][0]["StorageClass"].should.equal("STANDARD")
-
+
diff --git a/tests/test_s3/test_s3_utils.py b/tests/test_s3/test_s3_utils.py
index 9cda1f157e60..ce9f54c75318 100644
--- a/tests/test_s3/test_s3_utils.py
+++ b/tests/test_s3/test_s3_utils.py
@@ -21,7 +21,7 @@ def test_force_ignore_subdomain_for_bucketnames():
os.environ['S3_IGNORE_SUBDOMAIN_BUCKETNAME'] = '1'
expect(bucket_name_from_url('https://subdomain.localhost:5000/abc/resource')).should.equal(None)
del(os.environ['S3_IGNORE_SUBDOMAIN_BUCKETNAME'])
-
+
def test_versioned_key_store():
diff --git a/tests/test_secretsmanager/test_secretsmanager.py b/tests/test_secretsmanager/test_secretsmanager.py
index 6fefeb56f0de..ec384a6601c6 100644
--- a/tests/test_secretsmanager/test_secretsmanager.py
+++ b/tests/test_secretsmanager/test_secretsmanager.py
@@ -25,6 +25,15 @@ def test_get_secret_that_does_not_exist():
with assert_raises(ClientError):
result = conn.get_secret_value(SecretId='i-dont-exist')
+@mock_secretsmanager
+def test_get_secret_that_does_not_match():
+ conn = boto3.client('secretsmanager', region_name='us-west-2')
+ create_secret = conn.create_secret(Name='java-util-test-password',
+ SecretString="foosecret")
+
+ with assert_raises(ClientError):
+ result = conn.get_secret_value(SecretId='i-dont-match')
+
@mock_secretsmanager
def test_create_secret():
conn = boto3.client('secretsmanager', region_name='us-east-1')
@@ -143,3 +152,135 @@ def test_get_random_too_long_password():
with assert_raises(Exception):
random_password = conn.get_random_password(PasswordLength=5555)
+
+@mock_secretsmanager
+def test_describe_secret():
+ conn = boto3.client('secretsmanager', region_name='us-west-2')
+ conn.create_secret(Name='test-secret',
+ SecretString='foosecret')
+
+ secret_description = conn.describe_secret(SecretId='test-secret')
+ assert secret_description # Returned dict is not empty
+ assert secret_description['ARN'] == (
+ 'arn:aws:secretsmanager:us-west-2:1234567890:secret:test-secret-rIjad')
+
+@mock_secretsmanager
+def test_describe_secret_that_does_not_exist():
+ conn = boto3.client('secretsmanager', region_name='us-west-2')
+
+    with assert_raises(ClientError):
+        result = conn.describe_secret(SecretId='i-dont-exist')
+
+@mock_secretsmanager
+def test_describe_secret_that_does_not_match():
+ conn = boto3.client('secretsmanager', region_name='us-west-2')
+ conn.create_secret(Name='test-secret',
+ SecretString='foosecret')
+
+    with assert_raises(ClientError):
+        result = conn.describe_secret(SecretId='i-dont-match')
+
+@mock_secretsmanager
+def test_rotate_secret():
+ secret_name = 'test-secret'
+ conn = boto3.client('secretsmanager', region_name='us-west-2')
+ conn.create_secret(Name=secret_name,
+ SecretString='foosecret')
+
+ rotated_secret = conn.rotate_secret(SecretId=secret_name)
+
+ assert rotated_secret
+ assert rotated_secret['ARN'] == (
+ 'arn:aws:secretsmanager:us-west-2:1234567890:secret:test-secret-rIjad'
+ )
+ assert rotated_secret['Name'] == secret_name
+ assert rotated_secret['VersionId'] != ''
+
+@mock_secretsmanager
+def test_rotate_secret_enable_rotation():
+ secret_name = 'test-secret'
+ conn = boto3.client('secretsmanager', region_name='us-west-2')
+ conn.create_secret(Name=secret_name,
+ SecretString='foosecret')
+
+ initial_description = conn.describe_secret(SecretId=secret_name)
+ assert initial_description
+ assert initial_description['RotationEnabled'] is False
+ assert initial_description['RotationRules']['AutomaticallyAfterDays'] == 0
+
+ conn.rotate_secret(SecretId=secret_name,
+ RotationRules={'AutomaticallyAfterDays': 42})
+
+ rotated_description = conn.describe_secret(SecretId=secret_name)
+ assert rotated_description
+ assert rotated_description['RotationEnabled'] is True
+ assert rotated_description['RotationRules']['AutomaticallyAfterDays'] == 42
+
+@mock_secretsmanager
+def test_rotate_secret_that_does_not_exist():
+ conn = boto3.client('secretsmanager', 'us-west-2')
+
+ with assert_raises(ClientError):
+ result = conn.rotate_secret(SecretId='i-dont-exist')
+
+@mock_secretsmanager
+def test_rotate_secret_that_does_not_match():
+ conn = boto3.client('secretsmanager', region_name='us-west-2')
+ conn.create_secret(Name='test-secret',
+ SecretString='foosecret')
+
+ with assert_raises(ClientError):
+ result = conn.rotate_secret(SecretId='i-dont-match')
+
+@mock_secretsmanager
+def test_rotate_secret_client_request_token_too_short():
+ # Test is intentionally empty. Boto3 catches too short ClientRequestToken
+ # and raises ParamValidationError before Moto can see it.
+ # test_server actually handles this error.
+ assert True
+
+@mock_secretsmanager
+def test_rotate_secret_client_request_token_too_long():
+ secret_name = 'test-secret'
+ conn = boto3.client('secretsmanager', region_name='us-west-2')
+ conn.create_secret(Name=secret_name,
+ SecretString='foosecret')
+
+ client_request_token = (
+ 'ED9F8B6C-85B7-446A-B7E4-38F2A3BEB13C-'
+ 'ED9F8B6C-85B7-446A-B7E4-38F2A3BEB13C'
+ )
+ with assert_raises(ClientError):
+ result = conn.rotate_secret(SecretId=secret_name,
+ ClientRequestToken=client_request_token)
+
+@mock_secretsmanager
+def test_rotate_secret_rotation_lambda_arn_too_long():
+ secret_name = 'test-secret'
+ conn = boto3.client('secretsmanager', region_name='us-west-2')
+ conn.create_secret(Name=secret_name,
+ SecretString='foosecret')
+
+ rotation_lambda_arn = '85B7-446A-B7E4' * 147 # == 2058 characters
+ with assert_raises(ClientError):
+ result = conn.rotate_secret(SecretId=secret_name,
+ RotationLambdaARN=rotation_lambda_arn)
+
+@mock_secretsmanager
+def test_rotate_secret_rotation_period_zero():
+ # Test is intentionally empty. Boto3 catches zero day rotation period
+ # and raises ParamValidationError before Moto can see it.
+ # test_server actually handles this error.
+ assert True
+
+@mock_secretsmanager
+def test_rotate_secret_rotation_period_too_long():
+ secret_name = 'test-secret'
+ conn = boto3.client('secretsmanager', region_name='us-west-2')
+ conn.create_secret(Name=secret_name,
+ SecretString='foosecret')
+
+ rotation_rules = {'AutomaticallyAfterDays': 1001}
+ with assert_raises(ClientError):
+ result = conn.rotate_secret(SecretId=secret_name,
+ RotationRules=rotation_rules)
diff --git a/tests/test_secretsmanager/test_server.py b/tests/test_secretsmanager/test_server.py
index 2f73ece07a45..e573f9b6719b 100644
--- a/tests/test_secretsmanager/test_server.py
+++ b/tests/test_secretsmanager/test_server.py
@@ -49,6 +49,27 @@ def test_get_secret_that_does_not_exist():
assert json_data['message'] == "Secrets Manager can't find the specified secret"
assert json_data['__type'] == 'ResourceNotFoundException'
+@mock_secretsmanager
+def test_get_secret_that_does_not_match():
+ backend = server.create_backend_app("secretsmanager")
+ test_client = backend.test_client()
+
+ create_secret = test_client.post('/',
+ data={"Name": "test-secret",
+ "SecretString": "foo-secret"},
+ headers={
+ "X-Amz-Target": "secretsmanager.CreateSecret"},
+ )
+ get_secret = test_client.post('/',
+ data={"SecretId": "i-dont-match",
+ "VersionStage": "AWSCURRENT"},
+ headers={
+ "X-Amz-Target": "secretsmanager.GetSecretValue"},
+ )
+ json_data = json.loads(get_secret.data.decode("utf-8"))
+ assert json_data['message'] == "Secrets Manager can't find the specified secret"
+ assert json_data['__type'] == 'ResourceNotFoundException'
+
@mock_secretsmanager
def test_create_secret():
@@ -66,3 +87,335 @@ def test_create_secret():
assert json_data['ARN'] == (
'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad')
assert json_data['Name'] == 'test-secret'
+
+@mock_secretsmanager
+def test_describe_secret():
+
+ backend = server.create_backend_app('secretsmanager')
+ test_client = backend.test_client()
+
+ create_secret = test_client.post('/',
+ data={"Name": "test-secret",
+ "SecretString": "foosecret"},
+ headers={
+ "X-Amz-Target": "secretsmanager.CreateSecret"
+ },
+ )
+ describe_secret = test_client.post('/',
+ data={"SecretId": "test-secret"},
+ headers={
+ "X-Amz-Target": "secretsmanager.DescribeSecret"
+ },
+ )
+
+ json_data = json.loads(describe_secret.data.decode("utf-8"))
+ assert json_data # Returned dict is not empty
+ assert json_data['ARN'] == (
+ 'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad'
+ )
+
+@mock_secretsmanager
+def test_describe_secret_that_does_not_exist():
+
+ backend = server.create_backend_app('secretsmanager')
+ test_client = backend.test_client()
+
+ describe_secret = test_client.post('/',
+ data={"SecretId": "i-dont-exist"},
+ headers={
+ "X-Amz-Target": "secretsmanager.DescribeSecret"
+ },
+ )
+
+ json_data = json.loads(describe_secret.data.decode("utf-8"))
+ assert json_data['message'] == "Secrets Manager can't find the specified secret"
+ assert json_data['__type'] == 'ResourceNotFoundException'
+
+@mock_secretsmanager
+def test_describe_secret_that_does_not_match():
+
+ backend = server.create_backend_app('secretsmanager')
+ test_client = backend.test_client()
+
+ create_secret = test_client.post('/',
+ data={"Name": "test-secret",
+ "SecretString": "foosecret"},
+ headers={
+ "X-Amz-Target": "secretsmanager.CreateSecret"
+ },
+ )
+ describe_secret = test_client.post('/',
+ data={"SecretId": "i-dont-match"},
+ headers={
+ "X-Amz-Target": "secretsmanager.DescribeSecret"
+ },
+ )
+
+ json_data = json.loads(describe_secret.data.decode("utf-8"))
+ assert json_data['message'] == "Secrets Manager can't find the specified secret"
+ assert json_data['__type'] == 'ResourceNotFoundException'
+
+@mock_secretsmanager
+def test_rotate_secret():
+ backend = server.create_backend_app('secretsmanager')
+ test_client = backend.test_client()
+
+ create_secret = test_client.post('/',
+ data={"Name": "test-secret",
+ "SecretString": "foosecret"},
+ headers={
+ "X-Amz-Target": "secretsmanager.CreateSecret"
+ },
+ )
+
+ client_request_token = "EXAMPLE2-90ab-cdef-fedc-ba987SECRET2"
+ rotate_secret = test_client.post('/',
+ data={"SecretId": "test-secret",
+ "ClientRequestToken": client_request_token},
+ headers={
+ "X-Amz-Target": "secretsmanager.RotateSecret"
+ },
+ )
+
+ json_data = json.loads(rotate_secret.data.decode("utf-8"))
+ assert json_data # Returned dict is not empty
+ assert json_data['ARN'] == (
+ 'arn:aws:secretsmanager:us-east-1:1234567890:secret:test-secret-rIjad'
+ )
+ assert json_data['Name'] == 'test-secret'
+ assert json_data['VersionId'] == client_request_token
+
+# @mock_secretsmanager
+# def test_rotate_secret_enable_rotation():
+# backend = server.create_backend_app('secretsmanager')
+# test_client = backend.test_client()
+
+# create_secret = test_client.post(
+# '/',
+# data={
+# "Name": "test-secret",
+# "SecretString": "foosecret"
+# },
+# headers={
+# "X-Amz-Target": "secretsmanager.CreateSecret"
+# },
+# )
+
+# initial_description = test_client.post(
+# '/',
+# data={
+# "SecretId": "test-secret"
+# },
+# headers={
+# "X-Amz-Target": "secretsmanager.DescribeSecret"
+# },
+# )
+
+# json_data = json.loads(initial_description.data.decode("utf-8"))
+# assert json_data # Returned dict is not empty
+# assert json_data['RotationEnabled'] is False
+# assert json_data['RotationRules']['AutomaticallyAfterDays'] == 0
+
+# rotate_secret = test_client.post(
+# '/',
+# data={
+# "SecretId": "test-secret",
+# "RotationRules": {"AutomaticallyAfterDays": 42}
+# },
+# headers={
+# "X-Amz-Target": "secretsmanager.RotateSecret"
+# },
+# )
+
+# rotated_description = test_client.post(
+# '/',
+# data={
+# "SecretId": "test-secret"
+# },
+# headers={
+# "X-Amz-Target": "secretsmanager.DescribeSecret"
+# },
+# )
+
+# json_data = json.loads(rotated_description.data.decode("utf-8"))
+# assert json_data # Returned dict is not empty
+# assert json_data['RotationEnabled'] is True
+# assert json_data['RotationRules']['AutomaticallyAfterDays'] == 42
+
+@mock_secretsmanager
+def test_rotate_secret_that_does_not_exist():
+ backend = server.create_backend_app('secretsmanager')
+ test_client = backend.test_client()
+
+ rotate_secret = test_client.post('/',
+ data={"SecretId": "i-dont-exist"},
+ headers={
+ "X-Amz-Target": "secretsmanager.RotateSecret"
+ },
+ )
+
+ json_data = json.loads(rotate_secret.data.decode("utf-8"))
+ assert json_data['message'] == "Secrets Manager can't find the specified secret"
+ assert json_data['__type'] == 'ResourceNotFoundException'
+
+@mock_secretsmanager
+def test_rotate_secret_that_does_not_match():
+ backend = server.create_backend_app('secretsmanager')
+ test_client = backend.test_client()
+
+ create_secret = test_client.post('/',
+ data={"Name": "test-secret",
+ "SecretString": "foosecret"},
+ headers={
+ "X-Amz-Target": "secretsmanager.CreateSecret"
+ },
+ )
+
+ rotate_secret = test_client.post('/',
+ data={"SecretId": "i-dont-match"},
+ headers={
+ "X-Amz-Target": "secretsmanager.RotateSecret"
+ },
+ )
+
+ json_data = json.loads(rotate_secret.data.decode("utf-8"))
+ assert json_data['message'] == "Secrets Manager can't find the specified secret"
+ assert json_data['__type'] == 'ResourceNotFoundException'
+
+@mock_secretsmanager
+def test_rotate_secret_client_request_token_too_short():
+ backend = server.create_backend_app('secretsmanager')
+ test_client = backend.test_client()
+
+ create_secret = test_client.post('/',
+ data={"Name": "test-secret",
+ "SecretString": "foosecret"},
+ headers={
+ "X-Amz-Target": "secretsmanager.CreateSecret"
+ },
+ )
+
+ client_request_token = "ED9F8B6C-85B7-B7E4-38F2A3BEB13C"
+ rotate_secret = test_client.post('/',
+ data={"SecretId": "test-secret",
+ "ClientRequestToken": client_request_token},
+ headers={
+ "X-Amz-Target": "secretsmanager.RotateSecret"
+ },
+ )
+
+ json_data = json.loads(rotate_secret.data.decode("utf-8"))
+ assert json_data['message'] == "ClientRequestToken must be 32-64 characters long."
+ assert json_data['__type'] == 'InvalidParameterException'
+
+@mock_secretsmanager
+def test_rotate_secret_client_request_token_too_long():
+ backend = server.create_backend_app('secretsmanager')
+ test_client = backend.test_client()
+
+ create_secret = test_client.post('/',
+ data={"Name": "test-secret",
+ "SecretString": "foosecret"},
+ headers={
+ "X-Amz-Target": "secretsmanager.CreateSecret"
+ },
+ )
+
+ client_request_token = (
+ 'ED9F8B6C-85B7-446A-B7E4-38F2A3BEB13C-'
+ 'ED9F8B6C-85B7-446A-B7E4-38F2A3BEB13C'
+ )
+ rotate_secret = test_client.post('/',
+ data={"SecretId": "test-secret",
+ "ClientRequestToken": client_request_token},
+ headers={
+ "X-Amz-Target": "secretsmanager.RotateSecret"
+ },
+ )
+
+ json_data = json.loads(rotate_secret.data.decode("utf-8"))
+ assert json_data['message'] == "ClientRequestToken must be 32-64 characters long."
+ assert json_data['__type'] == 'InvalidParameterException'
+
+@mock_secretsmanager
+def test_rotate_secret_rotation_lambda_arn_too_long():
+ backend = server.create_backend_app('secretsmanager')
+ test_client = backend.test_client()
+
+ create_secret = test_client.post('/',
+ data={"Name": "test-secret",
+ "SecretString": "foosecret"},
+ headers={
+ "X-Amz-Target": "secretsmanager.CreateSecret"
+ },
+ )
+
+ rotation_lambda_arn = '85B7-446A-B7E4' * 147 # == 2058 characters
+ rotate_secret = test_client.post('/',
+ data={"SecretId": "test-secret",
+ "RotationLambdaARN": rotation_lambda_arn},
+ headers={
+ "X-Amz-Target": "secretsmanager.RotateSecret"
+ },
+ )
+
+ json_data = json.loads(rotate_secret.data.decode("utf-8"))
+ assert json_data['message'] == "RotationLambdaARN must <= 2048 characters long."
+ assert json_data['__type'] == 'InvalidParameterException'
+
+
+#
+# The following tests should work, but fail on the embedded dict in
+# RotationRules. The error message suggests a problem deeper in the code, which
+# needs further investigation.
+#
+
+# @mock_secretsmanager
+# def test_rotate_secret_rotation_period_zero():
+# backend = server.create_backend_app('secretsmanager')
+# test_client = backend.test_client()
+
+# create_secret = test_client.post('/',
+# data={"Name": "test-secret",
+# "SecretString": "foosecret"},
+# headers={
+# "X-Amz-Target": "secretsmanager.CreateSecret"
+# },
+# )
+
+# rotate_secret = test_client.post('/',
+# data={"SecretId": "test-secret",
+# "RotationRules": {"AutomaticallyAfterDays": 0}},
+# headers={
+# "X-Amz-Target": "secretsmanager.RotateSecret"
+# },
+# )
+
+# json_data = json.loads(rotate_secret.data.decode("utf-8"))
+# assert json_data['message'] == "RotationRules.AutomaticallyAfterDays must be within 1-1000."
+# assert json_data['__type'] == 'InvalidParameterException'
+
+# @mock_secretsmanager
+# def test_rotate_secret_rotation_period_too_long():
+# backend = server.create_backend_app('secretsmanager')
+# test_client = backend.test_client()
+
+# create_secret = test_client.post('/',
+# data={"Name": "test-secret",
+# "SecretString": "foosecret"},
+# headers={
+# "X-Amz-Target": "secretsmanager.CreateSecret"
+# },
+# )
+
+# rotate_secret = test_client.post('/',
+# data={"SecretId": "test-secret",
+# "RotationRules": {"AutomaticallyAfterDays": 1001}},
+# headers={
+# "X-Amz-Target": "secretsmanager.RotateSecret"
+# },
+# )
+
+# json_data = json.loads(rotate_secret.data.decode("utf-8"))
+# assert json_data['message'] == "RotationRules.AutomaticallyAfterDays must be within 1-1000."
+# assert json_data['__type'] == 'InvalidParameterException'
diff --git a/tests/test_sqs/test_sqs.py b/tests/test_sqs/test_sqs.py
index d3e4ca917696..9beb9a3faef0 100644
--- a/tests/test_sqs/test_sqs.py
+++ b/tests/test_sqs/test_sqs.py
@@ -40,6 +40,33 @@ def test_create_fifo_queue_fail():
raise RuntimeError('Should of raised InvalidParameterValue Exception')
+@mock_sqs
+def test_create_queue_with_same_attributes():
+ sqs = boto3.client('sqs', region_name='us-east-1')
+
+ dlq_url = sqs.create_queue(QueueName='test-queue-dlq')['QueueUrl']
+ dlq_arn = sqs.get_queue_attributes(QueueUrl=dlq_url)['Attributes']['QueueArn']
+
+ attributes = {
+ 'DelaySeconds': '900',
+ 'MaximumMessageSize': '262144',
+ 'MessageRetentionPeriod': '1209600',
+ 'ReceiveMessageWaitTimeSeconds': '20',
+ 'RedrivePolicy': '{"deadLetterTargetArn": "%s", "maxReceiveCount": 100}' % (dlq_arn),
+ 'VisibilityTimeout': '43200'
+ }
+
+ sqs.create_queue(
+ QueueName='test-queue',
+ Attributes=attributes
+ )
+
+ sqs.create_queue(
+ QueueName='test-queue',
+ Attributes=attributes
+ )
+
+
@mock_sqs
def test_create_queue_with_different_attributes_fail():
sqs = boto3.client('sqs', region_name='us-east-1')
@@ -1195,3 +1222,16 @@ def test_receive_messages_with_message_group_id_on_visibility_timeout():
messages = queue.receive_messages()
messages.should.have.length_of(1)
messages[0].message_id.should.equal(message.message_id)
+
+@mock_sqs
+def test_receive_message_for_queue_with_receive_message_wait_time_seconds_set():
+ sqs = boto3.resource('sqs', region_name='us-east-1')
+
+ queue = sqs.create_queue(
+ QueueName='test-queue',
+ Attributes={
+ 'ReceiveMessageWaitTimeSeconds': '2',
+ }
+ )
+
+ queue.receive_messages()
diff --git a/tests/test_ssm/test_ssm_boto3.py b/tests/test_ssm/test_ssm_boto3.py
index 7a0685d56ae0..f8ef3a237825 100644
--- a/tests/test_ssm/test_ssm_boto3.py
+++ b/tests/test_ssm/test_ssm_boto3.py
@@ -5,11 +5,12 @@
import sure # noqa
import datetime
import uuid
+import json
from botocore.exceptions import ClientError
from nose.tools import assert_raises
-from moto import mock_ssm
+from moto import mock_ssm, mock_cloudformation
@mock_ssm
@@ -668,3 +669,118 @@ def test_list_commands():
with assert_raises(ClientError):
response = client.list_commands(
CommandId=str(uuid.uuid4()))
+
+@mock_ssm
+def test_get_command_invocation():
+ client = boto3.client('ssm', region_name='us-east-1')
+
+ ssm_document = 'AWS-RunShellScript'
+ params = {'commands': ['#!/bin/bash\necho \'hello world\'']}
+
+ response = client.send_command(
+ InstanceIds=['i-123456', 'i-234567', 'i-345678'],
+ DocumentName=ssm_document,
+ Parameters=params,
+ OutputS3Region='us-east-2',
+ OutputS3BucketName='the-bucket',
+ OutputS3KeyPrefix='pref')
+
+ cmd = response['Command']
+ cmd_id = cmd['CommandId']
+
+ instance_id = 'i-345678'
+ invocation_response = client.get_command_invocation(
+ CommandId=cmd_id,
+ InstanceId=instance_id,
+ PluginName='aws:runShellScript')
+
+ invocation_response['CommandId'].should.equal(cmd_id)
+ invocation_response['InstanceId'].should.equal(instance_id)
+
+ # test the error case for an invalid instance id
+ with assert_raises(ClientError):
+ invocation_response = client.get_command_invocation(
+ CommandId=cmd_id,
+ InstanceId='i-FAKE')
+
+ # test the error case for an invalid plugin name
+ with assert_raises(ClientError):
+ invocation_response = client.get_command_invocation(
+ CommandId=cmd_id,
+ InstanceId=instance_id,
+ PluginName='FAKE')
+
+@mock_ssm
+@mock_cloudformation
+def test_get_command_invocations_from_stack():
+ stack_template = {
+ "AWSTemplateFormatVersion": "2010-09-09",
+ "Description": "Test Stack",
+ "Resources": {
+ "EC2Instance1": {
+ "Type": "AWS::EC2::Instance",
+ "Properties": {
+ "ImageId": "ami-test-image-id",
+ "KeyName": "test",
+ "InstanceType": "t2.micro",
+ "Tags": [
+ {
+ "Key": "Test Description",
+ "Value": "Test tag"
+ },
+ {
+ "Key": "Test Name",
+ "Value": "Name tag for tests"
+ }
+ ]
+ }
+ }
+ },
+ "Outputs": {
+ "test": {
+ "Description": "Test Output",
+ "Value": "Test output value",
+ "Export": {
+ "Name": "Test value to export"
+ }
+ },
+ "PublicIP": {
+ "Value": "Test public ip"
+ }
+ }
+ }
+
+ cloudformation_client = boto3.client(
+ 'cloudformation',
+ region_name='us-east-1')
+
+ stack_template_str = json.dumps(stack_template)
+
+ response = cloudformation_client.create_stack(
+ StackName='test_stack',
+ TemplateBody=stack_template_str,
+ Capabilities=('CAPABILITY_IAM', ))
+
+ client = boto3.client('ssm', region_name='us-east-1')
+
+ ssm_document = 'AWS-RunShellScript'
+ params = {'commands': ['#!/bin/bash\necho \'hello world\'']}
+
+ response = client.send_command(
+ Targets=[{
+ 'Key': 'tag:aws:cloudformation:stack-name',
+ 'Values': ('test_stack', )}],
+ DocumentName=ssm_document,
+ Parameters=params,
+ OutputS3Region='us-east-2',
+ OutputS3BucketName='the-bucket',
+ OutputS3KeyPrefix='pref')
+
+ cmd = response['Command']
+ cmd_id = cmd['CommandId']
+ instance_ids = cmd['InstanceIds']
+
+ invocation_response = client.get_command_invocation(
+ CommandId=cmd_id,
+ InstanceId=instance_ids[0],
+ PluginName='aws:runShellScript')