diff --git a/samcli/lib/pipeline/bootstrap/environment.py b/samcli/lib/pipeline/bootstrap/environment.py
index eb87a8167b..9d56e797bd 100644
--- a/samcli/lib/pipeline/bootstrap/environment.py
+++ b/samcli/lib/pipeline/bootstrap/environment.py
@@ -152,11 +152,9 @@ def bootstrap(self, confirm_changeset: bool = True) -> bool:
         if not confirmed:
             return False
 
-        sanitized_environment_name: str = re.sub("[^0-9a-zA-Z]+", "-", self.name)
-        stack_name: str = f"{STACK_NAME_PREFIX}-{sanitized_environment_name}-{ENVIRONMENT_RESOURCES_STACK_NAME_SUFFIX}"
         environment_resources_template_body = Environment._read_template(ENVIRONMENT_RESOURCES_CFN_TEMPLATE)
         output: StackOutput = manage_stack(
-            stack_name=stack_name,
+            stack_name=self._get_stack_name(),
             region=self.aws_region,
             profile=self.aws_profile,
             template_body=environment_resources_template_body,
@@ -296,3 +294,7 @@ def print_resources_summary(self) -> None:
         )
         click.secho(f"\tACCESS_KEY_ID: {self.pipeline_user.access_key_id}", fg="green")
         click.secho(f"\tSECRET_ACCESS_KEY: {self.pipeline_user.secret_access_key}", fg="green")
+
+    def _get_stack_name(self) -> str:
+        sanitized_environment_name: str = re.sub("[^0-9a-zA-Z]+", "-", self.name)
+        return f"{STACK_NAME_PREFIX}-{sanitized_environment_name}-{ENVIRONMENT_RESOURCES_STACK_NAME_SUFFIX}"
diff --git a/tests/integration/pipeline/__init__.py b/tests/integration/pipeline/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/integration/pipeline/base.py b/tests/integration/pipeline/base.py
new file mode 100644
index 0000000000..5a3a321d86
--- /dev/null
+++ b/tests/integration/pipeline/base.py
@@ -0,0 +1,129 @@
+import os
+import shutil
+from pathlib import Path
+from typing import List, Optional, Set, Tuple, Any
+from unittest import TestCase
+from unittest.mock import Mock
+
+import boto3
+from botocore.exceptions import ClientError
+
+from samcli.lib.pipeline.bootstrap.environment import Environment
+
+
+class PipelineBase(TestCase):
+    def base_command(self):
+        command = "sam"
+        if os.getenv("SAM_CLI_DEV"):
+            command = "samdev"
+
+        return command
+
+
+class InitIntegBase(PipelineBase):
+    generated_files: List[Path] = []
+
+    @classmethod
+    def setUpClass(cls) -> None:
+        # we need to compare the whole generated template, which is
+        # larger than the normal diff size limit
+        cls.maxDiff = None
+
+    def setUp(self) -> None:
+        super().setUp()
+        self.generated_files = []
+
+    def tearDown(self) -> None:
+        for generated_file in self.generated_files:
+            if generated_file.is_dir():
+                shutil.rmtree(generated_file, ignore_errors=True)
+            elif generated_file.exists():
+                generated_file.unlink()
+        super().tearDown()
+
+    def get_init_command_list(
+        self,
+    ):
+        command_list = [self.base_command(), "pipeline", "init"]
+        return command_list
+
+
+class BootstrapIntegBase(PipelineBase):
+    stack_names: List[str]
+    cf_client: Any
+
+    @classmethod
+    def setUpClass(cls):
+        cls.cf_client = boto3.client("cloudformation")
+
+    def setUp(self):
+        self.stack_names = []
+        super().setUp()
+
+    def tearDown(self):
+        for stack_name in self.stack_names:
+            self.cf_client.delete_stack(StackName=stack_name)
+        shutil.rmtree(os.path.join(os.getcwd(), ".aws-sam", "pipeline"), ignore_errors=True)
+        super().tearDown()
+
+    def get_bootstrap_command_list(
+        self,
+        no_interactive: bool = False,
+        env_name: Optional[str] = None,
+        pipeline_user: Optional[str] = None,
+        pipeline_execution_role: Optional[str] = None,
+        cloudformation_execution_role: Optional[str] = None,
+        artifacts_bucket: Optional[str] = None,
+        create_image_repository: bool = False,
+        image_repository: Optional[str] = None,
+        pipeline_ip_range: Optional[str] = None,
+        no_confirm_changeset: bool = False,
+    ):
+        command_list = [self.base_command(), "pipeline", "bootstrap"]
+
+        if no_interactive:
+            command_list += ["--no-interactive"]
+        if env_name:
+            command_list += ["--environment", env_name]
+        if pipeline_user:
+            command_list += ["--pipeline-user", pipeline_user]
+        if pipeline_execution_role:
+            command_list += ["--pipeline-execution-role", pipeline_execution_role]
+        if cloudformation_execution_role:
+            command_list += ["--cloudformation-execution-role", cloudformation_execution_role]
+        if artifacts_bucket:
+            command_list += ["--artifacts-bucket", artifacts_bucket]
+        if create_image_repository:
+            command_list += ["--create-image-repository"]
+        if image_repository:
+            command_list += ["--image-repository", image_repository]
+        if pipeline_ip_range:
+            command_list += ["--pipeline-ip-range", pipeline_ip_range]
+        if no_confirm_changeset:
+            command_list += ["--no-confirm-changeset"]
+
+        return command_list
+
+    def _extract_created_resource_logical_ids(self, stack_name: str) -> Set[str]:
+        response = self.cf_client.describe_stack_resources(StackName=stack_name)
+        return {resource["LogicalResourceId"] for resource in response["StackResources"]}
+
+    def _stack_exists(self, stack_name) -> bool:
+        try:
+            self.cf_client.describe_stacks(StackName=stack_name)
+            return True
+        except ClientError as ex:
+            if "does not exist" in ex.response.get("Error", {}).get("Message", ""):
+                return False
+            raise ex
+
+    def _get_env_and_stack_name(self, suffix: str = "") -> Tuple[str, str]:
+        # self.id() returns the fully qualified test name, e.g. test.integration.test_bootstrap_command.method_name
+        method_name = self.id().split(".")[-1]
+        env_name = method_name.replace("_", "-") + suffix
+
+        mock_env = Mock()
+        mock_env.name = env_name
+        stack_name = Environment._get_stack_name(mock_env)
+
+        return env_name, stack_name
diff --git a/tests/integration/pipeline/test_bootstrap_command.py b/tests/integration/pipeline/test_bootstrap_command.py
new file mode 100644
index 0000000000..389295ae8a
--- /dev/null
+++ b/tests/integration/pipeline/test_bootstrap_command.py
@@ -0,0 +1,247 @@
+from unittest import skipIf
+
+from parameterized import parameterized
+
+from tests.integration.pipeline.base import BootstrapIntegBase
+from tests.testing_utils import (
+    run_command_with_input,
+    RUNNING_ON_CI,
+    RUNNING_TEST_FOR_MASTER_ON_CI,
+    RUN_BY_CANARY,
+    run_command,
+    run_command_with_inputs,
+)
+
+# bootstrap tests require credentials, and CI/CD will only add credentials to the env if the PR is from the same repo.
+# This restricts the tests to run only outside CI/CD, when the branch is not master, or when the tests are run by Canary.
+SKIP_BOOTSTRAP_TESTS = RUNNING_ON_CI and RUNNING_TEST_FOR_MASTER_ON_CI and not RUN_BY_CANARY
+
+
+@skipIf(SKIP_BOOTSTRAP_TESTS, "Skip bootstrap tests in CI/CD only")
+class TestBootstrap(BootstrapIntegBase):
+    @parameterized.expand([("create_image_repository",), (False,)])
+    def test_interactive_with_no_resources_provided(self, create_image_repository: bool):
+        env_name, stack_name = self._get_env_and_stack_name()
+        self.stack_names = [stack_name]
+
+        bootstrap_command_list = self.get_bootstrap_command_list()
+
+        inputs = [
+            env_name,
+            "",  # pipeline user
+            "",  # Pipeline execution role
+            "",  # CloudFormation execution role
+            "",  # Artifacts bucket
+            "2" if create_image_repository else "1",  # Should we create ECR repo, 1 - No, 2 - Yes
+            "",  # Pipeline IP address range
+            "y",  # proceed
+        ]
+
+        bootstrap_process_execute = run_command_with_inputs(bootstrap_command_list, inputs)
+
+        self.assertEqual(bootstrap_process_execute.process.returncode, 0)
+        stdout = bootstrap_process_execute.stdout.decode()
+        self.assertIn("We have created the following resources", stdout)
+        # make sure the pipeline user's credentials are printed
+        self.assertIn("ACCESS_KEY_ID", stdout)
+        self.assertIn("SECRET_ACCESS_KEY", stdout)
+
+        common_resources = {
+            "PipelineUser",
+            "PipelineUserAccessKey",
+            "CloudFormationExecutionRole",
+            "PipelineExecutionRole",
+            "ArtifactsBucket",
+            "ArtifactsBucketPolicy",
+            "PipelineExecutionRolePermissionPolicy",
+        }
+        if create_image_repository:
+            self.assertSetEqual(
+                {
+                    *common_resources,
+                    "ImageRepository",
+                },
+                self._extract_created_resource_logical_ids(stack_name),
+            )
+        else:
+            self.assertSetEqual(common_resources, self._extract_created_resource_logical_ids(stack_name))
+
+    @parameterized.expand([("create_image_repository",), (False,)])
+    def test_non_interactive_with_no_resources_provided(self, create_image_repository: bool):
+        env_name, stack_name = self._get_env_and_stack_name()
+        self.stack_names = [stack_name]
+
+        bootstrap_command_list = self.get_bootstrap_command_list(
+            no_interactive=True, create_image_repository=create_image_repository, no_confirm_changeset=True
+        )
+
+        bootstrap_process_execute = run_command(bootstrap_command_list)
+
+        self.assertEqual(bootstrap_process_execute.process.returncode, 2)
+        stderr = bootstrap_process_execute.stderr.decode()
+        self.assertIn("Missing required parameter", stderr)
+
+    def test_interactive_with_all_required_resources_provided(self):
+        env_name, stack_name = self._get_env_and_stack_name()
+        self.stack_names = [stack_name]
+
+        bootstrap_command_list = self.get_bootstrap_command_list()
+
+        inputs = [
+            env_name,
+            "arn:aws:iam::123:user/user-name",  # pipeline user
+            "arn:aws:iam::123:role/role-name",  # Pipeline execution role
+            "arn:aws:iam::123:role/role-name",  # CloudFormation execution role
+            "arn:aws:s3:::bucket-name",  # Artifacts bucket
+            "3",  # Should we create ECR repo, 3 - specify one
+            "arn:aws:ecr:::repository/repo-name",  # ecr repo
+            "1.2.3.4/24",  # Pipeline IP address range
+            "y",  # proceed
+        ]
+
+        bootstrap_process_execute = run_command_with_inputs(bootstrap_command_list, inputs)
+
+        self.assertEqual(bootstrap_process_execute.process.returncode, 0)
+        stdout = bootstrap_process_execute.stdout.decode()
+        self.assertIn("skipping creation", stdout)
+
+    def test_no_interactive_with_all_required_resources_provided(self):
+        env_name, stack_name = self._get_env_and_stack_name()
+        self.stack_names = [stack_name]
+
+        bootstrap_command_list = self.get_bootstrap_command_list(
+            no_interactive=True,
+            env_name=env_name,
+            pipeline_user="arn:aws:iam::123:user/user-name",  # pipeline user
+            pipeline_execution_role="arn:aws:iam::123:role/role-name",  # Pipeline execution role
+            cloudformation_execution_role="arn:aws:iam::123:role/role-name",  # CloudFormation execution role
+            artifacts_bucket="arn:aws:s3:::bucket-name",  # Artifacts bucket
+            image_repository="arn:aws:ecr:::repository/repo-name",  # ecr repo
+            pipeline_ip_range="1.2.3.4/24",  # Pipeline IP address range
+        )
+
+        bootstrap_process_execute = run_command(bootstrap_command_list)
+
+        self.assertEqual(bootstrap_process_execute.process.returncode, 0)
+        stdout = bootstrap_process_execute.stdout.decode()
+        self.assertIn("skipping creation", stdout)
+
+    @parameterized.expand([("confirm_changeset",), (False,)])
+    def test_no_interactive_with_some_required_resources_provided(self, confirm_changeset):
+        env_name, stack_name = self._get_env_and_stack_name()
+        self.stack_names = [stack_name]
+
+        bootstrap_command_list = self.get_bootstrap_command_list(
+            no_interactive=True,
+            env_name=env_name,
+            pipeline_user="arn:aws:iam::123:user/user-name",  # pipeline user
+            pipeline_execution_role="arn:aws:iam::123:role/role-name",  # Pipeline execution role
+            # CloudFormation execution role missing
+            artifacts_bucket="arn:aws:s3:::bucket-name",  # Artifacts bucket
+            image_repository="arn:aws:ecr:::repository/repo-name",  # ecr repo
+            pipeline_ip_range="1.2.3.4/24",  # Pipeline IP address range
+            no_confirm_changeset=not confirm_changeset,
+        )
+
+        inputs = [
+            "y",  # proceed
+        ]
+
+        bootstrap_process_execute = run_command_with_inputs(bootstrap_command_list, inputs if confirm_changeset else [])
+
+        self.assertEqual(bootstrap_process_execute.process.returncode, 0)
+        stdout = bootstrap_process_execute.stdout.decode()
+        self.assertIn("Successfully created!", stdout)
+        self.assertSetEqual({"CloudFormationExecutionRole"}, self._extract_created_resource_logical_ids(stack_name))
+
+    def test_interactive_cancelled_by_user(self):
+        env_name, stack_name = self._get_env_and_stack_name()
+        self.stack_names = [stack_name]
+
+        bootstrap_command_list = self.get_bootstrap_command_list()
+
+        inputs = [
+            env_name,
+            "arn:aws:iam::123:user/user-name",  # pipeline user
+            "",  # Pipeline execution role
+            "",  # CloudFormation execution role
+            "",  # Artifacts bucket
+            "1",  # Should we create ECR repo, 1 - No
+            "",  # Pipeline IP address range
+            "N",  # cancel
+        ]
+
+        bootstrap_process_execute = run_command_with_inputs(bootstrap_command_list, inputs)
+
+        self.assertEqual(bootstrap_process_execute.process.returncode, 0)
+        stdout = bootstrap_process_execute.stdout.decode()
+        self.assertTrue(stdout.strip().endswith("Should we proceed with the creation? [y/N]:"))
[y/N]:")) + self.assertFalse(self._stack_exists(stack_name)) + + def test_interactive_with_some_required_resources_provided(self): + env_name, stack_name = self._get_env_and_stack_name() + self.stack_names = [stack_name] + + bootstrap_command_list = self.get_bootstrap_command_list() + + inputs = [ + env_name, + "arn:aws:iam::123:user/user-name", # pipeline user + "arn:aws:iam::123:role/role-name", # Pipeline execution role + "", # CloudFormation execution role + "arn:aws:s3:::bucket-name", # Artifacts bucket + "3", # Should we create ECR repo, 3 - specify one + "arn:aws:ecr:::repository/repo-name", # ecr repo + "1.2.3.4/24", # Pipeline IP address range + "y", # proceed + ] + + bootstrap_process_execute = run_command_with_inputs(bootstrap_command_list, inputs) + + self.assertEqual(bootstrap_process_execute.process.returncode, 0) + stdout = bootstrap_process_execute.stdout.decode() + self.assertIn("Successfully created!", stdout) + # make sure the not provided resource is the only resource created. + self.assertSetEqual({"CloudFormationExecutionRole"}, self._extract_created_resource_logical_ids(stack_name)) + + def test_interactive_pipeline_user_only_created_once(self): + """ + Create 3 stages, only the first stage resource stack creates + a pipeline user, and the remaining two share the same pipeline user. + """ + env_names = [] + for suffix in ["1", "2", "3"]: + env_name, stack_name = self._get_env_and_stack_name(suffix) + env_names.append(env_name) + self.stack_names.append(stack_name) + + bootstrap_command_list = self.get_bootstrap_command_list() + + for i, env_name in enumerate(env_names): + inputs = [ + env_name, + *([""] if i == 0 else []), # pipeline user + "arn:aws:iam::123:role/role-name", # Pipeline execution role + "arn:aws:iam::123:role/role-name", # CloudFormation execution role + "arn:aws:s3:::bucket-name", # Artifacts bucket + "1", # Should we create ECR repo, 1 - No, 2 - Yes + "", # Pipeline IP address range + "y", # proceed + ] + + bootstrap_process_execute = run_command_with_input( + bootstrap_command_list, ("\n".join(inputs) + "\n").encode() + ) + + self.assertEqual(bootstrap_process_execute.process.returncode, 0) + stdout = bootstrap_process_execute.stdout.decode() + + # only first stage creates pipeline user + if i == 0: + self.assertIn("We have created the following resources", stdout) + self.assertSetEqual( + {"PipelineUser", "PipelineUserAccessKey"}, + self._extract_created_resource_logical_ids(self.stack_names[i]), + ) + else: + self.assertIn("skipping creation", stdout) diff --git a/tests/integration/pipeline/test_init_command.py b/tests/integration/pipeline/test_init_command.py new file mode 100644 index 0000000000..804249ea59 --- /dev/null +++ b/tests/integration/pipeline/test_init_command.py @@ -0,0 +1,78 @@ +from pathlib import Path + +from tests.integration.pipeline.base import InitIntegBase +from tests.testing_utils import run_command_with_inputs + +QUICK_START_JENKINS_INPUTS = [ + "1", # quick start + "1", # jenkins, this depends on the template repo. + "1", # two stage pipeline, this depends on the template repo. 
+ "credential-id", + "main", + "template.yaml", + "test", + "test-stack", + "test-pipeline-execution-role", + "test-cfn-execution-role", + "test-bucket", + "test-ecr", + "us-east-2", + "prod", + "prod-stack", + "prod-pipeline-execution-role", + "prod-cfn-execution-role", + "prod-bucket", + "prod-ecr", + "us-west-2", +] + + +class TestInit(InitIntegBase): + """ + Here we use Jenkins template for testing + """ + + def test_quick_start(self): + generated_jenkinsfile_path = Path("Jenkinsfile") + self.generated_files.append(generated_jenkinsfile_path) + + init_command_list = self.get_init_command_list() + init_process_execute = run_command_with_inputs(init_command_list, QUICK_START_JENKINS_INPUTS) + + self.assertEqual(init_process_execute.process.returncode, 0) + self.assertTrue(Path("Jenkinsfile").exists()) + + expected_file_path = Path(__file__).parent.parent.joinpath(Path("testdata", "pipeline", "expected_jenkinsfile")) + with open(expected_file_path, "r") as expected, open(generated_jenkinsfile_path, "r") as output: + self.assertEqual(expected.read(), output.read()) + + def test_failed_when_generated_file_already_exist(self): + generated_jenkinsfile_path = Path("Jenkinsfile") + generated_jenkinsfile_path.touch() # the file now pre-exists + self.generated_files.append(generated_jenkinsfile_path) + + init_command_list = self.get_init_command_list() + init_process_execute = run_command_with_inputs(init_command_list, QUICK_START_JENKINS_INPUTS) + + self.assertEqual(init_process_execute.process.returncode, 1) + stderr = init_process_execute.stderr.decode() + self.assertIn( + 'Pipeline file "Jenkinsfile" already exists in project root directory, please remove it first.', stderr + ) + + def test_custom_template(self): + generated_file = Path("weather") + self.generated_files.append(generated_file) + + custom_template_path = Path(__file__).parent.parent.joinpath(Path("testdata", "pipeline", "custom_template")) + inputs = ["2", str(custom_template_path), "Rainy"] # custom template + + init_command_list = self.get_init_command_list() + init_process_execute = run_command_with_inputs(init_command_list, inputs) + + self.assertEqual(init_process_execute.process.returncode, 0) + + self.assertTrue(generated_file.exists()) + + with open(generated_file, "r") as f: + self.assertEqual("Rainy\n", f.read()) diff --git a/tests/integration/testdata/pipeline/custom_template/cookiecutter.json b/tests/integration/testdata/pipeline/custom_template/cookiecutter.json new file mode 100644 index 0000000000..c02b7caed1 --- /dev/null +++ b/tests/integration/testdata/pipeline/custom_template/cookiecutter.json @@ -0,0 +1,4 @@ +{ + "outputDir": "aws-sam-pipeline", + "weather": "" +} \ No newline at end of file diff --git a/tests/integration/testdata/pipeline/custom_template/questions.json b/tests/integration/testdata/pipeline/custom_template/questions.json new file mode 100644 index 0000000000..a0fe2167bf --- /dev/null +++ b/tests/integration/testdata/pipeline/custom_template/questions.json @@ -0,0 +1,7 @@ +{ + "questions": [{ + "key": "weather", + "question": "How is the weather today?", + "default": "Sunny" + }] +} \ No newline at end of file diff --git a/tests/integration/testdata/pipeline/custom_template/{{cookiecutter.outputDir}}/weather b/tests/integration/testdata/pipeline/custom_template/{{cookiecutter.outputDir}}/weather new file mode 100644 index 0000000000..3501ffd0ae --- /dev/null +++ b/tests/integration/testdata/pipeline/custom_template/{{cookiecutter.outputDir}}/weather @@ -0,0 +1 @@ +{{cookiecutter.weather}} 
diff --git a/tests/integration/testdata/pipeline/expected_jenkinsfile b/tests/integration/testdata/pipeline/expected_jenkinsfile
new file mode 100644
index 0000000000..7a213a30f9
--- /dev/null
+++ b/tests/integration/testdata/pipeline/expected_jenkinsfile
@@ -0,0 +1,177 @@
+pipeline {
+    agent any
+    environment {
+        PIPELINE_USER_CREDENTIAL_ID = 'credential-id'
+        SAM_TEMPLATE = 'template.yaml'
+        MAIN_BRANCH = 'main'
+        TESTING_STACK_NAME = 'test-stack'
+        TESTING_PIPELINE_EXECUTION_ROLE = 'test-pipeline-execution-role'
+        TESTING_CLOUDFORMATION_EXECUTION_ROLE = 'test-cfn-execution-role'
+        TESTING_ARTIFACTS_BUCKET = 'test-bucket'
+        TESTING_IMAGE_REPOSITORY = 'test-ecr'
+        TESTING_REGION = 'us-east-2'
+        PROD_STACK_NAME = 'prod-stack'
+        PROD_PIPELINE_EXECUTION_ROLE = 'prod-pipeline-execution-role'
+        PROD_CLOUDFORMATION_EXECUTION_ROLE = 'prod-cfn-execution-role'
+        PROD_ARTIFACTS_BUCKET = 'prod-bucket'
+        PROD_IMAGE_REPOSITORY = 'prod-ecr'
+        PROD_REGION = 'us-west-2'
+    }
+    stages {
+        // uncomment and modify the following step for running the unit-tests
+        // stage('test') {
+        //     steps {
+        //         sh '''
+        //             # trigger the tests here
+        //         '''
+        //     }
+        // }
+
+        stage('build-and-deploy-feature') {
+            // this stage is triggered only for feature branches (feature*),
+            // which will build the stack and deploy to a stack named with branch name.
+            when {
+                branch 'feature*'
+            }
+            agent {
+                docker {
+                    image 'public.ecr.aws/sam/build-provided'
+                    args '--user 0:0 -v /var/run/docker.sock:/var/run/docker.sock'
+                }
+            }
+            steps {
+                sh 'sam build --template ${SAM_TEMPLATE} --use-container'
+                withAWS(
+                        credentials: env.PIPELINE_USER_CREDENTIAL_ID,
+                        region: env.TESTING_REGION,
+                        role: env.TESTING_PIPELINE_EXECUTION_ROLE,
+                        roleSessionName: 'deploying-feature') {
+                    sh '''
+                        sam deploy --stack-name $(echo ${BRANCH_NAME} | tr -cd '[a-zA-Z0-9-]') \
+                            --capabilities CAPABILITY_IAM \
+                            --region ${TESTING_REGION} \
+                            --s3-bucket ${TESTING_ARTIFACTS_BUCKET} \
+                            --image-repository ${TESTING_IMAGE_REPOSITORY} \
+                            --no-fail-on-empty-changeset \
+                            --role-arn ${TESTING_CLOUDFORMATION_EXECUTION_ROLE}
+                    '''
+                }
+            }
+        }
+
+        stage('build-and-package') {
+            when {
+                branch env.MAIN_BRANCH
+            }
+            agent {
+                docker {
+                    image 'public.ecr.aws/sam/build-provided'
+                    args '--user 0:0 -v /var/run/docker.sock:/var/run/docker.sock'
+                }
+            }
+            steps {
+                sh 'sam build --template ${SAM_TEMPLATE} --use-container'
+                withAWS(
+                        credentials: env.PIPELINE_USER_CREDENTIAL_ID,
+                        region: env.TESTING_REGION,
+                        role: env.TESTING_PIPELINE_EXECUTION_ROLE,
+                        roleSessionName: 'testing-packaging') {
+                    sh '''
+                        sam package \
+                            --s3-bucket ${TESTING_ARTIFACTS_BUCKET} \
+                            --image-repository ${TESTING_IMAGE_REPOSITORY} \
+                            --region ${TESTING_REGION} \
+                            --output-template-file packaged-testing.yaml
+                    '''
+                }
+
+                withAWS(
+                        credentials: env.PIPELINE_USER_CREDENTIAL_ID,
+                        region: env.PROD_REGION,
+                        role: env.PROD_PIPELINE_EXECUTION_ROLE,
+                        roleSessionName: 'prod-packaging') {
+                    sh '''
+                        sam package \
+                            --s3-bucket ${PROD_ARTIFACTS_BUCKET} \
+                            --image-repository ${PROD_IMAGE_REPOSITORY} \
+                            --region ${PROD_REGION} \
+                            --output-template-file packaged-prod.yaml
+                    '''
+                }
+
+                archiveArtifacts artifacts: 'packaged-testing.yaml'
+                archiveArtifacts artifacts: 'packaged-prod.yaml'
+            }
+        }
+
+        stage('deploy-testing') {
+            when {
+                branch env.MAIN_BRANCH
+            }
+            agent {
+                docker {
+                    image 'public.ecr.aws/sam/build-provided'
+                }
+            }
+            steps {
+                withAWS(
+                        credentials: env.PIPELINE_USER_CREDENTIAL_ID,
+                        region: env.TESTING_REGION,
+                        role: env.TESTING_PIPELINE_EXECUTION_ROLE,
+                        roleSessionName: 'testing-deployment') {
+                    sh '''
+                        sam deploy --stack-name ${TESTING_STACK_NAME} \
+                            --template packaged-testing.yaml \
+                            --capabilities CAPABILITY_IAM \
+                            --region ${TESTING_REGION} \
+                            --s3-bucket ${TESTING_ARTIFACTS_BUCKET} \
+                            --image-repository ${TESTING_IMAGE_REPOSITORY} \
+                            --no-fail-on-empty-changeset \
+                            --role-arn ${TESTING_CLOUDFORMATION_EXECUTION_ROLE}
+                    '''
+                }
+            }
+        }
+
+        // uncomment and modify the following step for running the integration-tests
+        // stage('integration-test') {
+        //     when {
+        //         branch env.MAIN_BRANCH
+        //     }
+        //     steps {
+        //         sh '''
+        //             # trigger the integration tests here
+        //         '''
+        //     }
+        // }
+
+        stage('deploy-prod') {
+            when {
+                branch env.MAIN_BRANCH
+            }
+            agent {
+                docker {
+                    image 'public.ecr.aws/sam/build-provided'
+                }
+            }
+            steps {
+                withAWS(
+                        credentials: env.PIPELINE_USER_CREDENTIAL_ID,
+                        region: env.PROD_REGION,
+                        role: env.PROD_PIPELINE_EXECUTION_ROLE,
+                        roleSessionName: 'prod-deployment') {
+                    sh '''
+                        sam deploy --stack-name ${PROD_STACK_NAME} \
+                            --template packaged-prod.yaml \
+                            --capabilities CAPABILITY_IAM \
+                            --region ${PROD_REGION} \
+                            --s3-bucket ${PROD_ARTIFACTS_BUCKET} \
+                            --image-repository ${PROD_IMAGE_REPOSITORY} \
+                            --no-fail-on-empty-changeset \
+                            --role-arn ${PROD_CLOUDFORMATION_EXECUTION_ROLE}
+                    '''
+                }
+            }
+        }
+    }
+}
diff --git a/tests/testing_utils.py b/tests/testing_utils.py
index 0cc7aa3067..78da67ab0c 100644
--- a/tests/testing_utils.py
+++ b/tests/testing_utils.py
@@ -5,6 +5,7 @@
 import shutil
 from collections import namedtuple
 from subprocess import Popen, PIPE, TimeoutExpired
+from typing import List
 
 IS_WINDOWS = platform.system().lower() == "windows"
 RUNNING_ON_CI = os.environ.get("APPVEYOR", False)
@@ -50,6 +51,10 @@ def run_command_with_input(command_list, stdin_input, timeout=TIMEOUT) -> Comman
         raise
 
 
+def run_command_with_inputs(command_list: List[str], inputs: List[str], timeout=TIMEOUT) -> CommandResult:
+    return run_command_with_input(command_list, ("\n".join(inputs) + "\n").encode(), timeout)
+
+
 class FileCreator(object):
     def __init__(self):
         self.rootdir = tempfile.mkdtemp()
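
A minimal usage sketch of the new run_command_with_inputs helper (not part of the patch; the command and answer list below are illustrative, mirroring the interactive prompt sequence exercised in test_bootstrap_command.py):

    from tests.testing_utils import run_command_with_inputs

    # each list entry answers one interactive prompt; the helper joins them
    # with newlines, appends a trailing newline, and writes it all to stdin
    result = run_command_with_inputs(
        ["sam", "pipeline", "bootstrap"],
        ["env-name", "", "", "", "", "1", "", "y"],  # accept defaults, no ECR repo, proceed
    )
    assert result.process.returncode == 0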