diff --git a/samcli/lib/pipeline/bootstrap/stage.py b/samcli/lib/pipeline/bootstrap/stage.py
index 7311e86803..d98081237b 100644
--- a/samcli/lib/pipeline/bootstrap/stage.py
+++ b/samcli/lib/pipeline/bootstrap/stage.py
@@ -258,7 +258,7 @@ def save_config(self, config_dir: str, filename: str, cmd_names: List[str]) -> N
         except ValueError:
             artifacts_bucket_name = ""
         try:
-            image_repository_uri: Optional[str] = self.image_repository.get_uri()
+            image_repository_uri: Optional[str] = self.image_repository.get_uri() or ""
         except ValueError:
             image_repository_uri = ""
 
@@ -266,12 +266,14 @@ def save_config(self, config_dir: str, filename: str, cmd_names: List[str]) -> N
             PIPELINE_EXECUTION_ROLE: self.pipeline_execution_role.arn,
             CLOUDFORMATION_EXECUTION_ROLE: self.cloudformation_execution_role.arn,
             ARTIFACTS_BUCKET: artifacts_bucket_name,
+            # even if the image repository is None, we want to save it as an empty string
+            # so that the pipeline init command can pick it up
             ECR_IMAGE_REPOSITORY: image_repository_uri,
             REGION: self.aws_region,
         }
 
         for key, value in environment_specific_configs.items():
-            if value:
+            if value is not None:
                 samconfig.put(
                     cmd_names=cmd_names,
                     section="parameters",
diff --git a/tests/integration/pipeline/base.py b/tests/integration/pipeline/base.py
index 6fc027266b..adb0e628e1 100644
--- a/tests/integration/pipeline/base.py
+++ b/tests/integration/pipeline/base.py
@@ -43,10 +43,10 @@ def tearDown(self) -> None:
                 generated_file.unlink()
         super().tearDown()
 
-    def get_init_command_list(
-        self,
-    ):
+    def get_init_command_list(self, with_bootstrap=False):
         command_list = [self.base_command(), "pipeline", "init"]
+        if with_bootstrap:
+            command_list.append("--bootstrap")
         return command_list
diff --git a/tests/integration/pipeline/test_init_command.py b/tests/integration/pipeline/test_init_command.py
index bbb269cb00..699579580d 100644
--- a/tests/integration/pipeline/test_init_command.py
+++ b/tests/integration/pipeline/test_init_command.py
@@ -1,12 +1,20 @@
+import shutil
 from pathlib import Path
+from textwrap import dedent
+from typing import List
+from unittest import skipIf
+
+from parameterized import parameterized
 
 from samcli.commands.pipeline.bootstrap.cli import PIPELINE_CONFIG_DIR, PIPELINE_CONFIG_FILENAME
-from tests.integration.pipeline.base import InitIntegBase
+from tests.integration.pipeline.base import InitIntegBase, BootstrapIntegBase
+from tests.integration.pipeline.test_bootstrap_command import SKIP_BOOTSTRAP_TESTS, CREDENTIAL_PROFILE
 from tests.testing_utils import run_command_with_inputs
 
-QUICK_START_JENKINS_INPUTS = [
+QUICK_START_JENKINS_INPUTS_WITHOUT_AUTO_FILL = [
     "1",  # quick start
     "1",  # jenkins, this depends on the template repo.
+ "", "credential-id", "main", "template.yaml", @@ -43,7 +51,7 @@ def test_quick_start(self): self.generated_files.append(generated_jenkinsfile_path) init_command_list = self.get_init_command_list() - init_process_execute = run_command_with_inputs(init_command_list, QUICK_START_JENKINS_INPUTS) + init_process_execute = run_command_with_inputs(init_command_list, QUICK_START_JENKINS_INPUTS_WITHOUT_AUTO_FILL) self.assertEqual(init_process_execute.process.returncode, 0) self.assertTrue(Path("Jenkinsfile").exists()) @@ -58,7 +66,7 @@ def test_failed_when_generated_file_already_exist(self): self.generated_files.append(generated_jenkinsfile_path) init_command_list = self.get_init_command_list() - init_process_execute = run_command_with_inputs(init_command_list, QUICK_START_JENKINS_INPUTS) + init_process_execute = run_command_with_inputs(init_command_list, QUICK_START_JENKINS_INPUTS_WITHOUT_AUTO_FILL) self.assertEqual(init_process_execute.process.returncode, 1) stderr = init_process_execute.stderr.decode() @@ -71,7 +79,7 @@ def test_custom_template(self): self.generated_files.append(generated_file) custom_template_path = Path(__file__).parent.parent.joinpath(Path("testdata", "pipeline", "custom_template")) - inputs = ["2", str(custom_template_path), "Rainy"] # custom template + inputs = ["2", str(custom_template_path), "", "Rainy"] # custom template init_command_list = self.get_init_command_list() init_process_execute = run_command_with_inputs(init_command_list, inputs) @@ -82,3 +90,180 @@ def test_custom_template(self): with open(generated_file, "r") as f: self.assertEqual("Rainy\n", f.read()) + + @parameterized.expand([("with_bootstrap",), (False,)]) + def test_with_pipelineconfig_has_all_stage_values(self, with_bootstrap): + generated_jenkinsfile_path = Path("Jenkinsfile") + self.generated_files.append(generated_jenkinsfile_path) + + pipelineconfig_path = Path(PIPELINE_CONFIG_DIR, PIPELINE_CONFIG_FILENAME) + with open(pipelineconfig_path, "w") as f: + f.write( + dedent( + """\ + version = 0.1 + [default] + [default.pipeline_bootstrap] + [default.pipeline_bootstrap.parameters] + pipeline_user = "arn:aws:iam::123:user/aws-sam-cli-managed-test-pipeline-res-PipelineUser-123" + + [test] + [test.pipeline_bootstrap] + [test.pipeline_bootstrap.parameters] + pipeline_execution_role = "test-pipeline-execution-role" + cloudformation_execution_role = "test-cfn-execution-role" + artifacts_bucket = "test-bucket" + image_repository = "test-ecr" + region = "us-east-2" + + [prod] + [prod.pipeline_bootstrap] + [prod.pipeline_bootstrap.parameters] + pipeline_execution_role = "prod-pipeline-execution-role" + cloudformation_execution_role = "prod-cfn-execution-role" + artifacts_bucket = "prod-bucket" + image_repository = "prod-ecr" + region = "us-west-2" + """ + ) + ) + + inputs = [ + "1", # quick start + "1", # jenkins, this depends on the template repo. 
+ "credential-id", + "main", + "template.yaml", + "1", + "test-stack", + "2", + "prod-stack", + ] + + init_command_list = self.get_init_command_list(with_bootstrap) + init_process_execute = run_command_with_inputs(init_command_list, inputs) + + self.assertEqual(init_process_execute.process.returncode, 0) + self.assertTrue(Path("Jenkinsfile").exists()) + + expected_file_path = Path(__file__).parent.parent.joinpath(Path("testdata", "pipeline", "expected_jenkinsfile")) + with open(expected_file_path, "r") as expected, open(generated_jenkinsfile_path, "r") as output: + self.assertEqual(expected.read(), output.read()) + + +@skipIf(SKIP_BOOTSTRAP_TESTS, "Skip bootstrap tests in CI/CD only") +class TestInitWithBootstrap(BootstrapIntegBase): + generated_files: List[Path] = [] + + def setUp(self): + super().setUp() + self.command_list = [self.base_command(), "pipeline", "init", "--bootstrap"] + generated_jenkinsfile_path = Path("Jenkinsfile") + self.generated_files.append(generated_jenkinsfile_path) + + def tearDown(self) -> None: + for generated_file in self.generated_files: + if generated_file.is_dir(): + shutil.rmtree(generated_file, ignore_errors=True) + elif generated_file.exists(): + generated_file.unlink() + super().tearDown() + + def test_without_stages_in_pipeline_config(self): + stage_names = [] + for suffix in ["1", "2"]: + stage_name, stack_name = self._get_stage_and_stack_name(suffix) + stage_names.append(stage_name) + self.stack_names.append(stack_name) + + inputs = [ + "1", # quick start + "1", # jenkins, this depends on the template repo. + "y", # Do you want to go through stage setup process now? + stage_names[0], + CREDENTIAL_PROFILE, + self.region, + "", # pipeline user + "", # Pipeline execution role + "", # CloudFormation execution role + "", # Artifacts bucket + "N", # no ECR repo + "", # Confirm summary + "y", # Create resources + "y", # Do you want to go through stage setup process now? + stage_names[1], + CREDENTIAL_PROFILE, + self.region, + "", # pipeline user + "", # Pipeline execution role + "", # CloudFormation execution role + "", # Artifacts bucket + "N", # no ECR repo + "", # Confirm summary + "y", # Create resources + "credential-id", + "main", + "template.yaml", + "1", + "test-stack", + "2", + "prod-stack", + ] + init_process_execute = run_command_with_inputs(self.command_list, inputs) + self.assertEqual(init_process_execute.process.returncode, 0) + self.assertIn("Here are the stage names detected", init_process_execute.stdout.decode()) + self.assertIn(stage_names[0], init_process_execute.stdout.decode()) + self.assertIn(stage_names[1], init_process_execute.stdout.decode()) + + def test_with_one_stages_in_pipeline_config(self): + stage_names = [] + for suffix in ["1", "2"]: + stage_name, stack_name = self._get_stage_and_stack_name(suffix) + stage_names.append(stage_name) + self.stack_names.append(stack_name) + + bootstrap_command_list = self.get_bootstrap_command_list() + + inputs = [ + stage_names[0], + CREDENTIAL_PROFILE, + self.region, # region + "", # pipeline user + "", # Pipeline execution role + "", # CloudFormation execution role + "", # Artifacts bucket + "N", # no + "", # Confirm summary + "y", # Create resources + ] + + bootstrap_process_execute = run_command_with_inputs(bootstrap_command_list, inputs) + + self.assertEqual(bootstrap_process_execute.process.returncode, 0) + + inputs = [ + "1", # quick start + "1", # jenkins, this depends on the template repo. + "y", # Do you want to go through stage setup process now? 
+            stage_names[1],
+            CREDENTIAL_PROFILE,
+            self.region,
+            "",  # Pipeline execution role
+            "",  # CloudFormation execution role
+            "",  # Artifacts bucket
+            "N",  # no ECR repo
+            "",  # Confirm summary
+            "y",  # Create resources
+            "credential-id",
+            "main",
+            "template.yaml",
+            "1",
+            "test-stack",
+            "2",
+            "prod-stack",
+        ]
+        init_process_execute = run_command_with_inputs(self.command_list, inputs)
+        self.assertEqual(init_process_execute.process.returncode, 0)
+        self.assertIn("Here are the stage names detected", init_process_execute.stdout.decode())
+        self.assertIn(stage_names[0], init_process_execute.stdout.decode())
+        self.assertIn(stage_names[1], init_process_execute.stdout.decode())
diff --git a/tests/integration/testdata/pipeline/custom_template/metadata.json b/tests/integration/testdata/pipeline/custom_template/metadata.json
new file mode 100644
index 0000000000..689fe297f8
--- /dev/null
+++ b/tests/integration/testdata/pipeline/custom_template/metadata.json
@@ -0,0 +1,3 @@
+{
+    "number_of_stages": 0
+}
diff --git a/tests/unit/lib/pipeline/bootstrap/test_environment.py b/tests/unit/lib/pipeline/bootstrap/test_environment.py
index 8160eaca8c..9a12f2be15 100644
--- a/tests/unit/lib/pipeline/bootstrap/test_environment.py
+++ b/tests/unit/lib/pipeline/bootstrap/test_environment.py
@@ -229,14 +229,26 @@ def test_save_config_escapes_none_resources(self, samconfig_mock):
         samconfig_mock.return_value = samconfig_instance_mock
         stage: Stage = Stage(name=ANY_STAGE_NAME)
 
+        empty_ecr_call = call(
+            cmd_names=cmd_names,
+            section="parameters",
+            env=ANY_STAGE_NAME,
+            key="image_repository",
+            value="",
+        )
+
         expected_calls = []
-        self.trigger_and_assert_save_config_calls(stage, cmd_names, expected_calls, samconfig_instance_mock.put)
+        self.trigger_and_assert_save_config_calls(
+            stage, cmd_names, expected_calls + [empty_ecr_call], samconfig_instance_mock.put
+        )
 
         stage.pipeline_user.arn = ANY_PIPELINE_USER_ARN
         expected_calls.append(
             call(cmd_names=cmd_names, section="parameters", key="pipeline_user", value=ANY_PIPELINE_USER_ARN)
         )
-        self.trigger_and_assert_save_config_calls(stage, cmd_names, expected_calls, samconfig_instance_mock.put)
+        self.trigger_and_assert_save_config_calls(
+            stage, cmd_names, expected_calls + [empty_ecr_call], samconfig_instance_mock.put
+        )
 
         stage.pipeline_execution_role.arn = ANY_PIPELINE_EXECUTION_ROLE_ARN
         expected_calls.append(
@@ -246,9 +258,11 @@ def test_save_config_escapes_none_resources(self, samconfig_mock):
                 env=ANY_STAGE_NAME,
                 key="pipeline_execution_role",
                 value=ANY_PIPELINE_EXECUTION_ROLE_ARN,
-            )
+            ),
+        )
+        self.trigger_and_assert_save_config_calls(
+            stage, cmd_names, expected_calls + [empty_ecr_call], samconfig_instance_mock.put
         )
-        self.trigger_and_assert_save_config_calls(stage, cmd_names, expected_calls, samconfig_instance_mock.put)
 
         stage.cloudformation_execution_role.arn = ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN
         expected_calls.append(
@@ -258,9 +272,11 @@ def test_save_config_escapes_none_resources(self, samconfig_mock):
                 env=ANY_STAGE_NAME,
                 key="cloudformation_execution_role",
                 value=ANY_CLOUDFORMATION_EXECUTION_ROLE_ARN,
-            )
+            ),
+        )
+        self.trigger_and_assert_save_config_calls(
+            stage, cmd_names, expected_calls + [empty_ecr_call], samconfig_instance_mock.put
         )
-        self.trigger_and_assert_save_config_calls(stage, cmd_names, expected_calls, samconfig_instance_mock.put)
 
         stage.artifacts_bucket.arn = "arn:aws:s3:::artifact_bucket_name"
         expected_calls.append(
@@ -270,9 +286,11 @@ def test_save_config_escapes_none_resources(self, samconfig_mock):
             call(
                 cmd_names=cmd_names,
                 section="parameters",
                 env=ANY_STAGE_NAME,
key="artifacts_bucket", value="artifact_bucket_name", - ) + ), + ) + self.trigger_and_assert_save_config_calls( + stage, cmd_names, expected_calls + [empty_ecr_call], samconfig_instance_mock.put ) - self.trigger_and_assert_save_config_calls(stage, cmd_names, expected_calls, samconfig_instance_mock.put) stage.image_repository.arn = "arn:aws:ecr:us-east-2:111111111111:repository/image_repository_name" expected_calls.append( @@ -289,7 +307,7 @@ def test_save_config_escapes_none_resources(self, samconfig_mock): def trigger_and_assert_save_config_calls(self, stage, cmd_names, expected_calls, samconfig_put_mock): stage.save_config(config_dir="any_config_dir", filename="any_pipeline.toml", cmd_names=cmd_names) self.assertEqual(len(expected_calls), samconfig_put_mock.call_count) - samconfig_put_mock.assert_has_calls(expected_calls) + samconfig_put_mock.assert_has_calls(expected_calls, any_order=True) samconfig_put_mock.reset_mock() @patch("samcli.lib.pipeline.bootstrap.stage.boto3")