diff --git a/requirements/base.txt b/requirements/base.txt index 34edac3fd0..f0626e7145 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -4,7 +4,7 @@ Flask<2.3 #Need to add latest lambda changes which will return invoke mode details boto3>=1.26.109,==1.* jmespath~=1.0.1 -ruamel_yaml~=0.17.21 +ruamel_yaml~=0.17.32 PyYAML>=5.4.1,==5.* cookiecutter~=2.1.1 aws-sam-translator==1.70.0 diff --git a/requirements/dev.txt b/requirements/dev.txt index 4ef0736e47..2421d05095 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -9,7 +9,7 @@ pytest-cov==4.1.0 # here we fix its version and upgrade it manually in the future mypy==1.3.0 boto3-stubs[apigateway,cloudformation,ecr,iam,lambda,s3,schemas,secretsmanager,signer,stepfunctions,sts,xray]==1.26.131 -types-pywin32==306.0.0.0 +types-pywin32==306.0.0.2 types-PyYAML==6.0.12 types-chevron==0.14.2.4 types-psutil==5.9.5.12 @@ -33,7 +33,7 @@ pytest-rerunfailures==11.1.2 # NOTE (hawflau): DO NOT upgrade pytest-metadata and pytest-json-report unless pytest-json-report addresses https://github.com/numirias/pytest-json-report/issues/89 pytest-metadata==2.0.4 pytest-json-report==1.5.0 -filelock==3.12.0 +filelock==3.12.2 # formatter black==22.6.0 diff --git a/requirements/reproducible-linux.txt b/requirements/reproducible-linux.txt index 6ad1ef3c90..f946536226 100644 --- a/requirements/reproducible-linux.txt +++ b/requirements/reproducible-linux.txt @@ -610,10 +610,49 @@ rich==13.3.3 \ --hash=sha256:540c7d6d26a1178e8e8b37e9ba44573a3cd1464ff6348b99ee7061b95d1c6333 \ --hash=sha256:dc84400a9d842b3a9c5ff74addd8eb798d155f36c1c91303888e0a66850d2a15 # via aws-sam-cli (setup.py) -ruamel-yaml==0.17.21 \ - --hash=sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7 \ - --hash=sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af +ruamel-yaml==0.17.32 \ + --hash=sha256:23cd2ed620231677564646b0c6a89d138b6822a0d78656df7abda5879ec4f447 \ + 
--hash=sha256:ec939063761914e14542972a5cba6d33c23b0859ab6342f61cf070cfc600efc2 # via aws-sam-cli (setup.py) +ruamel-yaml-clib==0.2.7 \ + --hash=sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e \ + --hash=sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3 \ + --hash=sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5 \ + --hash=sha256:1a6391a7cabb7641c32517539ca42cf84b87b667bad38b78d4d42dd23e957c81 \ + --hash=sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497 \ + --hash=sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f \ + --hash=sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac \ + --hash=sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697 \ + --hash=sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763 \ + --hash=sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282 \ + --hash=sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94 \ + --hash=sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1 \ + --hash=sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072 \ + --hash=sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9 \ + --hash=sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231 \ + --hash=sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93 \ + --hash=sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b \ + --hash=sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb \ + --hash=sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f \ + --hash=sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307 \ + --hash=sha256:9c7617df90c1365638916b98cdd9be833d31d337dbcd722485597b43c4a215bf \ + --hash=sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8 \ + 
--hash=sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b \ + --hash=sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b \ + --hash=sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640 \ + --hash=sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7 \ + --hash=sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a \ + --hash=sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71 \ + --hash=sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8 \ + --hash=sha256:da538167284de58a52109a9b89b8f6a53ff8437dd6dc26d33b57bf6699153122 \ + --hash=sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7 \ + --hash=sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80 \ + --hash=sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e \ + --hash=sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab \ + --hash=sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0 \ + --hash=sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646 \ + --hash=sha256:f6d3d39611ac2e4f62c3128a9eed45f19a6608670c5a2f4f07f24e8de3441d38 + # via ruamel-yaml s3transfer==0.6.0 \ --hash=sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd \ --hash=sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947 @@ -684,9 +723,9 @@ watchdog==2.1.2 \ --hash=sha256:d34ce2261f118ecd57eedeef95fc2a495fc4a40b3ed7b3bf0bd7a8ccc1ab4f8f \ --hash=sha256:edcd9ef3fd460bb8a98eb1fcf99941e9fd9f275f45f1a82cb1359ec92975d647 # via aws-sam-cli (setup.py) -websocket-client==1.5.1 \ - --hash=sha256:3f09e6d8230892547132177f575a4e3e73cfdf06526e20cc02aa1c3b47184d40 \ - --hash=sha256:cdf5877568b7e83aa7cf2244ab56a3213de587bbe0ce9d8b9600fc77b455d89e +websocket-client==1.6.1 \ + --hash=sha256:c951af98631d24f8df89ab1019fc365f2227c0892f12fd150e935607c79dd0dd \ + 
--hash=sha256:f1f9f2ad5291f0225a49efad77abf9e700b6fef553900623060dad6e26503b9d # via docker werkzeug==2.2.3 \ --hash=sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe \ diff --git a/requirements/reproducible-mac.txt b/requirements/reproducible-mac.txt index c69964a4c7..a1db7a41cf 100644 --- a/requirements/reproducible-mac.txt +++ b/requirements/reproducible-mac.txt @@ -267,9 +267,9 @@ idna==3.4 \ --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via requests -importlib-metadata==6.1.0 \ - --hash=sha256:43ce9281e097583d758c2c708c4376371261a02c34682491a8e98352365aad20 \ - --hash=sha256:ff80f3b5394912eb1b108fcfd444dc78b7f1f3e16b16188054bd01cb9cb86f09 +importlib-metadata==6.7.0 \ + --hash=sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4 \ + --hash=sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5 # via # attrs # click @@ -645,9 +645,9 @@ rich==13.3.3 \ --hash=sha256:540c7d6d26a1178e8e8b37e9ba44573a3cd1464ff6348b99ee7061b95d1c6333 \ --hash=sha256:dc84400a9d842b3a9c5ff74addd8eb798d155f36c1c91303888e0a66850d2a15 # via aws-sam-cli (setup.py) -ruamel-yaml==0.17.21 \ - --hash=sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7 \ - --hash=sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af +ruamel-yaml==0.17.32 \ + --hash=sha256:23cd2ed620231677564646b0c6a89d138b6822a0d78656df7abda5879ec4f447 \ + --hash=sha256:ec939063761914e14542972a5cba6d33c23b0859ab6342f61cf070cfc600efc2 # via aws-sam-cli (setup.py) ruamel-yaml-clib==0.2.7 \ --hash=sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e \ @@ -706,9 +706,9 @@ six==1.16.0 \ # junit-xml # python-dateutil # serverlessrepo -sympy==1.10.1 \ - --hash=sha256:5939eeffdf9e152172601463626c022a2c27e75cf6278de8d401d50c9d58787b \ - 
--hash=sha256:df75d738930f6fe9ebe7034e59d56698f29e85f443f743e51e47df0caccc2130 +sympy==1.12 \ + --hash=sha256:c3588cd4295d0c0f603d0f2ae780587e64e2efeedb3521e46b9bb1d08d184fa5 \ + --hash=sha256:ebf595c8dac3e0fdc4152c51878b498396ec7f30e7a914d6071e674d49420fb8 # via cfn-lint text-unidecode==1.3 \ --hash=sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8 \ @@ -762,9 +762,9 @@ watchdog==2.1.2 \ --hash=sha256:d34ce2261f118ecd57eedeef95fc2a495fc4a40b3ed7b3bf0bd7a8ccc1ab4f8f \ --hash=sha256:edcd9ef3fd460bb8a98eb1fcf99941e9fd9f275f45f1a82cb1359ec92975d647 # via aws-sam-cli (setup.py) -websocket-client==1.5.1 \ - --hash=sha256:3f09e6d8230892547132177f575a4e3e73cfdf06526e20cc02aa1c3b47184d40 \ - --hash=sha256:cdf5877568b7e83aa7cf2244ab56a3213de587bbe0ce9d8b9600fc77b455d89e +websocket-client==1.6.1 \ + --hash=sha256:c951af98631d24f8df89ab1019fc365f2227c0892f12fd150e935607c79dd0dd \ + --hash=sha256:f1f9f2ad5291f0225a49efad77abf9e700b6fef553900623060dad6e26503b9d # via docker werkzeug==2.2.3 \ --hash=sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe \ diff --git a/samcli/cli/context.py b/samcli/cli/context.py index 404fd36661..49c5e44c78 100644 --- a/samcli/cli/context.py +++ b/samcli/cli/context.py @@ -7,6 +7,7 @@ from typing import List, Optional, cast import click +from rich.console import Console from samcli.cli.formatters import RootCommandHelpTextFormatter from samcli.commands.exceptions import AWSServiceClientError @@ -44,6 +45,11 @@ def __init__(self): self._session_id = str(uuid.uuid4()) self._experimental = False self._exception = None + self._console = Console() + + @property + def console(self): + return self._console @property def exception(self): diff --git a/samcli/cli/root/command_list.py b/samcli/cli/root/command_list.py index 0be843fbe2..cfa7000739 100644 --- a/samcli/cli/root/command_list.py +++ b/samcli/cli/root/command_list.py @@ -6,11 +6,11 @@ "validate": "Validate an AWS SAM template.", "build": "Build your AWS 
serverless function code.", "local": "Run your AWS serverless function locally.", - "remote": "Invoke or send an event to cloud resources in your CFN stack", + "remote": "Invoke or send an event to cloud resources in your AWS Cloudformation stack.", "package": "Package an AWS SAM application.", "deploy": "Deploy an AWS SAM application.", "delete": "Delete an AWS SAM application and the artifacts created by sam deploy.", - "logs": "Fetch AWS Cloudwatch logs for a function.", + "logs": "Fetch AWS Cloudwatch logs for AWS Lambda Functions or Cloudwatch Log groups.", "publish": "Publish a packaged AWS SAM template to AWS Serverless Application Repository for easy sharing.", "traces": "Fetch AWS X-Ray traces.", "sync": "Sync an AWS SAM project to AWS.", diff --git a/samcli/commands/_utils/options.py b/samcli/commands/_utils/options.py index 188b1705b4..5b1b55cc32 100644 --- a/samcli/commands/_utils/options.py +++ b/samcli/commands/_utils/options.py @@ -326,7 +326,7 @@ def no_progressbar_click_option(): default=False, required=False, is_flag=True, - help="Does not showcase a progress bar when uploading artifacts to s3 and pushing docker images to ECR", + help="Does not showcase a progress bar when uploading artifacts to S3 and pushing docker images to ECR", ) @@ -679,9 +679,9 @@ def resolve_s3_click_option(guided): required=False, is_flag=True, callback=callback, - help="Automatically resolve s3 bucket for non-guided deployments. " - "Enabling this option will also create a managed default s3 bucket for you. " - "If you do not provide a --s3-bucket value, the managed bucket will be used. " + help="Automatically resolve AWS S3 bucket for non-guided deployments. " + "Enabling this option will also create a managed default AWS S3 bucket for you. " + "If one does not provide a --s3-bucket value, the managed bucket will be used. 
" "Do not use --guided with this option.", ) diff --git a/samcli/commands/logs/command.py b/samcli/commands/logs/command.py index 7a3b1d8c6a..1146767a60 100644 --- a/samcli/commands/logs/command.py +++ b/samcli/commands/logs/command.py @@ -11,6 +11,7 @@ from samcli.cli.main import common_options as cli_framework_options from samcli.commands._utils.command_exception_handler import command_exception_handler from samcli.commands._utils.options import common_observability_options, generate_next_command_recommendation +from samcli.commands.logs.core.command import LogsCommand from samcli.commands.logs.validation_and_exception_handlers import ( SAM_LOGS_ADDITIONAL_EXCEPTION_HANDLERS, stack_name_cw_log_group_validation, @@ -20,37 +21,34 @@ LOG = logging.getLogger(__name__) +SHORT_HELP = ( + "Fetch logs for your AWS SAM Application or AWS Cloudformation stack - Lambda Functions/CloudWatch Log groups" +) + HELP_TEXT = """ -Use this command to fetch logs generated by your Lambda function.\n -\b -When your functions are a part of a CloudFormation stack, you can fetch logs using the function's -LogicalID when you specify the stack name. -$ sam logs -n HelloWorldFunction --stack-name mystack \n -\b -Or, you can fetch logs using the function's name. -$ sam logs -n mystack-HelloWorldFunction-1FJ8PD36GML2Q \n -\b -You can view logs for a specific time range using the -s (--start-time) and -e (--end-time) options -$ sam logs -n HelloWorldFunction --stack-name mystack -s '10min ago' -e '2min ago' \n -\b -You can also add the --tail option to wait for new logs and see them as they arrive. -$ sam logs -n HelloWorldFunction --stack-name mystack --tail \n -\b -Use the --filter option to quickly find logs that match terms, phrases or values in your log events. -$ sam logs -n HelloWorldFunction --stack-name mystack --filter 'error' \n -\b -Fetch logs for all supported resources in your application, and additionally from the specified log groups. 
-$ sam logs --cw-log-group /aws/lambda/myfunction-123 --cw-log-group /aws/lambda/myfunction-456 -\b -You can now fetch logs from supported resources, by only providing --stack-name parameter -$ sam logs --stack-name mystack \n -\b -You can also fetch logs from a resource which is defined in a nested stack. -$ sam logs --stack-name mystack -n MyNestedStack/HelloWorldFunction +The sam logs commands fetches logs of Lambda Functions/CloudWatch log groups +with additional filtering by options. """ +DESCRIPTION = """ + Fetch logs generated by Lambda functions or other Cloudwatch log groups with additional filtering. +""" -@click.command("logs", help=HELP_TEXT, short_help="Fetch logs for a function") + +@click.command( + "logs", + short_help=SHORT_HELP, + context_settings={ + "ignore_unknown_options": False, + "allow_interspersed_args": True, + "allow_extra_args": True, + "max_content_width": 120, + }, + cls=LogsCommand, + help=HELP_TEXT, + description=DESCRIPTION, + requires_credentials=True, +) @configuration_option(provider=TomlProvider(section="parameters")) @click.option( "--name", diff --git a/samcli/commands/logs/core/__init__.py b/samcli/commands/logs/core/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/commands/logs/core/command.py b/samcli/commands/logs/core/command.py new file mode 100644 index 0000000000..60b1734e50 --- /dev/null +++ b/samcli/commands/logs/core/command.py @@ -0,0 +1,119 @@ +from click import Context, style + +from samcli.cli.core.command import CoreCommand +from samcli.cli.row_modifiers import RowDefinition, ShowcaseRowModifier +from samcli.commands.logs.core.formatters import LogsCommandHelpTextFormatter +from samcli.commands.logs.core.options import OPTIONS_INFO + +COL_SIZE_MODIFIER = 38 + + +class LogsCommand(CoreCommand): + class CustomFormatterContext(Context): + formatter_class = LogsCommandHelpTextFormatter + + context_class = CustomFormatterContext + + @staticmethod + def format_examples(ctx: Context, 
formatter: LogsCommandHelpTextFormatter): + with formatter.indented_section(name="Examples", extra_indents=1): + with formatter.indented_section( + name="Fetch logs with Lambda Function Logical ID and Cloudformation Stack Name" + ): + formatter.write_rd( + [ + RowDefinition( + text="\n", + ), + RowDefinition( + name=style(f"$ {ctx.command_path} -n HelloWorldFunction --stack-name mystack"), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) + with formatter.indented_section(name="View logs for specific time range"): + formatter.write_rd( + [ + RowDefinition( + text="\n", + ), + RowDefinition( + name=style( + f"$ {ctx.command_path} -n HelloWorldFunction --stack-name mystack -s " + f"'10min ago' -e '2min ago'" + ), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) + with formatter.indented_section(name="Tail new logs"): + formatter.write_rd( + [ + RowDefinition( + text="\n", + ), + RowDefinition( + name=style(f"$ {ctx.command_path} -n HelloWorldFunction --stack-name " f"mystack --tail"), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) + with formatter.indented_section(name="Fetch from Cloudwatch log groups"): + formatter.write_rd( + [ + RowDefinition( + text="\n", + ), + RowDefinition( + name=style( + f"$ {ctx.command_path} --cw-log-group /aws/lambda/myfunction-123 " + f"--cw-log-group /aws/lambda/myfunction-456" + ), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) + + with formatter.indented_section(name="Fetch logs from supported resources in Cloudformation stack"): + formatter.write_rd( + [ + RowDefinition( + text="\n", + ), + RowDefinition( + name=style(f"$ {ctx.command_path} ---stack-name mystack"), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) + + with formatter.indented_section(name="Fetch logs from resource defined in nested Cloudformation stack"): + formatter.write_rd( + [ + RowDefinition( + text="\n", + ), + RowDefinition( + name=style( + f"$ {ctx.command_path} ---stack-name mystack -n 
MyNestedStack/HelloWorldFunction" + ), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ] + ) + + def format_options(self, ctx: Context, formatter: LogsCommandHelpTextFormatter) -> None: # type:ignore + # `ignore` is put in place here for mypy even though it is the correct behavior, + # as the `formatter_class` can be set in subclass of Command. If ignore is not set, + # mypy raises argument needs to be HelpFormatter as super class defines it. + + self.format_description(formatter) + LogsCommand.format_examples(ctx, formatter) + + CoreCommand._format_options( + ctx=ctx, + params=self.get_params(ctx), + formatter=formatter, + formatting_options=OPTIONS_INFO, + write_rd_overrides={"col_max": COL_SIZE_MODIFIER}, + ) diff --git a/samcli/commands/logs/core/formatters.py b/samcli/commands/logs/core/formatters.py new file mode 100644 index 0000000000..6a35facce0 --- /dev/null +++ b/samcli/commands/logs/core/formatters.py @@ -0,0 +1,19 @@ +from samcli.cli.formatters import RootCommandHelpTextFormatter +from samcli.cli.row_modifiers import BaseLineRowModifier +from samcli.commands.logs.core.options import ALL_OPTIONS + + +class LogsCommandHelpTextFormatter(RootCommandHelpTextFormatter): + # Picked an additive constant that gives an aesthetically pleasing look. + ADDITIVE_JUSTIFICATION = 22 + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + # Add Additional space after determining the longest option. + # However, do not justify with padding for more than half the width of + # the terminal to retain aesthetics. 
+ self.left_justification_length = min( + max([len(option) for option in ALL_OPTIONS]) + self.ADDITIVE_JUSTIFICATION, + self.width // 2 - self.indent_increment, + ) + self.modifiers = [BaseLineRowModifier()] diff --git a/samcli/commands/logs/core/options.py b/samcli/commands/logs/core/options.py new file mode 100644 index 0000000000..c537c857e6 --- /dev/null +++ b/samcli/commands/logs/core/options.py @@ -0,0 +1,45 @@ +""" +Logs Command Options related Datastructures for formatting. +""" +from typing import Dict, List + +from samcli.cli.core.options import ALL_COMMON_OPTIONS, add_common_options_info +from samcli.cli.row_modifiers import RowDefinition + +# The ordering of the option lists matter, they are the order in which options will be displayed. + +LOG_IDENTIFIER_OPTIONS: List[str] = ["stack_name", "cw_log_group", "name"] + +# Can be used instead of the options in the first list +ADDITIONAL_OPTIONS: List[str] = ["include_traces", "filter", "output", "tail", "start_time", "end_time"] + +AWS_CREDENTIAL_OPTION_NAMES: List[str] = ["region", "profile"] + +CONFIGURATION_OPTION_NAMES: List[str] = ["config_env", "config_file"] + +ALL_OPTIONS: List[str] = ( + LOG_IDENTIFIER_OPTIONS + + AWS_CREDENTIAL_OPTION_NAMES + + ADDITIONAL_OPTIONS + + CONFIGURATION_OPTION_NAMES + + ALL_COMMON_OPTIONS +) + +OPTIONS_INFO: Dict[str, Dict] = { + "Log Identifier Options": {"option_names": {opt: {"rank": idx} for idx, opt in enumerate(LOG_IDENTIFIER_OPTIONS)}}, + "AWS Credential Options": { + "option_names": {opt: {"rank": idx} for idx, opt in enumerate(AWS_CREDENTIAL_OPTION_NAMES)} + }, + "Additional Options": {"option_names": {opt: {"rank": idx} for idx, opt in enumerate(ADDITIONAL_OPTIONS)}}, + "Configuration Options": { + "option_names": {opt: {"rank": idx} for idx, opt in enumerate(CONFIGURATION_OPTION_NAMES)}, + "extras": [ + RowDefinition(name="Learn more about configuration files at:"), + RowDefinition( + 
name="https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli" + "-config.html. " + ), + ], + }, +} +add_common_options_info(OPTIONS_INFO) diff --git a/samcli/commands/package/command.py b/samcli/commands/package/command.py index ee74b67c37..41fb10b133 100644 --- a/samcli/commands/package/command.py +++ b/samcli/commands/package/command.py @@ -21,6 +21,7 @@ template_click_option, use_json_option, ) +from samcli.commands.package.core.command import PackageCommand from samcli.lib.bootstrap.bootstrap import manage_stack from samcli.lib.cli_validation.image_repository_validation import image_repository_validation from samcli.lib.telemetry.metric import track_command, track_template_warnings @@ -42,20 +43,30 @@ def resources_and_properties_help_string(): ) -HELP_TEXT = ( - """The SAM package command creates and uploads artifacts based on the package type of a given resource. -It uploads local images to ECR for `Image` package types. -It creates zip of your code and dependencies and uploads it to S3 for other package types. -The command returns a copy of your template, replacing references to local artifacts -with the AWS location where the command uploaded the artifacts. - -The following resources and their property locations are supported. -""" - + resources_and_properties_help_string() -) +DESCRIPTION = """ + Creates and uploads artifacts based on the package type of a given resource. + It uploads local images to ECR for `Image` package types. + It creates a zip of code and dependencies and uploads it to S3 for `Zip` package types. + + A new template is returned which replaces references to local artifacts + with the AWS location where the command uploaded the artifacts. 
+ """ -@click.command("package", short_help=SHORT_HELP, help=HELP_TEXT, context_settings=dict(max_content_width=120)) +@click.command( + "package", + short_help=SHORT_HELP, + context_settings={ + "ignore_unknown_options": False, + "allow_interspersed_args": True, + "allow_extra_args": True, + "max_content_width": 120, + }, + cls=PackageCommand, + help=SHORT_HELP, + description=DESCRIPTION, + requires_credentials=True, +) @configuration_option(provider=TomlProvider(section="parameters")) @template_click_option(include_build=True) @click.option( diff --git a/samcli/commands/package/core/__init__.py b/samcli/commands/package/core/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/commands/package/core/command.py b/samcli/commands/package/core/command.py new file mode 100644 index 0000000000..16a9ee3d2a --- /dev/null +++ b/samcli/commands/package/core/command.py @@ -0,0 +1,138 @@ +""" +`sam package` command class for help text visual layer. +""" +import click +from click import Context, style +from rich.table import Table + +from samcli.cli.core.command import CoreCommand +from samcli.cli.row_modifiers import RowDefinition, ShowcaseRowModifier +from samcli.commands.package.core.formatters import PackageCommandHelpTextFormatter +from samcli.commands.package.core.options import OPTIONS_INFO +from samcli.lib.utils.resources import resources_generator + +COL_SIZE_MODIFIER = 38 + + +class PackageCommand(CoreCommand): + """ + `sam` package specific command class that specializes in the visual appearance + of `sam package` help text. + It hosts a custom formatter, examples, table for supported resources, acronyms + and how options are to be used in the CLI for `sam package`. 
+ """ + + class CustomFormatterContext(Context): + formatter_class = PackageCommandHelpTextFormatter + + context_class = CustomFormatterContext + + @staticmethod + def format_examples(ctx: Context, formatter: PackageCommandHelpTextFormatter): + with formatter.indented_section(name="Examples", extra_indents=1): + with formatter.indented_section(name="Automatic resolution of S3 buckets", extra_indents=1): + formatter.write_rd( + [ + RowDefinition( + text="\n", + ), + RowDefinition( + name=style(f"$ {ctx.command_path} --resolve-s3"), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ], + col_max=COL_SIZE_MODIFIER, + ) + with formatter.indented_section(name="Get packaged template", extra_indents=1): + formatter.write_rd( + [ + RowDefinition( + text="\n", + ), + RowDefinition( + name=style(f"$ {ctx.command_path} --resolve-s3 --output-template-file packaged.yaml"), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ], + col_max=COL_SIZE_MODIFIER, + ) + with formatter.indented_section(name="Customized location for uploading artifacts", extra_indents=1): + formatter.write_rd( + [ + RowDefinition( + text="\n", + ), + RowDefinition( + name=style( + f"$ {ctx.command_path} --s3-bucket S3_BUCKET --output-template-file packaged.yaml" + ), + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ], + col_max=COL_SIZE_MODIFIER, + ) + + @staticmethod + def format_table(formatter: PackageCommandHelpTextFormatter): + with formatter.section(name="Supported Resources"): + pass + ctx = click.get_current_context() + table = Table(width=ctx.max_content_width) + table.add_column("Resource") + table.add_column("Location") + for resource, location in resources_generator(): + table.add_row(resource, location) + with ctx.obj.console.capture() as capture: + ctx.obj.console.print(table) + formatter.write_rd( + [ + RowDefinition(name="\n"), + RowDefinition(name=capture.get()), + ], + col_max=COL_SIZE_MODIFIER, + ) + + @staticmethod + def format_acronyms(formatter: 
PackageCommandHelpTextFormatter): + with formatter.indented_section(name="Acronyms", extra_indents=1): + formatter.write_rd( + [ + RowDefinition( + text="\n", + ), + RowDefinition( + name="S3", + text="Simple Storage Service", + extra_row_modifiers=[ShowcaseRowModifier()], + ), + RowDefinition( + name="ECR", + text="Elastic Container Registry", + extra_row_modifiers=[ShowcaseRowModifier()], + ), + RowDefinition( + name="KMS", + text="Key Management Service", + extra_row_modifiers=[ShowcaseRowModifier()], + ), + ], + col_max=COL_SIZE_MODIFIER, + ) + + def format_options(self, ctx: Context, formatter: PackageCommandHelpTextFormatter) -> None: # type:ignore + # `ignore` is put in place here for mypy even though it is the correct behavior, + # as the `formatter_class` can be set in subclass of Command. If ignore is not set, + # mypy raises argument needs to be HelpFormatter as super class defines it. + + self.format_description(formatter) + PackageCommand.format_examples(ctx, formatter) + PackageCommand.format_table(formatter) + PackageCommand.format_acronyms(formatter) + + CoreCommand._format_options( + ctx=ctx, + params=self.get_params(ctx), + formatter=formatter, + formatting_options=OPTIONS_INFO, + write_rd_overrides={"col_max": COL_SIZE_MODIFIER}, + ) diff --git a/samcli/commands/package/core/formatters.py b/samcli/commands/package/core/formatters.py new file mode 100644 index 0000000000..5c05ddcbfd --- /dev/null +++ b/samcli/commands/package/core/formatters.py @@ -0,0 +1,19 @@ +from samcli.cli.formatters import RootCommandHelpTextFormatter +from samcli.cli.row_modifiers import BaseLineRowModifier +from samcli.commands.package.core.options import ALL_OPTIONS + + +class PackageCommandHelpTextFormatter(RootCommandHelpTextFormatter): + # Picked an additive constant that gives an aesthetically pleasing look. 
+ ADDITIVE_JUSTIFICATION = 15 + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + # Add Additional space after determining the longest option. + # However, do not justify with padding for more than half the width of + # the terminal to retain aesthetics. + self.left_justification_length = min( + max([len(option) for option in ALL_OPTIONS]) + self.ADDITIVE_JUSTIFICATION, + self.width // 2 - self.indent_increment, + ) + self.modifiers = [BaseLineRowModifier()] diff --git a/samcli/commands/package/core/options.py b/samcli/commands/package/core/options.py new file mode 100644 index 0000000000..5a10f943f9 --- /dev/null +++ b/samcli/commands/package/core/options.py @@ -0,0 +1,68 @@ +""" +Package Command Options related Datastructures for formatting. +""" +from typing import Dict, List + +from samcli.cli.core.options import ALL_COMMON_OPTIONS, add_common_options_info +from samcli.cli.row_modifiers import RowDefinition + +# The ordering of the option lists matter, they are the order in which options will be displayed. 
+ +REQUIRED_OPTIONS: List[str] = ["s3_bucket", "resolve_s3"] + +AWS_CREDENTIAL_OPTION_NAMES: List[str] = ["region", "profile"] + +INFRASTRUCTURE_OPTION_NAMES: List[str] = [ + "s3_prefix", + "image_repository", + "image_repositories", + "kms_key_id", + "metadata", +] + +DEPLOYMENT_OPTIONS: List[str] = [ + "force_upload", +] + +CONFIGURATION_OPTION_NAMES: List[str] = ["config_env", "config_file"] + +ADDITIONAL_OPTIONS: List[str] = [ + "no_progressbar", + "signing_profiles", + "template_file", + "output_template_file", + "use_json", +] + +ALL_OPTIONS: List[str] = ( + REQUIRED_OPTIONS + + AWS_CREDENTIAL_OPTION_NAMES + + INFRASTRUCTURE_OPTION_NAMES + + DEPLOYMENT_OPTIONS + + CONFIGURATION_OPTION_NAMES + + ADDITIONAL_OPTIONS + + ALL_COMMON_OPTIONS +) + +OPTIONS_INFO: Dict[str, Dict] = { + "Required Options": {"option_names": {opt: {"rank": idx} for idx, opt in enumerate(REQUIRED_OPTIONS)}}, + "AWS Credential Options": { + "option_names": {opt: {"rank": idx} for idx, opt in enumerate(AWS_CREDENTIAL_OPTION_NAMES)} + }, + "Infrastructure Options": { + "option_names": {opt: {"rank": idx} for idx, opt in enumerate(INFRASTRUCTURE_OPTION_NAMES)} + }, + "Package Management Options": {"option_names": {opt: {"rank": idx} for idx, opt in enumerate(DEPLOYMENT_OPTIONS)}}, + "Configuration Options": { + "option_names": {opt: {"rank": idx} for idx, opt in enumerate(CONFIGURATION_OPTION_NAMES)}, + "extras": [ + RowDefinition(name="Learn more about configuration files at:"), + RowDefinition( + name="https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli" + "-config.html. 
" + ), + ], + }, + "Additional Options": {"option_names": {opt: {"rank": idx} for idx, opt in enumerate(ADDITIONAL_OPTIONS)}}, +} +add_common_options_info(OPTIONS_INFO) diff --git a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py index 612341988d..f15d3e52bf 100644 --- a/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py +++ b/samcli/lib/bootstrap/companion_stack/companion_stack_manager.py @@ -17,6 +17,7 @@ from samcli.lib.providers.sam_function_provider import SamFunctionProvider from samcli.lib.providers.sam_stack_provider import SamLocalStackProvider from samcli.lib.utils.packagetype import IMAGE +from samcli.lib.utils.s3 import parse_s3_url # pylint: disable=E0401 if typing.TYPE_CHECKING: # pragma: no cover @@ -112,7 +113,7 @@ def update_companion_stack(self) -> None: self._s3_client, bucket_name=self._s3_bucket, prefix=self._s3_prefix, no_progressbar=True ) # TemplateUrl property requires S3 URL to be in path-style format - parts = S3Uploader.parse_s3_url( + parts = parse_s3_url( s3_uploader.upload_with_dedup(temporary_file.name, "template"), version_property="Version" ) diff --git a/samcli/lib/deploy/deployer.py b/samcli/lib/deploy/deployer.py index 16e860c54c..58a2582403 100644 --- a/samcli/lib/deploy/deployer.py +++ b/samcli/lib/deploy/deployer.py @@ -38,6 +38,7 @@ from samcli.lib.package.local_files_utils import get_uploaded_s3_object_name, mktempfile from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.utils.colors import Colored, Colors +from samcli.lib.utils.s3 import parse_s3_url from samcli.lib.utils.time import utc_to_timestamp LOG = logging.getLogger(__name__) @@ -203,9 +204,7 @@ def _process_kwargs( temporary_file.flush() remote_path = get_uploaded_s3_object_name(file_path=temporary_file.name, extension="template") # TemplateUrl property requires S3 URL to be in path-style format - parts = S3Uploader.parse_s3_url( - 
s3_uploader.upload(temporary_file.name, remote_path), version_property="Version" - ) + parts = parse_s3_url(s3_uploader.upload(temporary_file.name, remote_path), version_property="Version") kwargs["TemplateURL"] = s3_uploader.to_path_style_s3_url(parts["Key"], parts.get("Version", None)) # don't set these arguments if not specified to use existing values diff --git a/samcli/lib/package/artifact_exporter.py b/samcli/lib/package/artifact_exporter.py index 9e60dc5c1c..b2bbce7328 100644 --- a/samcli/lib/package/artifact_exporter.py +++ b/samcli/lib/package/artifact_exporter.py @@ -29,7 +29,6 @@ ECRResource, ResourceZip, ) -from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.package.uploaders import Destination, Uploaders from samcli.lib.package.utils import ( is_local_file, @@ -47,6 +46,7 @@ AWS_SERVERLESS_FUNCTION, RESOURCES_WITH_LOCAL_PATHS, ) +from samcli.lib.utils.s3 import parse_s3_url from samcli.yamlhelper import yaml_dump, yaml_parse # NOTE: sriram-mv, A cyclic dependency on `Template` needs to be broken. 
@@ -99,7 +99,7 @@ def do_export(self, resource_id, resource_dict, parent_dir): url = self.uploader.upload(temporary_file.name, remote_path) # TemplateUrl property requires S3 URL to be in path-style format - parts = S3Uploader.parse_s3_url(url, version_property="Version") + parts = parse_s3_url(url, version_property="Version") s3_path_url = self.uploader.to_path_style_s3_url(parts["Key"], parts.get("Version", None)) set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, s3_path_url) @@ -146,7 +146,7 @@ def do_export(self, resource_id, resource_dict, parent_dir): url = self.uploader.upload(abs_template_path, remote_path) # TemplateUrl property requires S3 URL to be in path-style format - parts = S3Uploader.parse_s3_url(url, version_property="Version") + parts = parse_s3_url(url, version_property="Version") s3_path_url = self.uploader.to_path_style_s3_url(parts["Key"], parts.get("Version", None)) set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, s3_path_url) diff --git a/samcli/lib/package/code_signer.py b/samcli/lib/package/code_signer.py index 92f0a78273..02434d8fb5 100644 --- a/samcli/lib/package/code_signer.py +++ b/samcli/lib/package/code_signer.py @@ -5,7 +5,7 @@ import logging from samcli.commands.exceptions import UserException -from samcli.lib.package.s3_uploader import S3Uploader +from samcli.lib.utils.s3 import parse_s3_url LOG = logging.getLogger(__name__) @@ -60,7 +60,7 @@ def sign_package(self, resource_id, s3_url, s3_version): profile_owner = signing_profile_for_resource["profile_owner"] # parse given s3 url, and extract bucket and object key - parsed_s3_url = S3Uploader.parse_s3_url(s3_url) + parsed_s3_url = parse_s3_url(s3_url) s3_bucket = parsed_s3_url["Bucket"] s3_key = parsed_s3_url["Key"] s3_target_prefix = s3_key.rsplit("/", 1)[0] + "/signed_" diff --git a/samcli/lib/package/packageable_resources.py b/samcli/lib/package/packageable_resources.py index 79458dc5bc..ca245715b5 100644 --- a/samcli/lib/package/packageable_resources.py +++ 
b/samcli/lib/package/packageable_resources.py @@ -51,6 +51,7 @@ RESOURCES_WITH_IMAGE_COMPONENT, RESOURCES_WITH_LOCAL_PATHS, ) +from samcli.lib.utils.s3 import parse_s3_url LOG = logging.getLogger(__name__) @@ -196,7 +197,7 @@ def get_property_value(self, resource_dict): # artifact, as deletion of intrinsic ref function artifacts is not supported yet. # TODO: Allow deletion of S3 artifacts with intrinsic ref functions. if resource_path and isinstance(resource_path, str): - return self.uploader.parse_s3_url(resource_path) + return parse_s3_url(resource_path) return {"Bucket": None, "Key": None} @@ -340,7 +341,7 @@ def do_export(self, resource_id, resource_dict, parent_dir): self.RESOURCE_TYPE, resource_id, resource_dict, self.PROPERTY_NAME, parent_dir, self.uploader ) - parsed_url = S3Uploader.parse_s3_url( + parsed_url = parse_s3_url( artifact_s3_url, bucket_name_property=self.BUCKET_NAME_PROPERTY, object_key_property=self.OBJECT_KEY_PROPERTY, diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index 5dab8c0d9a..fe141ada51 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -20,8 +20,7 @@ import sys import threading from collections import abc -from typing import Any, Dict, Optional, cast -from urllib.parse import parse_qs, urlparse +from typing import Any, Optional, cast import botocore import botocore.exceptions @@ -30,6 +29,7 @@ from samcli.commands.package.exceptions import BucketNotSpecifiedError, NoSuchBucketError from samcli.lib.package.local_files_utils import get_uploaded_s3_object_name +from samcli.lib.utils.s3 import parse_s3_url LOG = logging.getLogger(__name__) @@ -234,7 +234,7 @@ def get_version_of_artifact(self, s3_url: str) -> str: """ Returns version information of the S3 object that is given as S3 URL """ - parsed_s3_url = self.parse_s3_url(s3_url) + parsed_s3_url = parse_s3_url(s3_url) s3_bucket = parsed_s3_url["Bucket"] s3_key = parsed_s3_url["Key"] s3_object_tagging = 
self.s3.get_object_tagging(Bucket=s3_bucket, Key=s3_key) @@ -242,77 +242,6 @@ def get_version_of_artifact(self, s3_url: str) -> str: s3_object_version_id = s3_object_tagging["VersionId"] return cast(str, s3_object_version_id) - @staticmethod - def parse_s3_url( - url: Any, - bucket_name_property: str = "Bucket", - object_key_property: str = "Key", - version_property: Optional[str] = None, - ) -> Dict: - if isinstance(url, str) and url.startswith("s3://"): - return S3Uploader._parse_s3_format_url( - url=url, - bucket_name_property=bucket_name_property, - object_key_property=object_key_property, - version_property=version_property, - ) - - if isinstance(url, str) and url.startswith("https://s3"): - return S3Uploader._parse_path_style_s3_url( - url=url, bucket_name_property=bucket_name_property, object_key_property=object_key_property - ) - - raise ValueError("URL given to the parse method is not a valid S3 url {0}".format(url)) - - @staticmethod - def _parse_s3_format_url( - url: Any, - bucket_name_property: str = "Bucket", - object_key_property: str = "Key", - version_property: Optional[str] = None, - ) -> Dict: - """ - Method for parsing s3 urls that begin with s3:// - e.g. s3://bucket/key - """ - parsed = urlparse(url) - query = parse_qs(parsed.query) - if parsed.netloc and parsed.path: - result = dict() - result[bucket_name_property] = parsed.netloc - result[object_key_property] = parsed.path.lstrip("/") - - # If there is a query string that has a single versionId field, - # set the object version and return - if version_property is not None and "versionId" in query and len(query["versionId"]) == 1: - result[version_property] = query["versionId"][0] - - return result - - raise ValueError("URL given to the parse method is not a valid S3 url {0}".format(url)) - - @staticmethod - def _parse_path_style_s3_url( - url: Any, - bucket_name_property: str = "Bucket", - object_key_property: str = "Key", - ) -> Dict: - """ - Static method for parsing path style s3 urls. 
# Windows API error returned when attempting to perform I/O on closed pipe
BROKEN_PIPE_ERROR = 109


def broken_pipe_handler(func: Callable) -> Callable:
    """
    Decorator that swallows the Windows BROKEN_PIPE_ERROR (winerror 109).

    On Windows, exiting a command that uses ImageObserver(s) in EAGER
    container mode (start-api, start-lambda) can raise a pywintypes error:
    all containers have been stopped and deleted, but pipes to those
    containers are still loaded somewhere. Such an exception carries
    ``winerror == 109`` and is ignored gracefully; every other exception,
    and anything raised on non-Windows platforms, is re-raised unchanged.

    Parameters
    ----------
    func: Callable
        The method to wrap around

    Returns
    -------
    Callable
        The wrapped method
    """
    # Local import keeps this fix self-contained in the block; `functools`
    # is stdlib and free to import here.
    from functools import wraps

    # NOTE: As of right now, this checks for the Windows API error 109
    # specifically. This could be abstracted to potentially utilize a
    # callback method to further customize this.

    @wraps(func)  # FIX: preserve func's __name__/__doc__ for debugging/introspection
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as exception:
            # Only the Windows broken-pipe case is tolerated; everything
            # else must propagate so real failures are not hidden.
            if platform.system() != "Windows":
                raise

            if getattr(exception, "winerror", None) != BROKEN_PIPE_ERROR:
                raise

            LOG.debug("Handling BROKEN_PIPE_ERROR pywintypes, exception ignored gracefully")

    return wrapper
def parse_s3_url(
    url: Any,
    bucket_name_property: str = "Bucket",
    object_key_property: str = "Key",
    version_property: Optional[str] = None,
) -> Dict:
    """
    Parse an S3 URL into bucket / object-key (and optionally version) parts.

    Supports the ``s3://bucket/key`` scheme and path-style
    ``https://s3[.-]<region>.amazonaws.com/bucket/key`` URLs.

    Parameters
    ----------
    url: Any
        Candidate URL; anything that is not a string in one of the two
        supported formats is rejected.
    bucket_name_property: str
        Dict key under which the bucket name is returned.
    object_key_property: str
        Dict key under which the object key is returned.
    version_property: Optional[str]
        If set, and the s3:// URL carries exactly one ``versionId`` query
        parameter, its value is returned under this key.

    Returns
    -------
    Dict
        Mapping with bucket name, object key and (optionally) version.

    Raises
    ------
    ValueError
        If the URL is not a valid S3 URL pointing at an object.
    """
    if isinstance(url, str) and url.startswith("s3://"):
        return _parse_s3_format_url(
            url=url,
            bucket_name_property=bucket_name_property,
            object_key_property=object_key_property,
            version_property=version_property,
        )

    if isinstance(url, str) and url.startswith("https://s3"):
        return _parse_path_style_s3_url(
            url=url, bucket_name_property=bucket_name_property, object_key_property=object_key_property
        )

    raise ValueError("URL given to the parse method is not a valid S3 url {0}".format(url))


def _parse_s3_format_url(
    url: Any,
    bucket_name_property: str = "Bucket",
    object_key_property: str = "Key",
    version_property: Optional[str] = None,
) -> Dict:
    """
    Parse s3 urls that begin with s3://
    e.g. s3://bucket/key
    """
    parsed = urlparse(url)
    query = parse_qs(parsed.query)
    # Both a bucket (netloc) and an object key (path) are required; for
    # packaging purposes the URL must name an object, not a bucket.
    if parsed.netloc and parsed.path:
        result = dict()
        result[bucket_name_property] = parsed.netloc
        result[object_key_property] = parsed.path.lstrip("/")

        # If there is a query string that has a single versionId field,
        # set the object version and return
        if version_property is not None and "versionId" in query and len(query["versionId"]) == 1:
            result[version_property] = query["versionId"][0]

        return result

    raise ValueError("URL given to the parse method is not a valid S3 url {0}".format(url))


def _parse_path_style_s3_url(
    url: Any,
    bucket_name_property: str = "Bucket",
    object_key_property: str = "Key",
) -> Dict:
    """
    Parse path style s3 urls.
    e.g. https://s3.us-east-1.amazonaws.com/bucket/key
    """
    parsed = urlparse(url)
    # parsed.path points to "/bucket/key"
    if parsed.path:
        s3_bucket_key = parsed.path.split("/", 2)[1:]
        # FIX: a URL such as https://s3.amazonaws.com/bucket-only yields a
        # single path segment; previously indexing s3_bucket_key[1] raised
        # IndexError instead of the documented ValueError (which callers such
        # as is_s3_protocol_url catch). Require a non-empty bucket AND key.
        if len(s3_bucket_key) == 2 and all(s3_bucket_key):
            return {
                bucket_name_property: s3_bucket_key[0],
                object_key_property: s3_bucket_key[1],
            }
    raise ValueError("URL given to the parse method is not a valid S3 url {0}".format(url))
) except Exception as ex: # re-raise the catch all exception after we track it in our telemetry diff --git a/samcli/runtime_config.json b/samcli/runtime_config.json index b648be6cc3..3609a8eea6 100644 --- a/samcli/runtime_config.json +++ b/samcli/runtime_config.json @@ -1,3 +1,3 @@ { - "app_template_repo_commit": "67f28fd83477e0e15b394f995afb33b2053b4074" + "app_template_repo_commit": "bb905c379830c3d8edbc196bda731076549028e3" } diff --git a/tests/end_to_end/test_stages.py b/tests/end_to_end/test_stages.py index 8e60211924..f405606331 100644 --- a/tests/end_to_end/test_stages.py +++ b/tests/end_to_end/test_stages.py @@ -12,6 +12,7 @@ from samcli.cli.global_config import GlobalConfig from filelock import FileLock +from samcli.lib.utils.s3 import parse_s3_url from tests.end_to_end.end_to_end_context import EndToEndTestContext from tests.testing_utils import CommandResult, run_command, run_command_with_input @@ -102,7 +103,7 @@ def _download_packaged_file(self): ) if zipped_fn_s3_loc: - s3_info = S3Uploader.parse_s3_url(zipped_fn_s3_loc) + s3_info = parse_s3_url(zipped_fn_s3_loc) self.s3_client.download_file(s3_info["Bucket"], s3_info["Key"], str(zip_file_path)) with zipfile.ZipFile(zip_file_path, "r") as zip_refzip: diff --git a/tests/unit/cli/test_context.py b/tests/unit/cli/test_context.py index 709182e0f5..06dce014a5 100644 --- a/tests/unit/cli/test_context.py +++ b/tests/unit/cli/test_context.py @@ -5,6 +5,8 @@ from unittest import TestCase from unittest.mock import patch, ANY +from rich.console import Console + from samcli.cli.context import Context from samcli.lib.utils.sam_logging import ( SamCliLogger, @@ -20,6 +22,10 @@ def test_must_initialize_with_defaults(self): self.assertEqual(ctx.debug, False, "debug must default to False") + def test_must_have_console(self): + ctx = Context() + self.assertTrue(isinstance(ctx.console, Console)) + def test_must_set_get_debug_flag(self): ctx = Context() diff --git a/tests/unit/commands/logs/core/__init__.py 
b/tests/unit/commands/logs/core/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/commands/logs/core/test_command.py b/tests/unit/commands/logs/core/test_command.py new file mode 100644 index 0000000000..5b7b4e334e --- /dev/null +++ b/tests/unit/commands/logs/core/test_command.py @@ -0,0 +1,73 @@ +import unittest +from unittest.mock import Mock, patch +from samcli.commands.logs.core.command import LogsCommand +from samcli.commands.logs.command import DESCRIPTION +from tests.unit.cli.test_command import MockFormatter + + +class MockParams: + def __init__(self, rv, name): + self.rv = rv + self.name = name + + def get_help_record(self, ctx): + return self.rv + + +class TestLogsCommand(unittest.TestCase): + @patch.object(LogsCommand, "get_params") + def test_get_options_logs_command_text(self, mock_get_params): + ctx = Mock() + ctx.command_path = "sam logs" + ctx.parent.command_path = "sam" + formatter = MockFormatter(scrub_text=True) + # NOTE(sriram-mv): One option per option section. 
+ mock_get_params.return_value = [ + MockParams(rv=("--region", "Region"), name="region"), + MockParams(rv=("--debug", ""), name="debug"), + MockParams(rv=("--config-file", ""), name="config_file"), + MockParams(rv=("--stack-name", ""), name="stack_name"), + MockParams(rv=("--tail", ""), name="tail"), + MockParams(rv=("--beta-features", ""), name="beta_features"), + ] + + cmd = LogsCommand(name="logs", requires_credentials=True, description=DESCRIPTION) + expected_output = { + "AWS Credential Options": [("", ""), ("--region", ""), ("", "")], + "Additional Options": [("", ""), ("--tail", ""), ("", "")], + "Beta Options": [("", ""), ("--beta-features", ""), ("", "")], + "Configuration Options": [("", ""), ("--config-file", ""), ("", "")], + "Description": [(cmd.description + cmd.description_addendum, "")], + "Examples": [], + "Fetch from Cloudwatch log groups": [ + ("", ""), + ( + "$ sam logs --cw-log-group " + "/aws/lambda/myfunction-123 " + "--cw-log-group " + "/aws/lambda/myfunction-456\x1b[0m", + "", + ), + ], + "Fetch logs from resource defined in nested Cloudformation stack": [ + ("", ""), + ("$ sam " "logs " "---stack-name " "mystack " "-n " "MyNestedStack/HelloWorldFunction\x1b[0m", ""), + ], + "Fetch logs from supported resources in Cloudformation stack": [ + ("", ""), + ("$ sam logs " "---stack-name " "mystack\x1b[0m", ""), + ], + "Fetch logs with Lambda Function Logical ID and Cloudformation Stack Name": [ + ("", ""), + ("$ " "sam " "logs " "-n " "HelloWorldFunction " "--stack-name " "mystack\x1b[0m", ""), + ], + "Log Identifier Options": [("", ""), ("--stack-name", ""), ("", "")], + "Other Options": [("", ""), ("--debug", ""), ("", "")], + "Tail new logs": [("", ""), ("$ sam logs -n HelloWorldFunction --stack-name mystack " "--tail\x1b[0m", "")], + "View logs for specific time range": [ + ("", ""), + ("$ sam logs -n HelloWorldFunction " "--stack-name mystack -s '10min ago' " "-e '2min ago'\x1b[0m", ""), + ], + } + cmd.format_options(ctx, formatter) + 
self.assertEqual(formatter.data, expected_output) diff --git a/tests/unit/commands/logs/core/test_formatter.py b/tests/unit/commands/logs/core/test_formatter.py new file mode 100644 index 0000000000..e59e90207b --- /dev/null +++ b/tests/unit/commands/logs/core/test_formatter.py @@ -0,0 +1,12 @@ +from shutil import get_terminal_size +from unittest import TestCase + +from samcli.cli.row_modifiers import BaseLineRowModifier +from samcli.commands.logs.core.formatters import LogsCommandHelpTextFormatter + + +class TestLogsCommandHelpTextFormatter(TestCase): + def test_logs_formatter(self): + self.formatter = LogsCommandHelpTextFormatter() + self.assertTrue(self.formatter.left_justification_length <= get_terminal_size().columns // 2) + self.assertIsInstance(self.formatter.modifiers[0], BaseLineRowModifier) diff --git a/tests/unit/commands/logs/core/test_options.py b/tests/unit/commands/logs/core/test_options.py new file mode 100644 index 0000000000..4b2acd844e --- /dev/null +++ b/tests/unit/commands/logs/core/test_options.py @@ -0,0 +1,12 @@ +from unittest import TestCase + +from click import Option + +from samcli.commands.logs.command import cli +from samcli.commands.logs.core.options import ALL_OPTIONS + + +class TestOptions(TestCase): + def test_all_options_formatted(self): + command_options = [param.human_readable_name if isinstance(param, Option) else None for param in cli.params] + self.assertEqual(sorted(ALL_OPTIONS), sorted(filter(lambda item: item is not None, command_options + ["help"]))) diff --git a/tests/unit/commands/package/core/__init__.py b/tests/unit/commands/package/core/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/commands/package/core/test_command.py b/tests/unit/commands/package/core/test_command.py new file mode 100644 index 0000000000..fdfd571461 --- /dev/null +++ b/tests/unit/commands/package/core/test_command.py @@ -0,0 +1,85 @@ +import unittest +from unittest.mock import Mock, patch, MagicMock +from 
samcli.commands.package.core.command import PackageCommand +from samcli.commands.package.command import DESCRIPTION +from tests.unit.cli.test_command import MockFormatter + + +class MockParams: + def __init__(self, rv, name): + self.rv = rv + self.name = name + + def get_help_record(self, ctx): + return self.rv + + +class TestPackageCommand(unittest.TestCase): + @patch.object(PackageCommand, "get_params") + def test_get_options_package_command_text(self, mock_get_params): + with patch("click.get_current_context", return_value=MagicMock()) as mock_get_current_context: + # Set up the chain of calls to return 'mock' on .get() + mock_get_current_context.return_value.obj.console.capture().__enter__().get.return_value = "mock" + ctx = Mock() + ctx.command_path = "sam package" + ctx.parent.command_path = "sam" + formatter = MockFormatter(scrub_text=True) + # NOTE(sriram-mv): One option per option section. + mock_get_params.return_value = [ + MockParams(rv=("--region", "Region"), name="region"), + MockParams(rv=("--debug", ""), name="debug"), + MockParams(rv=("--config-file", ""), name="config_file"), + MockParams(rv=("--s3-prefix", ""), name="s3_prefix"), + MockParams(rv=("--s3-bucket", ""), name="s3_bucket"), + MockParams(rv=("--signing-profiles", ""), name="signing_profiles"), + MockParams(rv=("--stack-name", ""), name="stack_name"), + MockParams(rv=("--force-upload", ""), name="force_upload"), + MockParams(rv=("--beta-features", ""), name="beta_features"), + ] + + cmd = PackageCommand(name="package", requires_credentials=False, description=DESCRIPTION) + expected_output = { + "AWS Credential Options": [("", ""), ("--region", ""), ("", "")], + "Acronyms": [("", ""), ("S3", ""), ("ECR", ""), ("KMS", "")], + "Additional Options": [("", ""), ("--signing-profiles", ""), ("", "")], + "Automatic resolution of S3 buckets": [("", ""), ("$ sam package --resolve-s3\x1b[0m", "")], + "Beta Options": [("", ""), ("--beta-features", ""), ("", "")], + "Configuration Options": [("", 
""), ("--config-file", ""), ("", "")], + "Customized location for uploading artifacts": [ + ("", ""), + ("$ sam package --s3-bucket " "S3_BUCKET " "--output-template-file " "packaged.yaml\x1b[0m", ""), + ], + "Description": [ + ( + "\n" + " Creates and uploads artifacts based on the package type " + "of a given resource.\n" + " It uploads local images to ECR for `Image` package " + "types.\n" + " It creates a zip of code and dependencies and uploads it " + "to S3 for `Zip` package types. \n" + " \n" + " A new template is returned which replaces references to " + "local artifacts\n" + " with the AWS location where the command uploaded the " + "artifacts.\n" + " \x1b[1m\n" + " This command may not require access to AWS " + "credentials.\x1b[0m", + "", + ) + ], + "Examples": [], + "Get packaged template": [ + ("", ""), + ("$ sam package --resolve-s3 --output-template-file " "packaged.yaml\x1b[0m", ""), + ], + "Infrastructure Options": [("", ""), ("--s3-prefix", ""), ("", "")], + "Other Options": [("", ""), ("--debug", ""), ("", "")], + "Package Management Options": [("", ""), ("--force-upload", ""), ("", "")], + "Required Options": [("", ""), ("--s3-bucket", ""), ("", "")], + "Supported Resources": [("\n", ""), ("mock", "")], + } + + cmd.format_options(ctx, formatter) + self.assertEqual(formatter.data, expected_output) diff --git a/tests/unit/commands/package/core/test_formatter.py b/tests/unit/commands/package/core/test_formatter.py new file mode 100644 index 0000000000..559b247fd6 --- /dev/null +++ b/tests/unit/commands/package/core/test_formatter.py @@ -0,0 +1,12 @@ +from shutil import get_terminal_size +from unittest import TestCase + +from samcli.cli.row_modifiers import BaseLineRowModifier +from samcli.commands.package.core.formatters import PackageCommandHelpTextFormatter + + +class TestPackageCommandHelpTextFormatter(TestCase): + def test_deploy_formatter(self): + self.formatter = PackageCommandHelpTextFormatter() + 
self.assertTrue(self.formatter.left_justification_length <= get_terminal_size().columns // 2) + self.assertIsInstance(self.formatter.modifiers[0], BaseLineRowModifier) diff --git a/tests/unit/commands/package/core/test_options.py b/tests/unit/commands/package/core/test_options.py new file mode 100644 index 0000000000..534aadb2f3 --- /dev/null +++ b/tests/unit/commands/package/core/test_options.py @@ -0,0 +1,12 @@ +from unittest import TestCase + +from click import Option + +from samcli.commands.package.command import cli +from samcli.commands.package.core.options import ALL_OPTIONS + + +class TestOptions(TestCase): + def test_all_options_formatted(self): + command_options = [param.human_readable_name if isinstance(param, Option) else None for param in cli.params] + self.assertEqual(sorted(ALL_OPTIONS), sorted(filter(lambda item: item is not None, command_options + ["help"]))) diff --git a/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager.py b/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager.py index f40fb36bd8..69f7d76ff6 100644 --- a/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager.py +++ b/tests/unit/lib/bootstrap/companion_stack/test_companion_stack_manager.py @@ -48,8 +48,10 @@ def test_set_functions(self): @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.mktempfile") @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.S3Uploader") + @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.parse_s3_url") def test_create_companion_stack( self, + parse_s3_url_mock, s3_uploader_mock, mktempfile_mock, ): @@ -70,8 +72,10 @@ def test_create_companion_stack( @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.mktempfile") @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.S3Uploader") + @patch("samcli.lib.bootstrap.companion_stack.companion_stack_manager.parse_s3_url") def test_update_companion_stack( self, + 
parse_s3_url_mock, s3_uploader_mock, mktempfile_mock, ): diff --git a/tests/unit/lib/package/test_artifact_exporter.py b/tests/unit/lib/package/test_artifact_exporter.py index 1a2e7f2227..41ac388714 100644 --- a/tests/unit/lib/package/test_artifact_exporter.py +++ b/tests/unit/lib/package/test_artifact_exporter.py @@ -19,7 +19,6 @@ AdditiveFilePermissionPermissionMapper, AdditiveDirPermissionPermissionMapper, ) -from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.package.uploaders import Destination from samcli.lib.package.utils import zip_folder, make_zip, make_zip_with_lambda_permissions, make_zip_with_permissions from samcli.lib.utils.packagetype import ZIP, IMAGE @@ -293,51 +292,6 @@ def _assert_is_valid_s3_url(self, url): def _assert_is_invalid_s3_url(self, url): self.assertFalse(is_s3_protocol_url(url), "{0} should be valid".format(url)) - def test_parse_s3_url(self): - valid = [ - {"url": "s3://foo/bar", "result": {"Bucket": "foo", "Key": "bar"}}, - {"url": "s3://foo/bar/cat/dog", "result": {"Bucket": "foo", "Key": "bar/cat/dog"}}, - { - "url": "s3://foo/bar/baz?versionId=abc¶m1=val1¶m2=val2", - "result": {"Bucket": "foo", "Key": "bar/baz", "VersionId": "abc"}, - }, - { - # VersionId is not returned if there are more than one versionId - # keys in query parameter - "url": "s3://foo/bar/baz?versionId=abc&versionId=123", - "result": {"Bucket": "foo", "Key": "bar/baz"}, - }, - { - # Path style url - "url": "https://s3-eu-west-1.amazonaws.com/bucket/key", - "result": {"Bucket": "bucket", "Key": "key"}, - }, - { - # Path style url - "url": "https://s3.us-east-1.amazonaws.com/bucket/key", - "result": {"Bucket": "bucket", "Key": "key"}, - }, - ] - - invalid = [ - # For purposes of exporter, we need S3 URLs to point to an object - # and not a bucket - "s3://foo", - "https://www.amazon.com", - "https://s3.us-east-1.amazonaws.com", - ] - - for config in valid: - result = S3Uploader.parse_s3_url( - config["url"], bucket_name_property="Bucket", 
class TestS3Utils(TestCase):
    """Unit tests for samcli.lib.utils.s3.parse_s3_url."""

    def test_parse_s3_url(self):
        # Valid URLs paired with the exact parse result each must produce.
        valid = [
            {"url": "s3://foo/bar", "result": {"Bucket": "foo", "Key": "bar"}},
            {"url": "s3://foo/bar/cat/dog", "result": {"Bucket": "foo", "Key": "bar/cat/dog"}},
            {
                # FIX: restore the "&param1=...&param2=..." query separators that
                # were mangled into "¶..." (mojibake of "&para"). With the
                # mangled URL, parse_qs sees a single versionId value of
                # "abc¶m1=val1¶m2=val2" and the assertion below fails.
                "url": "s3://foo/bar/baz?versionId=abc&param1=val1&param2=val2",
                "result": {"Bucket": "foo", "Key": "bar/baz", "VersionId": "abc"},
            },
            {
                # VersionId is not returned if there are more than one versionId
                # keys in query parameter
                "url": "s3://foo/bar/baz?versionId=abc&versionId=123",
                "result": {"Bucket": "foo", "Key": "bar/baz"},
            },
            {
                # Path style url
                "url": "https://s3-eu-west-1.amazonaws.com/bucket/key",
                "result": {"Bucket": "bucket", "Key": "key"},
            },
            {
                # Path style url
                "url": "https://s3.us-east-1.amazonaws.com/bucket/key",
                "result": {"Bucket": "bucket", "Key": "key"},
            },
        ]

        invalid = [
            # For purposes of exporter, we need S3 URLs to point to an object
            # and not a bucket
            "s3://foo",
            "https://www.amazon.com",
            "https://s3.us-east-1.amazonaws.com",
        ]

        for config in valid:
            result = parse_s3_url(
                config["url"], bucket_name_property="Bucket", object_key_property="Key", version_property="VersionId"
            )

            self.assertEqual(result, config["result"])

        for url in invalid:
            with self.assertRaises(ValueError):
                parse_s3_url(url)